pax_global_header00006660000000000000000000000064131737170120014514gustar00rootroot0000000000000052 comment=25036c7c1d1f56797421aa162fdbf4942a86eda9 dipy-0.13.0/000077500000000000000000000000001317371701200125425ustar00rootroot00000000000000dipy-0.13.0/.coveragerc000066400000000000000000000001731317371701200146640ustar00rootroot00000000000000[run] branch = True source = dipy include = */dipy/* omit = */setup.py */benchmarks/* [report] show_missing = True dipy-0.13.0/.gitattributes000066400000000000000000000000421317371701200154310ustar00rootroot00000000000000dipy/COMMIT_INFO.txt export-subst dipy-0.13.0/.gitignore000066400000000000000000000004531317371701200145340ustar00rootroot00000000000000*.pyc *.pyd *.so *.c *.cpp *.png *.vtk *.gz *.dpy *.npy *.img *.hdr *.mat *.pkl *.orig build *~ doc/_build doc/reference_cmd doc/*-stamp MANIFEST dist/ .project .pydevproject *.sw[po] dipy.egg-info/ nibabel*egg/ pyx-stamps __config__.py .DS_Store .coverage .buildbot.patch .eggs/ dipy/.idea/ .idea/ dipy-0.13.0/.mailmap000066400000000000000000000137111317371701200141660ustar00rootroot00000000000000Ariel Rokem arokem Ariel Rokem arokem Bago Amirbekian Bago Amirbekian Bago Amirbekian Bago Amirbekian Bago Amirbekian Bago Amirbekian Bago Amirbekian MrBago Bago Amirbekian MrBago Maxime Descoteaux mdesco Maxime Descoteaux mdesco Maxime Descoteaux Maxime Descoteaux Stefan van der Walt Stefan van der Walt Christopher Nguyen Christopher Christopher Nguyen Christopher Nguyen Eleftherios Garyfallidis Dipy Developers Eleftherios Garyfallidis Eleftherios Garyfallidis Eleftherios Garyfallidis Eleftherios Garyfallidis Eleftherios Garyfallidis Eleftherios Garyfallidis Marc-Alexandre Côté Marc-Alexandre Cote Gabriel Girard Gabriel Girard Etienne St-Onge Etienne St-Onge Etienne St-Onge StongeEtienne Emanuele Olivetti Emanuele Olivetti Ian Nimmo-Smith Ian Nimmo-Smith Ian Nimmo-Smith iannimmosmith Ian Nimmo-Smith Ian Nimmo-Smith Matthew Brett Matthew Brett Jon Haitz Legarreta Gorroño Jon Haitz Legarreta Omar Ocegueda omarocegueda Omar Ocegueda omarocegueda Omar Ocegueda Omar Ocegueda Gonzalez Shahnawaz Ahmed Shahnawaz Ahmed Shahnawaz Ahmed root Shahnawaz Ahmed Your Name Sylvain Merlet smerlet Mauro Zucchelli Mauro Mauro Zucchelli maurozucchelli Andrew Lawrence AndrewLawrence Samuel St-Jean samuelstjean Samuel St-Jean samuelstjean Samuel St-Jean samuelstjean Samuel St-Jean Samuel St-Jean Samuel St-Jean Samuel St-Jean Samuel St-Jean Samuel St-Jean Riddhish Bhalodia Riddhish Bhalodia Ranveer Aggarwal Ranveer Aggarwal Serge Koudoro skab12 Serge Koudoro skab12 Serge Koudoro skab12 Serge Koudoro skab12 Serge Koudoro skoudoro Tingyi Wanyan Tingyi Wanyan Tingyi Wanyan Tingyi Wanyan Tingyi Wanyan Tingyi Wanyan Kesshi Jordan kesshijordan Kesshi Jordan Kesshi jordan Rafael Neto Henriques Rafael Henriques Rafael Neto Henriques Rafael Neto Henriques RafaelNH Alexandre Gauvin algo Alexandre Gauvin Alexandre Gauvin Nil Goyette Nil Goyette Eric Peterson etpeterson Rutger Fick Rutger Fick Demian Wassermann Demian Wassermann Sourav Singh Sourav Sven Dorkenwald Manu Tej Sharma manu-tej David Qixiang Chen Oscar Esteban Matthieu Dumont unknown Guillaume Theaud Adam Rybinski Bennet Fauber dipy-0.13.0/.travis.yml000066400000000000000000000102121317371701200146470ustar00rootroot00000000000000# vim ft=yaml # Multiple lines can be made a single "virtual line" because of the way that # Travis munges each line before executing it to print out the exit status. 
# It's okay for it to be on multiple physical lines, so long as you remember: # - There can't be any leading "-"s - All newlines will be removed, so use # ";"s sudo: false # To use travis container infrastructure language: python cache: directories: - $HOME/.cache/pip addons: apt: packages: - libhdf5-serial-dev env: global: - DEPENDS="cython numpy scipy matplotlib h5py nibabel cvxpy" - VENV_ARGS="--python=python" - INSTALL_TYPE="setup" - EXTRA_WHEELS="https://5cf40426d9f06eb7461d-6fe47d9331aba7cd62fc36c7196769e4.ssl.cf2.rackcdn.com" - PRE_WHEELS="https://7933911d6844c6c53a7d-47bd50c35cd79bd838daf386af554a83.ssl.cf2.rackcdn.com" - EXTRA_PIP_FLAGS="--timeout=60 --find-links=$EXTRA_WHEELS" python: - 3.4 - 3.5 - 3.6 matrix: include: - python: 2.7 # To test minimum dependencies - python: 2.7 env: # Check these values against requirements.txt and dipy/info.py - DEPENDS="cython==0.25.1 numpy==1.7.1 scipy==0.9.0 nibabel==2.1.0 h5py==2.4.0" - python: 2.7 env: - DEPENDS="$DEPENDS scikit_learn" - python: 3.5 env: - COVERAGE=1 - DEPENDS="$DEPENDS scikit_learn" # To test vtk functionality - python: 2.7 sudo: true # This is set to true for apt-get env: - COVERAGE=1 - VTK=1 - VTK_VER="python-vtk" - LIBGL_ALWAYS_INDIRECT=y - VENV_ARGS="--system-site-packages --python=/usr/bin/python2.7" - TEST_WITH_XVFB=true - DEPENDS="$DEPENDS scikit_learn" - python: 2.7 env: - INSTALL_TYPE=sdist - python: 2.7 env: - INSTALL_TYPE=pip # Dependency checking should get all needed dependencies - DEPENDS="" - python: 2.7 env: - INSTALL_TYPE=wheel - python: 2.7 env: - INSTALL_TYPE=requirements - DEPENDS="" - python: 3.5 # Check against latest available pre-release version of all packages env: - USE_PRE=1 before_install: - PIPI="pip install $EXTRA_PIP_FLAGS" - if [ -n "$USE_PRE" ]; then PIPI="$PIPI --find-links=$PRE_WHEELS --pre"; fi - pip install --upgrade virtualenv - virtualenv $VENV_ARGS venv - source venv/bin/activate - python --version # just to check # Needed for Python 3.5 wheel fetching - $PIPI --upgrade pip setuptools - $PIPI nose; - $PIPI numpy; - if [ -n "$DEPENDS" ]; then $PIPI $DEPENDS; fi - if [ "${COVERAGE}" == "1" ]; then pip install coverage coveralls codecov; fi - if [ "${VTK}" == "1" ]; then sudo apt-get update; sudo apt-get install -y $VTK_VER; sudo apt-get install -y xvfb; sudo apt-get install -y python-tk; sudo apt-get install -y python-imaging; $PIPI xvfbwrapper; fi install: - | if [ "$INSTALL_TYPE" == "setup" ]; then python setup.py install elif [ "$INSTALL_TYPE" == "pip" ]; then $PIPI . elif [ "$INSTALL_TYPE" == "sdist" ]; then python setup_egg.py egg_info # check egg_info while we're here python setup_egg.py sdist $PIPI dist/*.tar.gz elif [ "$INSTALL_TYPE" == "wheel" ]; then pip install wheel python setup_egg.py bdist_wheel $PIPI dist/*.whl elif [ "$INSTALL_TYPE" == "requirements" ]; then $PIPI -r requirements.txt python setup.py install fi # command to run tests, e.g. 
python setup.py test script: # Change into an innocuous directory and find tests from installation - mkdir for_testing - cd for_testing # No figure windows for mpl; quote to hide : from travis-ci yaml parsing - 'echo "backend : agg" > matplotlibrc' - if [ "${COVERAGE}" == "1" ]; then cp ../.coveragerc .; COVER_ARGS="--with-coverage --cover-package dipy"; fi - nosetests --with-doctest --verbose $COVER_ARGS dipy after_success: - if [ "${COVERAGE}" == "1" ]; then coveralls; codecov; fi dipy-0.13.0/AUTHOR000066400000000000000000000025641317371701200134760ustar00rootroot00000000000000Eleftherios Garyfallidis Ian Nimmo-Smith Matthew Brett Bago Amirbekian Stefan Van der Walt Ariel Rokem Christopher Nguyen Yaroslav Halchenko Emanuele Olivetti Mauro Zucchelli Samuel St-Jean Maxime Descoteaux Gabriel Girard Matthieu Dumont Kimberly Chan Erik Ziegler Emmanuel Caruyer Matthias Ekman Jean-Christophe Houde Michael Paquette Sylvain Merlet Omar Ocegueda Marc-Alexandre Cote Demian Wassermann endolith Andrew Lawrence Gregory R. Lee Maria Luisa Mandelli Kesshi jordan Chantal Tax Qiyuan Tian Shahnawaz Ahmed Eric Peterson dipy-0.13.0/CONTRIBUTING.md000066400000000000000000000060241317371701200147750ustar00rootroot00000000000000# Contributing to DIPY DIPY is an open-source software project, and we have an open development process. This means that we welcome contributions from anyone. We do ask that you first read this document and follow the guidelines we have outlined here and that you follow the [NIPY community code of conduct](http://nipy.org/conduct.html). ## Getting started If you are looking for places that you could make a meaningful contribution, please contact us! We respond to queries on the [Nipy mailing list](https://mail.python.org/mailman/listinfo/neuroimaging), and to questions on our [gitter channel](https://gitter.im/nipy/dipy). A good place to get an idea for things that currently need attention is the [issues](https://github.com/nipy/dipy/issues) page of our Github repository. This page collects outstanding issues that you can help address. Join the conversation about the issue, by typing into the text box in the issue page. ## The development process Please refer to the [development section](http://dipy.org/devel/index.html) of the documentation for the procedures we use in developing the code. ## When writing code, please pay attention to the following: ### Tests and test coverage We use [nosetests](https://nose.readthedocs.org/) to write tests of the code, and [Travis-CI](https://travis-ci.org/nipy/dipy) for continuous integration. If you are adding code into a module that already has a 'test' file (e.g., if you are adding code into ``dipy/tracking/streamline.py``), add additional tests into the respective file (e.g., ``dipy/tracking/tests/test_streamline.py ``). New contributions are required to have as close to 100% code coverage as possible. This means that the tests written cause each and every statement in the code to be executed, covering corner-cases, error-handling, and logical branch points. To check how much coverage the tests have, you will need. When running: nosetests --with-coverage --cover-package=dipy You will get the usual output of nose, but also a table that indicates the test coverage in each module: the percentage of coverage and also the lines of code that are not run in the tests. You can also see the test coverage in the Travis run corresponding to the PR (in the log for the machine with ``COVERAGE=1``). 
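As a concrete sketch (the helper name and module used here are purely illustrative, not an existing DIPY function), a nose-style test uses the ``numpy.testing`` assertions and lives in the ``tests`` directory next to the code it exercises:

    import numpy.testing as npt
    from dipy.tracking.streamline import my_helper  # hypothetical function

    def test_my_helper():
        # exercise the normal path
        npt.assert_array_almost_equal(my_helper([1.0, 2.0]), [2.0, 4.0])
        # and at least one error / corner case
        npt.assert_raises(ValueError, my_helper, [])

nose collects any function whose name starts with ``test_``, so adding such functions to the existing ``test_*.py`` files is enough for them to run under the coverage command above.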
Contributions to tests that extend test coverage in older modules that are not fully covered are very welcome! ### Supporting both Python 2 and 3 Most of the functionality in DIPY works on both Python 3 and Python 2. Please follow the instructions [here](http://dipy.org/devel/python3.html) to write code that works on both versions. ### Code style Code contributions should be formatted according to the [DIPY Coding Style Guideline](./doc/devel/coding_style_guideline.rst). Please, read the document to conform your code contributions to the DIPY standard. ### Documentation DIPY uses `Sphinx `_ to generate documentation. The [DIPY Coding Style Guideline](./doc/devel/coding_style_guideline.rst) contains details about documenting the contributions. dipy-0.13.0/Changelog000066400000000000000000000137341317371701200143640ustar00rootroot00000000000000.. -*- mode: rst -*- .. vim:syntax=rest .. _changelog: Dipy Development Changelog ----------------------------- Dipy is a diffusion MR imaging library written in Python 'Close gh-' statements refer to GitHub issues that are available at:: http://github.com/nipy/dipy/issues The full VCS changelog is available here: http://github.com/nipy/dipy/commits/master Releases ~~~~~~~~ Dipy ++++ The code found in Dipy was created by the people found in the AUTHOR file. * 0.13 (Monday, 24 October 2017) - Faster local PCA implementation. - Fixed different issues with OpenMP and Windows / OSX. - Replacement of cvxopt by cvxpy. - Replacement of Pytables by h5py. - Updated API to support latest numpy version (1.14). - New user interfaces for visualization. - Large documentation update. * 0.12 (Tuesday, 26 June 2017) - IVIM Simultaneous modeling of perfusion and diffusion. - MAPL, tissue microstructure estimation using Laplacian-regularized MAP-MRI. - DKI-based microstructural modelling. - Free water diffusion tensor imaging. - Denoising using Local PCA. - Streamline-based registration (SLR). - Fiber to bundle coherence (FBC) measures. - Bayesian MRF-based tissue classification. - New API for integrated user interfaces. - New hdf5 file (.pam5) for saving reconstruction results. - Intreractive slicing of images, ODFS and peaks. - Updated API to support latest numpy versions. - New system for automatically generating command line interfaces. - Faster computation of Cross Correlation metric for registration. * 0.11 (Sunday, 21 February 2016) - New framework for contextual enhancement of ODFs. - Compatibility with numpy (1.11). - Compatibility with VTK 7.0 which supports Python 3.x. - Faster PIESNO for noise estimation. - Reorient gradient directions according to motion correction parameters. - Supporting Python 3.3+ but not 3.2. - Reduced memory usage in DTI. - DSI now can use datasets with multiple b0s. - Fixed different issues with Windows 64bit and Python 3.5. * 0.10 (Thursday, 2 December 2015) * Compatibility with new versions of scipy (0.16) and numpy (1.10). * New cleaner visualization API, including compatibility with VTK 6, and functions to create your own interactive visualizations. * Diffusion Kurtosis Imaging(DKI): Google Summer of Code work by Rafael Henriques. * Mean Apparent Propagator (MAP) MRI for tissue microstructure estimation. * Anisotropic Power Maps from spherical harmonic coefficients. * New framework for affine registration of images. 
* 0.9.2 (Wednesday, 18 March 2015) * Anatomically Constrained Tissue Classifiers for Tracking * Massive speedup of Constrained Spherical Deconvolution (CSD) * Recursive calibration of response function for CSD * New experimental framework for clustering * Improvements and 10X speedup for Quickbundles * Improvements in Linear Fascicle Evaluation (LiFE) * New implementation of Geodesic Anisotropy * New efficient transformation functions for registration * Sparse Fascicle Model supports acquisitions with multiple b-values * 0.8.0 (Tuesday, 6 Jan 2015) * Nonlinear Image-based Registration (SyN) * Streamline-based Linear Registration (SLR) * Linear Fascicle Evaluation (LiFE) * Cross-validation for reconstruction models * Sparse Fascicle Model (SFM) * Non-local means denoising (NLMEANS) * New modular tracking machinery * Closed 388 issues and merged 155 pull requests * A variety of bug-fixes and speed improvements * 0.7.1 (Thursday, 16 Jan 2014) * Made installing Dipy easier and more universal * Fixed automated seeding problems for tracking * Removed default parameter for odf_vertices in EuDX * 0.7.0 (Monday, 23 Dec 2013) * Constrained Spherical Deconvolution (CSD) * Simple Harmonic Oscillator based Reconstruction and Estimation (SHORE) * Sharpening Deconvolution Transform (SDT) * Signal-to-noise ratio estimation * Parallel processing enabled for all reconstruction models using `peaks_from_model` * Simultaneous peak and ODF visualization * Streamtube visualization * Electrostatic repulsion for sphere generation * Connectivity matrices and density maps * Streamline filtering through specific ROIs using `target` * Brain extraction and foreground extraction using `median_otsu` * RESTORE fitting for DTI * Westin's Tensor maps * Access to more publicly available datasets directly through Dipy functions. * 3x more tutorials than previous release * 0.6.0 (Sunday, 24 Mar 2013) * Cython 0.17+ enforced * New reconstruction models API * Diffusion Spectrum Imaging (DSI) * DSI with deconvolution * Generalized Q-sampling Imaging 2 (GQI2) * Modular fiber tracking * deterministic * probabilistic * Fast volume indexing (a faster ndindex) * Spherical Harmonic Models * Opdt (Tristan-Vega et. al) * CSA odf (Aganj et. al) * Analytical Q-ball (Descoteaux et. al) * Tuch's Q-ball (Tuch et. al) * Visualization of spherical functions * Peak finding in odfs * Non-linear peak finding * Sphere Object * Gradients Object * 2D sphere plotting * MultiTensor and Ball & Sticks voxel simulations * Fetch/Download data for examples * Software phantom generation * Angular similarity for comparisons between multiple peaks * SingleVoxelModel to MultiVoxelModel decorator * Mrtrix and fibernavigator SH bases * More Benchmarks * More Tests * Color FA and other Tensor metrics added * Scripts for the ISBI 2013 competition * Fit_tensor script added * Radial basis function interpolation on the sphere * New examples/tutorials * 0.5.0 (Friday, 11 Feb 2011) * Initial release. * Reconstruction algorithms e.g. GQI, DTI * Tractography generation algorithms e.g. EuDX * Intelligent downsampling of tracks * Ultra fast tractography clustering * Resampling datasets with anisotropic voxels to isotropic * Visualizing multiple brains simultaneously * Finding track correspondence between different brains * Reading many different file formats e.g. 
Trackvis or Nifti * Dealing with huge tractographies without memory restrictions * Playing with datasets interactively without storing * And much more and even more to come in next releases dipy-0.13.0/ISSUE_TEMPLATE.md000066400000000000000000000020771317371701200152550ustar00rootroot00000000000000## Description [Please provide a general introduction to the issue/proposal.] [If reporting a bug, attach the entire traceback from Python and follow the way to reproduce below] [If proposing an enhancement/new feature, provide links to related articles, reference examples, etc.] ## Way to reproduce [If reporting a bug, please include the following important information:] - [ ] Code example - [ ] Relevant images (if any) - [ ] Operating system and version (run `python -c "import platform; print(platform.platform())"`) - [ ] Python version (run `python -c "import sys; print("Python", sys.version)"`) - [ ] dipy version (run `python -c "import dipy; print(dipy.__version__)"`) - [ ] dependency version (numpy, scipy, nibabel, h5py, cvxpy, vtk) * import numpy; print("NumPy", numpy.__version__) * import scipy; print("SciPy", scipy.__version__) * import nibabel; print("Nibabel", nibabel.__version__) * import h5py; print("H5py", h5py.__version__) * import cvxpy; print("Cvxpy", cvxpy.__version__) * import vtk; print(vtk.vtkVersion.GetVTKSourceVersion()) dipy-0.13.0/LICENSE000066400000000000000000000032061317371701200135500ustar00rootroot00000000000000Unless otherwise specified by LICENSE.txt files in individual directories, or within individual files or functions, all code is: Copyright (c) 2008-2016, dipy developers All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the dipy developers nor the names of any contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
dipy-0.13.0/MANIFEST.in000066400000000000000000000007351317371701200143050ustar00rootroot00000000000000include AUTHOR LICENSE Makefile* MANIFEST.in setup* README.* include Changelog TODO recursive-include doc * recursive-include bin * recursive-include tools * recursive-include src * # setup utilities include setup_helpers.py include version_helpers.py include cythexts.py recursive-include fake_pyrex * # put this stuff back into setup.py (package_data) once I'm enlightened # enough to accomplish this herculean task recursive-include dipy/data * include dipy/COMMIT_INFO.txt dipy-0.13.0/Makefile000066400000000000000000000071461317371701200142120ustar00rootroot00000000000000# Simple makefile to quickly access handy build commands for Cython extension # code generation. Note that the actual code to produce the extension lives in # the setup.py file, this Makefile is just meant as a command # convenience/reminder while doing development. PYTHON ?= python PKGDIR=dipy DOCSRC_DIR=doc DOCDIR=${PKGDIR}/${DOCSRC_DIR} TESTDIR=${PKGDIR}/tests help: @echo "Numpy/Cython tasks. Available tasks:" @echo "ext -> build the Cython extension module." @echo "cython-html -> create annotated HTML from the .pyx sources" @echo "test -> run a simple test demo." @echo "all -> Call ext, html and finally test." all: ext cython-html test ext: recspeed.so propspeed.so vox2track.so \ distances.so streamlinespeed.so denspeed.so \ vec_val_sum.so quick_squash.so vector_fields.so \ crosscorr.so sumsqdiff.so expectmax.so bundlemin.so \ cythonutils.so featurespeed.so metricspeed.so \ clusteringspeed.so clustering_algorithms.so \ mrf.so test: ext nosetests . cython-html: ${PKGDIR}/reconst/recspeed.html ${PKGDIR}/tracking/propspeed.html ${PKGDIR}/tracking/vox2track.html ${PKGDIR}/tracking/distances.html ${PKGDIR}/tracking/streamlinespeed.html ${PKGDIR}/segment/cythonutils.html ${PKGDIR}/segment/featurespeed.html ${PKGDIR}/segment/metricspeed.html ${PKGDIR}/segment/clusteringspeed.html ${PKGDIR}/segment/clustering_algorithms.html recspeed.so: ${PKGDIR}/reconst/recspeed.pyx cythonutils.so: ${PKGDIR}/segment/cythonutils.pyx featurespeed.so: ${PKGDIR}/segment/featurespeed.pyx metricspeed.so: ${PKGDIR}/segment/metricspeed.pyx mrf.so: ${PKGDIR}/segment/mrf.pyx clusteringspeed.so: ${PKGDIR}/segment/clusteringspeed.pyx clustering_algorithms.so: ${PKGDIR}/segment/clustering_algorithms.pyx propspeed.so: ${PKGDIR}/tracking/propspeed.pyx vox2track.so: ${PKGDIR}/tracking/vox2track.pyx distances.so: ${PKGDIR}/tracking/distances.pyx streamlinespeed.so: ${PKGDIR}/tracking/streamlinespeed.pyx denspeed.so: ${PKGDIR}/denoise/denspeed.pyx vec_val_sum.so: ${PKGDIR}/reconst/vec_val_sum.pyx quick_squash.so: ${PKGDIR}/reconst/quick_squash.pyx vector_fields.so: ${PKGDIR}/align/vector_fields.pyx crosscorr.so: ${PKGDIR}/align/crosscorr.pyx sumsqdiff.so: ${PKGDIR}/align/sumsqdiff.pyx expectmax.so: ${PKGDIR}/align/expectmax.pyx bundlemin.so: ${PKGDIR}/align/bundlemin.pyx $(PYTHON) setup.py build_ext --inplace # Phony targets for cleanup and similar uses .PHONY: clean clean: - find ${PKGDIR} -name "*.so" -print0 | xargs -0 rm - find ${PKGDIR} -name "*.pyd" -print0 | xargs -0 rm - find ${PKGDIR} -name "*.c" -print0 | xargs -0 rm - find ${PKGDIR} -name "*.html" -print0 | xargs -0 rm rm -rf build rm -rf docs/_build rm -rf docs/dist rm -rf dipy/dipy.egg-info distclean: clean rm -rf dist # Suffix rules %.c : %.pyx cython $< %.html : %.pyx cython -a $< # Check for files not installed check-files: $(PYTHON) -c 'from nisext.testers import check_files; 
check_files("dipy")' # Print out info for possible install methods check-version-info: $(PYTHON) -c 'from nisext.testers import info_from_here; info_from_here("dipy")' # Run tests from installed code installed-tests: $(PYTHON) -c 'from nisext.testers import tests_installed; tests_installed("dipy")' # Run tests from installed code sdist-tests: $(PYTHON) -c 'from nisext.testers import sdist_tests; sdist_tests("dipy")' bdist-egg-tests: $(PYTHON) -c 'from nisext.testers import bdist_egg_tests; bdist_egg_tests("dipy")' source-release: clean $(PYTHON) -m compileall . $(PYTHON) setup.py sdist --formats=gztar,zip binary-release: clean $(PYTHON) setup_egg.py bdist_egg build-stamp-source: $(PYTHON) -c 'import cythexts; cythexts.build_stamp_source()' dipy-0.13.0/README.rst000066400000000000000000000033231317371701200142320ustar00rootroot00000000000000====== DIPY ====== .. image:: https://travis-ci.org/nipy/dipy.svg?branch=master :target: https://travis-ci.org/nipy/dipy .. image:: https://codecov.io/gh/nipy/dipy/branch/master/graph/badge.svg :target: https://codecov.io/gh/nipy/dipy DIPY is a python toolbox for analysis of MR diffusion imaging. DIPY is for research only; please do not use results from DIPY for clinical decisions. Website ======= Current information can always be found from the DIPY website - http://dipy.org Mailing Lists ============= Please see the developers' list at https://mail.python.org/mailman/listinfo/neuroimaging Please see the users' forum at https://neurostars.org Please join the gitter chatroom `here `_. Code ==== You can find our sources and single-click downloads: * `Main repository`_ on Github. * Documentation_ for all releases and current development tree. * Download as a tar/zip file the `current trunk`_. .. _main repository: http://github.com/nipy/dipy .. _Documentation: http://dipy.org .. _current trunk: http://github.com/nipy/dipy/archives/master Installing DIPY =============== DIPY can be installed using `pip`:: pip install dipy or using `conda`:: conda install -c conda-forge dipy vtk For detailed installation instructions, including instructions for installing from source, please read our `documentation `_. License ======= DIPY is licensed under the terms of the BSD license. Please see `LICENSE file `_. Contributing ============ We welcome contributions from the community. Please read our `contributor guidelines `. dipy-0.13.0/bin/000077500000000000000000000000001317371701200133125ustar00rootroot00000000000000dipy-0.13.0/bin/dipy_mask000077500000000000000000000003111317371701200152130ustar00rootroot00000000000000#!python from __future__ import division, print_function from dipy.workflows.flow_runner import run_flow from dipy.workflows.mask import MaskFlow if __name__ == "__main__": run_flow(MaskFlow()) dipy-0.13.0/bin/dipy_median_otsu000077500000000000000000000003301317371701200165700ustar00rootroot00000000000000#!python from __future__ import division, print_function from dipy.workflows.flow_runner import run_flow from dipy.workflows.segment import MedianOtsuFlow if __name__ == "__main__": run_flow(MedianOtsuFlow()) dipy-0.13.0/bin/dipy_nlmeans000077500000000000000000000003221317371701200157170ustar00rootroot00000000000000#!python from __future__ import division, print_function from dipy.workflows.denoise import NLMeansFlow from dipy.workflows.flow_runner import run_flow if __name__ == "__main__": run_flow(NLMeansFlow()) dipy-0.13.0/bin/dipy_reconst_csa000077500000000000000000000003461317371701200165730ustar00rootroot00000000000000#! 
/usr/bin/env python from __future__ import division, print_function from dipy.workflows.flow_runner import run_flow from dipy.workflows.reconst import ReconstCSAFlow if __name__ == "__main__": run_flow(ReconstCSAFlow()) dipy-0.13.0/bin/dipy_reconst_csd000077500000000000000000000003461317371701200165760ustar00rootroot00000000000000#! /usr/bin/env python from __future__ import division, print_function from dipy.workflows.flow_runner import run_flow from dipy.workflows.reconst import ReconstCSDFlow if __name__ == "__main__": run_flow(ReconstCSDFlow()) dipy-0.13.0/bin/dipy_reconst_dti000077500000000000000000000003301317371701200165760ustar00rootroot00000000000000#!python from __future__ import division, print_function from dipy.workflows.flow_runner import run_flow from dipy.workflows.reconst import ReconstDtiFlow if __name__ == "__main__": run_flow(ReconstDtiFlow()) dipy-0.13.0/bin/dipy_reconst_dti_restore000077500000000000000000000003461317371701200203500ustar00rootroot00000000000000#!python from __future__ import division, print_function from dipy.workflows.flow_runner import run_flow from dipy.workflows.reconst import ReconstDtiRestoreFlow if __name__ == "__main__": run_flow(ReconstDtiRestoreFlow()) dipy-0.13.0/cythexts.py000066400000000000000000000254711317371701200150000ustar00rootroot00000000000000import os from os.path import splitext, sep as filesep, join as pjoin, relpath from hashlib import sha1 from distutils.command.build_ext import build_ext from distutils.command.sdist import sdist from distutils.version import LooseVersion def derror_maker(klass, msg): """ Decorate distutils class to make run method raise error """ class K(klass): def run(self): raise RuntimeError(msg) return K def stamped_pyx_ok(exts, hash_stamp_fname): """ Check for match of recorded hashes for pyx, corresponding c files Parameters ---------- exts : sequence of ``Extension`` distutils ``Extension`` instances, in fact only need to contain a ``sources`` sequence field. hash_stamp_fname : str filename of text file containing hash stamps Returns ------- tf : bool True if there is a corresponding c file for each pyx or py file in `exts` sources, and the hash for both the (pyx, py) file *and* the c file match those recorded in the file named in `hash_stamp_fname`. """ # Calculate hashes for pyx and c files. Check for presence of c files. stamps = {} for mod in exts: for source in mod.sources: base, ext = splitext(source) if ext not in ('.pyx', '.py'): continue source_hash = sha1(open(source, 'rb').read()).hexdigest() c_fname = base + '.c' try: c_file = open(c_fname, 'rb') except IOError: return False c_hash = sha1(c_file.read()).hexdigest() stamps[source_hash] = source stamps[c_hash] = c_fname # Read stamps from hash_stamp_fname; check in stamps dictionary try: stamp_file = open(hash_stamp_fname, 'rt') except IOError: return False for line in stamp_file: if line.startswith('#'): continue fname, hash = [e.strip() for e in line.split(',')] if hash not in stamps: return False # Compare path made canonical for \/ fname = fname.replace(filesep, '/') if not stamps[hash].replace(filesep, '/') == fname: return False stamps.pop(hash) # All good if we found all hashes we need return len(stamps) == 0 def cyproc_exts(exts, cython_min_version, hash_stamps_fname = 'pyx-stamps', build_ext=build_ext): """ Process sequence of `exts` to check if we need Cython. 
Return builder Parameters ---------- exts : sequence of distutils ``Extension`` If we already have good c files for any pyx or py sources, we replace the pyx or py files with their compiled up c versions inplace. cython_min_version : str Minimum cython version neede for compile hash_stamps_fname : str, optional filename with hashes for pyx/py and c files known to be in sync. Default is 'pyx-stamps' build_ext : distutils command default build_ext to return if not cythonizing. Default is distutils ``build_ext`` class Returns ------- builder : ``distutils`` ``build_ext`` class or similar Can be ``build_ext`` input (if we have good c files) or cython ``build_ext`` if we have a good cython, or a class raising an informative error on ``run()`` need_cython : bool True if we need Cython to build extensions, False otherwise. """ if stamped_pyx_ok(exts, hash_stamps_fname): # Replace pyx with c files, use standard builder for mod in exts: sources = [] for source in mod.sources: base, ext = splitext(source) if ext in ('.pyx', '.py'): sources.append(base + '.c') else: sources.append(source) mod.sources = sources return build_ext, False # We need cython try: from Cython.Compiler.Version import version as cyversion except ImportError: return derror_maker(build_ext, 'Need cython>={0} to build extensions ' 'but cannot import "Cython"'.format( cython_min_version)), True if LooseVersion(cyversion) >= cython_min_version: from Cython.Distutils import build_ext as extbuilder return extbuilder, True return derror_maker(build_ext, 'Need cython>={0} to build extensions' 'but found cython version {1}'.format( cython_min_version, cyversion)), True def build_stamp(pyxes, include_dirs=()): """ Cythonize files in `pyxes`, return pyx, C filenames, hashes Parameters ---------- pyxes : sequence sequence of filenames of files on which to run Cython include_dirs : sequence Any extra include directories in which to find Cython files. Returns ------- pyx_defs : dict dict has key, value pairs of , , where is a dict with key, value pairs of "pyx_hash", ; "c_filename", ; "c_hash", . """ pyx_defs = {} from Cython.Compiler.Main import compile from Cython.Compiler.CmdLine import parse_command_line includes = sum([['--include-dir', d] for d in include_dirs], []) for source in pyxes: base, ext = splitext(source) pyx_hash = sha1(open(source, 'rt').read()).hexdigest() c_filename = base + '.c' options, sources = parse_command_line(includes + [source]) result = compile(sources, options) if result.num_errors > 0: raise RuntimeError('Cython failed to compile ' + source) c_hash = sha1(open(c_filename, 'rt').read()).hexdigest() pyx_defs[source] = dict(pyx_hash=pyx_hash, c_filename=c_filename, c_hash=c_hash) return pyx_defs def write_stamps(pyx_defs, stamp_fname='pyx-stamps'): """ Write stamp information in `pyx_defs` to filename `stamp_fname` Parameters ---------- pyx_defs : dict dict has key, value pairs of , , where is a dict with key, value pairs of "pyx_hash", ; "c_filename", ; "c_hash", . 
stamp_fname : str filename to which to write stamp information """ with open(stamp_fname, 'wt') as stamp_file: stamp_file.write('# SHA1 hashes for pyx files and generated c files\n') stamp_file.write('# Auto-generated file, do not edit\n') for pyx_fname, pyx_info in pyx_defs.items(): stamp_file.write('%s, %s\n' % (pyx_fname, pyx_info['pyx_hash'])) stamp_file.write('%s, %s\n' % (pyx_info['c_filename'], pyx_info['c_hash'])) def find_pyx(root_dir): """ Recursively find files with extension '.pyx' starting at `root_dir` Parameters ---------- root_dir : str Directory from which to search for pyx files. Returns ------- pyxes : list list of filenames relative to `root_dir` """ pyxes = [] for dirpath, dirnames, filenames in os.walk(root_dir): for filename in filenames: if not filename.endswith('.pyx'): continue base = relpath(dirpath, root_dir) pyxes.append(pjoin(base, filename)) return pyxes def get_pyx_sdist(sdist_like=sdist, hash_stamps_fname='pyx-stamps', include_dirs=()): """ Add pyx->c conversion, hash recording to sdist command `sdist_like` Parameters ---------- sdist_like : sdist command class, optional command that will do work of ``distutils.command.sdist.sdist``. By default we use the distutils version hash_stamps_fname : str, optional filename to which to write hashes of pyx / py and c files. Default is ``pyx-stamps`` include_dirs : sequence Any extra include directories in which to find Cython files. Returns ------- modified_sdist : sdist-like command class decorated `sdist_like` class, for compiling pyx / py files to c, putting the .c files in the the source archive, and writing hashes for these into the file named from `hash_stamps_fname` """ class PyxSDist(sdist_like): """ Custom distutils sdist command to generate .c files from pyx files. Running the command object ``obj.run()`` will compile the pyx / py files in any extensions, into c files, and add them to the list of files to put into the source archive, as well as the usual behavior of distutils ``sdist``. It will also take the sha1 hashes of the pyx / py and c files, and store them in a file ``pyx-stamps``, and put this file in the release tree. This allows someone who has the archive to know that the pyx and c files that they have are the ones packed into the archive, and therefore they may not need Cython at install time. See ``cython_process_exts`` for the build-time command. """ def make_distribution(self): """ Compile pyx to c files, add to sources, stamp sha1s """ pyxes = [] for mod in self.distribution.ext_modules: for source in mod.sources: base, ext = splitext(source) if ext in ('.pyx', '.py'): pyxes.append(source) self.pyx_defs = build_stamp(pyxes, include_dirs) for pyx_fname, pyx_info in self.pyx_defs.items(): self.filelist.append(pyx_info['c_filename']) sdist_like.make_distribution(self) def make_release_tree(self, base_dir, files): """ Put pyx stamps file into release tree """ sdist_like.make_release_tree(self, base_dir, files) stamp_fname = pjoin(base_dir, hash_stamps_fname) write_stamps(self.pyx_defs, stamp_fname) return PyxSDist def build_stamp_source(root_dir=None, stamp_fname='pyx-stamps', include_dirs=None): """ Build cython c files, make stamp file in source tree `root_dir` Parameters ---------- root_dir : None or str, optional Directory from which to find ``.pyx`` files. If None, use current working directory. 
stamp_fname : str, optional Filename for stamp file we will write include_dirs : None or sequence Any extra Cython include directories """ if root_dir is None: root_dir = os.getcwd() if include_dirs is None: include_dirs = [pjoin(root_dir, 'src')] pyxes = find_pyx(root_dir) pyx_defs = build_stamp(pyxes, include_dirs=include_dirs) write_stamps(pyx_defs, stamp_fname) dipy-0.13.0/dipy/000077500000000000000000000000001317371701200135075ustar00rootroot00000000000000dipy-0.13.0/dipy/COMMIT_INFO.txt000066400000000000000000000004151317371701200160530ustar00rootroot00000000000000# This is an ini file that may contain information about the code state [commit hash] # The line below may contain a valid hash if it has been substituted during 'git archive' archive_subst_hash=25036c7c # This line may be modified by the install process install_hash= dipy-0.13.0/dipy/__init__.py000066400000000000000000000026351317371701200156260ustar00rootroot00000000000000""" Diffusion Imaging in Python ============================ For more information, please visit http://dipy.org Subpackages ----------- :: align -- Registration, streamline alignment, volume resampling boots -- Bootstrapping algorithms core -- Spheres, gradient tables core.geometry -- Spherical geometry, coordinate and vector manipulation core.meshes -- Point distributions on the sphere data -- Small testing datasets external -- Interfaces to external tools such as FSL io -- Loading/saving of dpy datasets reconst -- Signal reconstruction modules (tensor, spherical harmonics, diffusion spectrum, etc.) segment -- Tractography segmentation sims -- MRI phantom signal simulation tracking -- Tractography, metrics for streamlines viz -- Visualization and GUIs Utilities --------- :: test -- Run unittests __version__ -- Dipy version """ import sys if sys.version[0:3] < '2.6': raise ImportError('Dipy needs Python version 2.6 or above') from .info import __version__ from .testing import setup_test # Test callable from numpy.testing import Tester test = Tester().test bench = Tester().bench del Tester # Plumb in version etc info stuff from .pkg_info import get_pkg_info as _get_pkg_info def get_info(): from os.path import dirname return _get_pkg_info(dirname(__file__)) del sys dipy-0.13.0/dipy/align/000077500000000000000000000000001317371701200146015ustar00rootroot00000000000000dipy-0.13.0/dipy/align/__init__.py000066400000000000000000000014331317371701200167130ustar00rootroot00000000000000import numpy as np floating = np.float32 class Bunch(object): def __init__(self, **kwds): r"""A 'bunch' of values (a replacement of Enum) This is a temporary replacement of Enum, which is not available on all versions of Python 2 """ self.__dict__.update(kwds) VerbosityLevels = Bunch(NONE=0, STATUS=1, DIAGNOSE=2, DEBUG=3) r""" VerbosityLevels This enum defines the four levels of verbosity we use in the align module. NONE : do not print anything STATUS : print information about the current status of the algorithm DIAGNOSE : print high level information of the components involved in the registration that can be used to detect a failing component. DEBUG : print as much information as possible to isolate the cause of a bug. 
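Examples
--------
A minimal sketch, assuming only what is defined in this module (registration
classes elsewhere in ``dipy.align`` typically expose a verbosity setting that
takes one of these values, but check the specific class):

>>> from dipy.align import VerbosityLevels
>>> VerbosityLevels.STATUS
1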
""" dipy-0.13.0/dipy/align/bundlemin.pyx000066400000000000000000000207621317371701200173270ustar00rootroot00000000000000#!python #cython: boundscheck=False #cython: wraparound=False #cython: cdivision=True import numpy as np cimport numpy as cnp cimport cython cimport safe_openmp as openmp from safe_openmp cimport have_openmp from cython.parallel import prange from libc.stdlib cimport malloc, free from libc.math cimport sqrt, sin, cos from multiprocessing import cpu_count cdef cnp.dtype f64_dt = np.dtype(np.float64) cdef double min_direct_flip_dist(double *a,double *b, cnp.npy_intp rows) nogil: r""" Minimum of direct and flip average (MDF) distance [Garyfallidis12] between two streamlines. Parameters ---------- a : double pointer first streamline b : double pointer second streamline rows : number of points of the streamline both tracks need to have the same number of points Returns ------- out : double mininum of direct and flipped average distances Reference --------- .. [Garyfallidis12] Garyfallidis E. et al., QuickBundles a method for tractography simplification, Frontiers in Neuroscience, vol 6, no 175, 2012. """ cdef: cnp.npy_intp i=0, j=0 double sub=0, subf=0, distf=0, dist=0, tmprow=0, tmprowf=0 for i in range(rows): tmprow = 0 tmprowf = 0 for j in range(3): sub = a[i * 3 + j] - b[i * 3 + j] subf = a[i * 3 + j] - b[(rows - 1 - i) * 3 + j] tmprow += sub * sub tmprowf += subf * subf dist += sqrt(tmprow) distf += sqrt(tmprowf) dist = dist / rows distf = distf / rows if dist <= distf: return dist return distf def _bundle_minimum_distance_matrix(double [:, ::1] static, double [:, ::1] moving, cnp.npy_intp static_size, cnp.npy_intp moving_size, cnp.npy_intp rows, double [:, ::1] D, num_threads=None): """ MDF-based pairwise distance optimization function We minimize the distance between moving streamlines of the same number of points as they align with the static streamlines. Parameters ----------- static: array Static streamlines moving: array Moving streamlines static_size : int Number of static streamlines moving_size : int Number of moving streamlines rows : int Number of points per streamline D : 2D array Distance matrix num_threads : int Number of threads. If None (default) then all available threads will be used. Returns ------- cost : double """ cdef: cnp.npy_intp i=0, j=0, mov_i=0, mov_j=0 int all_cores = openmp.omp_get_num_procs() int threads_to_use = -1 if num_threads is not None: threads_to_use = num_threads else: threads_to_use = all_cores if have_openmp: openmp.omp_set_dynamic(0) openmp.omp_set_num_threads(threads_to_use) with nogil: for i in prange(static_size): for j in prange(moving_size): D[i, j] = min_direct_flip_dist(&static[i * rows, 0], &moving[j * rows, 0], rows) if have_openmp and num_threads is not None: openmp.omp_set_num_threads(all_cores) return np.asarray(D) def _bundle_minimum_distance(double [:, ::1] stat, double [:, ::1] mov, cnp.npy_intp static_size, cnp.npy_intp moving_size, cnp.npy_intp rows, num_threads=None): """ MDF-based pairwise distance optimization function We minimize the distance between moving streamlines of the same number of points as they align with the static streamlines. Parameters ----------- static : array Static streamlines moving : array Moving streamlines static_size : int Number of static streamlines moving_size : int Number of moving streamlines rows : int Number of points per streamline num_threads : int Number of threads. If None (default) then all available threads will be used. 
Returns ------- cost : double Notes ----- The difference with ``_bundle_minimum_distance_matrix`` is that it does not save the full distance matrix and therefore needs much less memory. """ cdef: cnp.npy_intp i=0, j=0 double sum_i=0, sum_j=0, tmp=0 double inf = np.finfo('f8').max double dist=0 double * min_j double * min_i openmp.omp_lock_t lock int all_cores = openmp.omp_get_num_procs() int threads_to_use = -1 if num_threads is not None: threads_to_use = num_threads else: threads_to_use = all_cores if have_openmp: openmp.omp_set_dynamic(0) openmp.omp_set_num_threads(threads_to_use) with nogil: if have_openmp: openmp.omp_init_lock(&lock) min_j = malloc(static_size * sizeof(double)) min_i = malloc(moving_size * sizeof(double)) for i in range(static_size): min_j[i] = inf for j in range(moving_size): min_i[j] = inf for i in prange(static_size): for j in range(moving_size): tmp = min_direct_flip_dist(&stat[i * rows, 0], &mov[j * rows, 0], rows) if have_openmp: openmp.omp_set_lock(&lock) if tmp < min_j[i]: min_j[i] = tmp if tmp < min_i[j]: min_i[j] = tmp if have_openmp: openmp.omp_unset_lock(&lock) if have_openmp: openmp.omp_destroy_lock(&lock) for i in range(static_size): sum_i += min_j[i] for j in range(moving_size): sum_j += min_i[j] free(min_j) free(min_i) dist = (sum_i / static_size + sum_j / moving_size) dist = 0.25 * dist * dist if have_openmp and num_threads is not None: openmp.omp_set_num_threads(all_cores) return dist def distance_matrix_mdf(streamlines_a, streamlines_b): r""" Minimum direct flipped distance matrix between two streamline sets All streamlines need to have the same number of points Parameters ---------- streamlines_a : sequence of streamlines as arrays, [(N, 3) .. (N, 3)] streamlines_b : sequence of streamlines as arrays, [(N, 3) .. 
(N, 3)] Returns ------- DM : array, shape (len(streamlines_a), len(streamlines_b)) distance matrix """ cdef: size_t i, j, lentA, lentB # preprocess tracks cdef: size_t longest_track_len = 0, track_len longest_track_lenA, longest_track_lenB cnp.ndarray[object, ndim=1] tracksA64 cnp.ndarray[object, ndim=1] tracksB64 cnp.ndarray[cnp.double_t, ndim=2] DM lentA = len(streamlines_a) lentB = len(streamlines_b) tracksA64 = np.zeros((lentA,), dtype=object) tracksB64 = np.zeros((lentB,), dtype=object) DM = np.zeros((lentA,lentB), dtype=np.double) if streamlines_a[0].shape[0] != streamlines_b[0].shape[0]: msg = 'Streamlines should have the same number of points as required' msg += 'by the MDF distance' raise ValueError(msg) # process tracks to predictable memory layout for i in range(lentA): tracksA64[i] = np.ascontiguousarray(streamlines_a[i], dtype=f64_dt) for i in range(lentB): tracksB64[i] = np.ascontiguousarray(streamlines_b[i], dtype=f64_dt) # preallocate buffer array for track distance calculations cdef: cnp.float64_t *t1_ptr, *t2_ptr, *min_buffer # cycle over tracks cdef: cnp.ndarray [cnp.float64_t, ndim=2] t1, t2 size_t t1_len, t2_len double d[2] t_len = tracksA64[0].shape[0] for i from 0 <= i < lentA: t1 = tracksA64[i] t1_ptr = t1.data for j from 0 <= j < lentB: t2 = tracksB64[j] t2_ptr = t2.data DM[i, j] = min_direct_flip_dist(t1_ptr, t2_ptr,t_len) return DM dipy-0.13.0/dipy/align/crosscorr.pyx000066400000000000000000001045361317371701200173730ustar00rootroot00000000000000""" Utility functions used by the Cross Correlation (CC) metric """ import numpy as np from fused_types cimport floating cimport cython cimport numpy as cnp cdef inline int _int_max(int a, int b) nogil: r""" Returns the maximum of a and b """ return a if a >= b else b cdef inline int _int_min(int a, int b) nogil: r""" Returns the minimum of a and b """ return a if a <= b else b cdef enum: SI = 0 SI2 = 1 SJ = 2 SJ2 = 3 SIJ = 4 CNT = 5 @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) cdef inline int _wrap(int x, int m)nogil: r""" Auxiliary function to `wrap` an array around its low-end side. Negative indices are mapped to last coordinates so that no extra memory is required to account for local rectangular windows that exceed the array's low-end boundary. Parameters ---------- x : int the array position to be wrapped m : int array length """ if x < 0: return x + m return x @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) cdef inline void _update_factors(double[:, :, :, :] factors, floating[:, :, :] moving, floating[:, :, :] static, int ss, int rr, int cc, int s, int r, int c, int operation)nogil: r"""Updates the precomputed CC factors of a rectangular window Updates the precomputed CC factors of the rectangular window centered at (`ss`, `rr`, `cc`) by adding the factors corresponding to voxel (`s`, `r`, `c`) of input images `moving` and `static`. 
Parameters ---------- factors : array, shape (S, R, C, 5) array containing the current precomputed factors to be updated moving : array, shape (S, R, C) the moving volume (notice that both images must already be in a common reference domain, in particular, they must have the same shape) static : array, shape (S, R, C) the static volume, which also defines the reference registration domain ss : int first coordinate of the rectangular window to be updated rr : int second coordinate of the rectangular window to be updated cc : int third coordinate of the rectangular window to be updated s: int first coordinate of the voxel the local window should be updated with r: int second coordinate of the voxel the local window should be updated with c: int third coordinate of the voxel the local window should be updated with operation : int, either -1, 0 or 1 indicates whether the factors of voxel (`s`, `r`, `c`) should be added to (`operation`=1), subtracted from (`operation`=-1), or set as (`operation`=0) the current factors for the rectangular window centered at (`ss`, `rr`, `cc`). """ cdef: double sval double mval if s >= moving.shape[0] or r >= moving.shape[1] or c >= moving.shape[2]: if operation == 0: factors[ss, rr, cc, SI] = 0 factors[ss, rr, cc, SI2] = 0 factors[ss, rr, cc, SJ] = 0 factors[ss, rr, cc, SJ2] = 0 factors[ss, rr, cc, SIJ] = 0 else: sval = static[s, r, c] mval = moving[s, r, c] if operation == 0: factors[ss, rr, cc, SI] = sval factors[ss, rr, cc, SI2] = sval*sval factors[ss, rr, cc, SJ] = mval factors[ss, rr, cc, SJ2] = mval*mval factors[ss, rr, cc, SIJ] = sval*mval elif operation == -1: factors[ss, rr, cc, SI] -= sval factors[ss, rr, cc, SI2] -= sval*sval factors[ss, rr, cc, SJ] -= mval factors[ss, rr, cc, SJ2] -= mval*mval factors[ss, rr, cc, SIJ] -= sval*mval elif operation == 1: factors[ss, rr, cc, SI] += sval factors[ss, rr, cc, SI2] += sval*sval factors[ss, rr, cc, SJ] += mval factors[ss, rr, cc, SJ2] += mval*mval factors[ss, rr, cc, SIJ] += sval*mval @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) def precompute_cc_factors_3d(floating[:, :, :] static, floating[:, :, :] moving, cnp.npy_intp radius, num_threads=None): r"""Precomputations to quickly compute the gradient of the CC Metric Pre-computes the separate terms of the cross correlation metric and image norms at each voxel considering a neighborhood of the given radius to efficiently compute the gradient of the metric with respect to the deformation field [Ocegueda2016]_ [Avants2008]_ [Avants2011]_. Parameters ---------- static : array, shape (S, R, C) the static volume, which also defines the reference registration domain moving : array, shape (S, R, C) the moving volume (notice that both images must already be in a common reference domain, i.e. the same S, R, C) radius : the radius of the neighborhood (cube of (2 * radius + 1)^3 voxels) Returns ------- factors : array, shape (S, R, C, 5) the precomputed cross correlation terms: factors[:,:,:,0] : static minus its mean value along the neighborhood factors[:,:,:,1] : moving minus its mean value along the neighborhood factors[:,:,:,2] : sum of the pointwise products of static and moving along the neighborhood factors[:,:,:,3] : sum of sq. values of static along the neighborhood factors[:,:,:,4] : sum of sq. values of moving along the neighborhood References ---------- .. [Ocegueda2016]_ Ocegueda, O., Dalmau, O., Garyfallidis, E., Descoteaux, M., & Rivera, M. (2016). 
On the computation of integrals over fixed-size rectangles of arbitrary dimension, Pattern Recognition Letters. doi:10.1016/j.patrec.2016.05.008 .. [Avants2008]_ Avants, B. B., Epstein, C. L., Grossman, M., & Gee, J. C. (2008). Symmetric Diffeomorphic Image Registration with Cross-Correlation: Evaluating Automated Labeling of Elderly and Neurodegenerative Brain, Med Image Anal. 12(1), 26-41. .. [Avants2011]_ Avants, B. B., Tustison, N., & Song, G. (2011). Advanced Normalization Tools ( ANTS ), 1-35. """ cdef: cnp.npy_intp ns = static.shape[0] cnp.npy_intp nr = static.shape[1] cnp.npy_intp nc = static.shape[2] cnp.npy_intp side = 2 * radius + 1 cnp.npy_intp firstc, lastc, firstr, lastr, firsts, lasts cnp.npy_intp s, r, c, it, sides, sider, sidec double cnt cnp.npy_intp ssss, sss, ss, rr, cc, prev_ss, prev_rr, prev_cc double Imean, Jmean, IJprods, Isq, Jsq double[:, :, :, :] temp = np.zeros((2, nr, nc, 5), dtype=np.float64) floating[:, :, :, :] factors = np.zeros((ns, nr, nc, 5), dtype=np.asarray(static).dtype) with nogil: sss = 1 for s in range(ns+radius): ss = _wrap(s - radius, ns) sss = 1 - sss firsts = _int_max(0, ss - radius) lasts = _int_min(ns - 1, ss + radius) sides = (lasts - firsts + 1) for r in range(nr+radius): rr = _wrap(r - radius, nr) firstr = _int_max(0, rr - radius) lastr = _int_min(nr - 1, rr + radius) sider = (lastr - firstr + 1) for c in range(nc+radius): cc = _wrap(c - radius, nc) # New corner _update_factors(temp, moving, static, sss, rr, cc, s, r, c, 0) # Add signed sub-volumes if s > 0: prev_ss = 1 - sss for it in range(5): temp[sss, rr, cc, it] += temp[prev_ss, rr, cc, it] if r > 0: prev_rr = _wrap(rr-1, nr) for it in range(5): temp[sss, rr, cc, it] -= \ temp[prev_ss, prev_rr, cc, it] if c > 0: prev_cc = _wrap(cc-1, nc) for it in range(5): temp[sss, rr, cc, it] += \ temp[prev_ss, prev_rr, prev_cc, it] if c > 0: prev_cc = _wrap(cc-1, nc) for it in range(5): temp[sss, rr, cc, it] -= \ temp[prev_ss, rr, prev_cc, it] if(r > 0): prev_rr = _wrap(rr-1, nr) for it in range(5): temp[sss, rr, cc, it] += \ temp[sss, prev_rr, cc, it] if(c > 0): prev_cc = _wrap(cc-1, nc) for it in range(5): temp[sss, rr, cc, it] -= \ temp[sss, prev_rr, prev_cc, it] if(c > 0): prev_cc = _wrap(cc-1, nc) for it in range(5): temp[sss, rr, cc, it] += temp[sss, rr, prev_cc, it] # Add signed corners if s >= side: _update_factors(temp, moving, static, sss, rr, cc, s-side, r, c, -1) if r >= side: _update_factors(temp, moving, static, sss, rr, cc, s-side, r-side, c, 1) if c >= side: _update_factors(temp, moving, static, sss, rr, cc, s-side, r-side, c-side, -1) if c >= side: _update_factors(temp, moving, static, sss, rr, cc, s-side, r, c-side, 1) if r >= side: _update_factors(temp, moving, static, sss, rr, cc, s, r-side, c, -1) if c >= side: _update_factors(temp, moving, static, sss, rr, cc, s, r-side, c-side, 1) if c >= side: _update_factors(temp, moving, static, sss, rr, cc, s, r, c-side, -1) # Compute final factors if s >= radius and r >= radius and c >= radius: firstc = _int_max(0, cc - radius) lastc = _int_min(nc - 1, cc + radius) sidec = (lastc - firstc + 1) cnt = sides*sider*sidec Imean = temp[sss, rr, cc, SI] / cnt Jmean = temp[sss, rr, cc, SJ] / cnt IJprods = (temp[sss, rr, cc, SIJ] - Jmean * temp[sss, rr, cc, SI] - Imean * temp[sss, rr, cc, SJ] + cnt * Jmean * Imean) Isq = (temp[sss, rr, cc, SI2] - Imean * temp[sss, rr, cc, SI] - Imean * temp[sss, rr, cc, SI] + cnt * Imean * Imean) Jsq = (temp[sss, rr, cc, SJ2] - Jmean * temp[sss, rr, cc, SJ] - Jmean * temp[sss, rr, cc, SJ] + cnt * Jmean * Jmean) 
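# Store the per-voxel CC factors: the locally centered static and
# moving intensities plus the local sums of I*J, I^2 and J^2, which
# the forward/backward gradient steps consume.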
factors[ss, rr, cc, 0] = static[ss, rr, cc] - Imean factors[ss, rr, cc, 1] = moving[ss, rr, cc] - Jmean factors[ss, rr, cc, 2] = IJprods factors[ss, rr, cc, 3] = Isq factors[ss, rr, cc, 4] = Jsq return factors @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) def precompute_cc_factors_3d_test(floating[:, :, :] static, floating[:, :, :] moving, int radius): r"""Precomputations to quickly compute the gradient of the CC Metric This version of precompute_cc_factors_3d is for testing purposes, it directly computes the local cross-correlation factors without any optimization, so it is less error-prone than the accelerated version. """ cdef: cnp.npy_intp ns = static.shape[0] cnp.npy_intp nr = static.shape[1] cnp.npy_intp nc = static.shape[2] cnp.npy_intp s, r, c, k, i, j, t cnp.npy_intp firstc, lastc, firstr, lastr, firsts, lasts double Imean, Jmean floating[:, :, :, :] factors = np.zeros((ns, nr, nc, 5), dtype=np.asarray(static).dtype) double[:] sums = np.zeros((6,), dtype=np.float64) with nogil: for s in range(ns): firsts = _int_max(0, s - radius) lasts = _int_min(ns - 1, s + radius) for r in range(nr): firstr = _int_max(0, r - radius) lastr = _int_min(nr - 1, r + radius) for c in range(nc): firstc = _int_max(0, c - radius) lastc = _int_min(nc - 1, c + radius) for t in range(6): sums[t] = 0 for k in range(firsts, 1 + lasts): for i in range(firstr, 1 + lastr): for j in range(firstc, 1 + lastc): sums[SI] += static[k, i, j] sums[SI2] += static[k, i, j]**2 sums[SJ] += moving[k, i, j] sums[SJ2] += moving[k, i, j]**2 sums[SIJ] += static[k, i, j]*moving[k, i, j] sums[CNT] += 1 Imean = sums[SI] / sums[CNT] Jmean = sums[SJ] / sums[CNT] factors[s, r, c, 0] = static[s, r, c] - Imean factors[s, r, c, 1] = moving[s, r, c] - Jmean factors[s, r, c, 2] = (sums[SIJ] - Jmean * sums[SI] - Imean * sums[SJ] + sums[CNT] * Jmean * Imean) factors[s, r, c, 3] = (sums[SI2] - Imean * sums[SI] - Imean * sums[SI] + sums[CNT] * Imean * Imean) factors[s, r, c, 4] = (sums[SJ2] - Jmean * sums[SJ] - Jmean * sums[SJ] + sums[CNT] * Jmean * Jmean) return np.asarray(factors) @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) def compute_cc_forward_step_3d(floating[:, :, :, :] grad_static, floating[:, :, :, :] factors, cnp.npy_intp radius): r"""Gradient of the CC Metric w.r.t. the forward transformation Computes the gradient of the Cross Correlation metric for symmetric registration (SyN) [Avants2008]_ w.r.t. the displacement associated to the moving volume ('forward' step) as in [Avants2011]_ Parameters ---------- grad_static : array, shape (S, R, C, 3) the gradient of the static volume factors : array, shape (S, R, C, 5) the precomputed cross correlation terms obtained via precompute_cc_factors_3d radius : int the radius of the neighborhood used for the CC metric when computing the factors. The returned vector field will be zero along a boundary of width radius voxels. Returns ------- out : array, shape (S, R, C, 3) the gradient of the cross correlation metric with respect to the displacement associated to the moving volume energy : the cross correlation energy (data term) at this iteration References ---------- .. [Avants2008]_ Avants, B. B., Epstein, C. L., Grossman, M., & Gee, J. C. (2008). Symmetric Diffeomorphic Image Registration with Cross-Correlation: Evaluating Automated Labeling of Elderly and Neurodegenerative Brain, Med Image Anal. 12(1), 26-41. .. [Avants2011]_ Avants, B. B., Tustison, N., & Song, G. (2011). Advanced Normalization Tools ( ANTS ), 1-35. 
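Notes
-----
For reference, writing ``sfm``, ``sff`` and ``smm`` for ``factors[..., 2]``,
``factors[..., 3]`` and ``factors[..., 4]`` (the local sums of centered
products and squares), the loop below takes ``sfm ** 2 / (sff * smm)`` as the
local correlation (its negative is accumulated into ``energy``) and adds
``- 2 * sfm / (sff * smm) * (J_c - (sfm / sff) * I_c)`` times ``grad_static``
to ``out``, where ``I_c = factors[..., 0]`` and ``J_c = factors[..., 1]`` are
the locally centered static and moving intensities.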
""" cdef: cnp.npy_intp ns = grad_static.shape[0] cnp.npy_intp nr = grad_static.shape[1] cnp.npy_intp nc = grad_static.shape[2] double energy = 0 cnp.npy_intp s, r, c double Ii, Ji, sfm, sff, smm, localCorrelation, temp floating[:, :, :, :] out =\ np.zeros((ns, nr, nc, 3), dtype=np.asarray(grad_static).dtype) with nogil: for s in range(radius, ns-radius): for r in range(radius, nr-radius): for c in range(radius, nc-radius): Ii = factors[s, r, c, 0] Ji = factors[s, r, c, 1] sfm = factors[s, r, c, 2] sff = factors[s, r, c, 3] smm = factors[s, r, c, 4] if(sff == 0.0 or smm == 0.0): continue localCorrelation = 0 if(sff * smm > 1e-5): localCorrelation = sfm * sfm / (sff * smm) if(localCorrelation < 1): # avoid bad values... energy -= localCorrelation temp = 2.0 * sfm / (sff * smm) * (Ji - sfm / sff * Ii) out[s, r, c, 0] -= temp * grad_static[s, r, c, 0] out[s, r, c, 1] -= temp * grad_static[s, r, c, 1] out[s, r, c, 2] -= temp * grad_static[s, r, c, 2] return np.asarray(out), energy @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) def compute_cc_backward_step_3d(floating[:, :, :, :] grad_moving, floating[:, :, :, :] factors, cnp.npy_intp radius): r"""Gradient of the CC Metric w.r.t. the backward transformation Computes the gradient of the Cross Correlation metric for symmetric registration (SyN) [Avants08]_ w.r.t. the displacement associated to the static volume ('backward' step) as in [Avants11]_ Parameters ---------- grad_moving : array, shape (S, R, C, 3) the gradient of the moving volume factors : array, shape (S, R, C, 5) the precomputed cross correlation terms obtained via precompute_cc_factors_3d radius : int the radius of the neighborhood used for the CC metric when computing the factors. The returned vector field will be zero along a boundary of width radius voxels. Returns ------- out : array, shape (S, R, C, 3) the gradient of the cross correlation metric with respect to the displacement associated to the static volume energy : the cross correlation energy (data term) at this iteration References ---------- [Avants08]_ Avants, B. B., Epstein, C. L., Grossman, M., & Gee, J. C. (2008) Symmetric Diffeomorphic Image Registration with Cross-Correlation: Evaluating Automated Labeling of Elderly and Neurodegenerative Brain, Med Image Anal. 12(1), 26-41. [Avants11]_ Avants, B. B., Tustison, N., & Song, G. (2011). Advanced Normalization Tools ( ANTS ), 1-35. """ ftype = np.asarray(grad_moving).dtype cdef: cnp.npy_intp ns = grad_moving.shape[0] cnp.npy_intp nr = grad_moving.shape[1] cnp.npy_intp nc = grad_moving.shape[2] cnp.npy_intp s, r, c double energy = 0 double Ii, Ji, sfm, sff, smm, localCorrelation, temp floating[:, :, :, :] out = np.zeros((ns, nr, nc, 3), dtype=ftype) with nogil: for s in range(radius, ns-radius): for r in range(radius, nr-radius): for c in range(radius, nc-radius): Ii = factors[s, r, c, 0] Ji = factors[s, r, c, 1] sfm = factors[s, r, c, 2] sff = factors[s, r, c, 3] smm = factors[s, r, c, 4] if(sff == 0.0 or smm == 0.0): continue localCorrelation = 0 if(sff * smm > 1e-5): localCorrelation = sfm * sfm / (sff * smm) if(localCorrelation < 1): # avoid bad values... 
energy -= localCorrelation temp = 2.0 * sfm / (sff * smm) * (Ii - sfm / smm * Ji) out[s, r, c, 0] -= temp * grad_moving[s, r, c, 0] out[s, r, c, 1] -= temp * grad_moving[s, r, c, 1] out[s, r, c, 2] -= temp * grad_moving[s, r, c, 2] return np.asarray(out), energy @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) def precompute_cc_factors_2d(floating[:, :] static, floating[:, :] moving, cnp.npy_intp radius): r"""Precomputations to quickly compute the gradient of the CC Metric Pre-computes the separate terms of the cross correlation metric [Avants2008]_ and image norms at each voxel considering a neighborhood of the given radius to efficiently [Avants2011]_ compute the gradient of the metric with respect to the deformation field. Parameters ---------- static : array, shape (R, C) the static volume, which also defines the reference registration domain moving : array, shape (R, C) the moving volume (notice that both images must already be in a common reference domain, i.e. the same R, C) radius : the radius of the neighborhood(square of (2*radius + 1)^2 voxels) Returns ------- factors : array, shape (R, C, 5) the precomputed cross correlation terms: factors[:,:,0] : static minus its mean value along the neighborhood factors[:,:,1] : moving minus its mean value along the neighborhood factors[:,:,2] : sum of the pointwise products of static and moving along the neighborhood factors[:,:,3] : sum of sq. values of static along the neighborhood factors[:,:,4] : sum of sq. values of moving along the neighborhood References ---------- .. [Avants2008]_ Avants, B. B., Epstein, C. L., Grossman, M., & Gee, J. C. (2008). Symmetric Diffeomorphic Image Registration with Cross-Correlation: Evaluating Automated Labeling of Elderly and Neurodegenerative Brain, Med Image Anal. 12(1), 26-41. .. [Avants2011]_ Avants, B. B., Tustison, N., & Song, G. (2011). Advanced Normalization Tools ( ANTS ), 1-35. """ ftype = np.asarray(static).dtype cdef: cnp.npy_intp side = 2 * radius + 1 cnp.npy_intp nr = static.shape[0] cnp.npy_intp nc = static.shape[1] cnp.npy_intp r, c, i, j, t, q, qq, firstc, lastc double Imean, Jmean floating[:, :, :] factors = np.zeros((nr, nc, 5), dtype=ftype) double[:, :] lines = np.zeros((6, side), dtype=np.float64) double[:] sums = np.zeros((6,), dtype=np.float64) with nogil: for c in range(nc): firstc = _int_max(0, c - radius) lastc = _int_min(nc - 1, c + radius) # compute factors for row [:,c] for t in range(6): for q in range(side): lines[t, q] = 0 # Compute all rows and set the sums on the fly # compute row [i, j = {c-radius, c + radius}] for i in range(nr): q = i % side for t in range(6): lines[t, q] = 0 for j in range(firstc, lastc + 1): lines[SI, q] += static[i, j] lines[SI2, q] += static[i, j] * static[i, j] lines[SJ, q] += moving[i, j] lines[SJ2, q] += moving[i, j] * moving[i, j] lines[SIJ, q] += static[i, j] * moving[i, j] lines[CNT, q] += 1 for t in range(6): sums[t] = 0 for qq in range(side): sums[t] += lines[t, qq] if(i >= radius): # r is the pixel that is affected by the cube with slices # [r - radius.. 
r + radius, :] r = i - radius Imean = sums[SI] / sums[CNT] Jmean = sums[SJ] / sums[CNT] factors[r, c, 0] = static[r, c] - Imean factors[r, c, 1] = moving[r, c] - Jmean factors[r, c, 2] = (sums[SIJ] - Jmean * sums[SI] - Imean * sums[SJ] + sums[CNT] * Jmean * Imean) factors[r, c, 3] = (sums[SI2] - Imean * sums[SI] - Imean * sums[SI] + sums[CNT] * Imean * Imean) factors[r, c, 4] = (sums[SJ2] - Jmean * sums[SJ] - Jmean * sums[SJ] + sums[CNT] * Jmean * Jmean) # Finally set the values at the end of the line for r in range(nr - radius, nr): # this would be the last slice to be processed for pixel # [r, c], if it existed i = r + radius q = i % side for t in range(6): sums[t] -= lines[t, q] Imean = sums[SI] / sums[CNT] Jmean = sums[SJ] / sums[CNT] factors[r, c, 0] = static[r, c] - Imean factors[r, c, 1] = moving[r, c] - Jmean factors[r, c, 2] = (sums[SIJ] - Jmean * sums[SI] - Imean * sums[SJ] + sums[CNT] * Jmean * Imean) factors[r, c, 3] = (sums[SI2] - Imean * sums[SI] - Imean * sums[SI] + sums[CNT] * Imean * Imean) factors[r, c, 4] = (sums[SJ2] - Jmean * sums[SJ] - Jmean * sums[SJ] + sums[CNT] * Jmean * Jmean) return np.asarray(factors) @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) def precompute_cc_factors_2d_test(floating[:, :] static, floating[:, :] moving, cnp.npy_intp radius): r"""Precomputations to quickly compute the gradient of the CC Metric This version of precompute_cc_factors_2d is for testing purposes, it directly computes the local cross-correlation without any optimization. """ ftype = np.asarray(static).dtype cdef: cnp.npy_intp nr = static.shape[0] cnp.npy_intp nc = static.shape[1] cnp.npy_intp r, c, i, j, t, firstr, lastr, firstc, lastc double Imean, Jmean floating[:, :, :] factors = np.zeros((nr, nc, 5), dtype=ftype) double[:] sums = np.zeros((6,), dtype=np.float64) with nogil: for r in range(nr): firstr = _int_max(0, r - radius) lastr = _int_min(nr - 1, r + radius) for c in range(nc): firstc = _int_max(0, c - radius) lastc = _int_min(nc - 1, c + radius) for t in range(6): sums[t] = 0 for i in range(firstr, 1 + lastr): for j in range(firstc, 1+lastc): sums[SI] += static[i, j] sums[SI2] += static[i, j]**2 sums[SJ] += moving[i, j] sums[SJ2] += moving[i, j]**2 sums[SIJ] += static[i, j]*moving[i, j] sums[CNT] += 1 Imean = sums[SI] / sums[CNT] Jmean = sums[SJ] / sums[CNT] factors[r, c, 0] = static[r, c] - Imean factors[r, c, 1] = moving[r, c] - Jmean factors[r, c, 2] = (sums[SIJ] - Jmean * sums[SI] - Imean * sums[SJ] + sums[CNT] * Jmean * Imean) factors[r, c, 3] = (sums[SI2] - Imean * sums[SI] - Imean * sums[SI] + sums[CNT] * Imean * Imean) factors[r, c, 4] = (sums[SJ2] - Jmean * sums[SJ] - Jmean * sums[SJ] + sums[CNT] * Jmean * Jmean) return np.asarray(factors) @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) def compute_cc_forward_step_2d(floating[:, :, :] grad_static, floating[:, :, :] factors, cnp.npy_intp radius): r"""Gradient of the CC Metric w.r.t. the forward transformation Computes the gradient of the Cross Correlation metric for symmetric registration (SyN) [Avants2008]_ w.r.t. 
the displacement associated to the moving image ('backward' step) as in [Avants2011]_ Parameters ---------- grad_static : array, shape (R, C, 2) the gradient of the static image factors : array, shape (R, C, 5) the precomputed cross correlation terms obtained via precompute_cc_factors_2d Returns ------- out : array, shape (R, C, 2) the gradient of the cross correlation metric with respect to the displacement associated to the moving image energy : the cross correlation energy (data term) at this iteration Notes ----- Currently, the gradient of the static image is not being used, but some authors suggest that symmetrizing the gradient by including both, the moving and static gradients may improve the registration quality. We are leaving this parameters as a placeholder for future investigation References ---------- .. [Avants2008]_ Avants, B. B., Epstein, C. L., Grossman, M., & Gee, J. C. (2008). Symmetric Diffeomorphic Image Registration with Cross-Correlation: Evaluating Automated Labeling of Elderly and Neurodegenerative Brain, Med Image Anal. 12(1), 26-41. .. [Avants2011]_ Avants, B. B., Tustison, N., & Song, G. (2011). Advanced Normalization Tools ( ANTS ), 1-35. """ cdef: cnp.npy_intp nr = grad_static.shape[0] cnp.npy_intp nc = grad_static.shape[1] double energy = 0 cnp.npy_intp r, c double Ii, Ji, sfm, sff, smm, localCorrelation, temp floating[:, :, :] out = np.zeros((nr, nc, 2), dtype=np.asarray(grad_static).dtype) with nogil: for r in range(radius, nr-radius): for c in range(radius, nc-radius): Ii = factors[r, c, 0] Ji = factors[r, c, 1] sfm = factors[r, c, 2] sff = factors[r, c, 3] smm = factors[r, c, 4] if(sff == 0.0 or smm == 0.0): continue localCorrelation = 0 if(sff * smm > 1e-5): localCorrelation = sfm * sfm / (sff * smm) if(localCorrelation < 1): # avoid bad values... energy -= localCorrelation temp = 2.0 * sfm / (sff * smm) * (Ji - sfm / sff * Ii) out[r, c, 0] -= temp * grad_static[r, c, 0] out[r, c, 1] -= temp * grad_static[r, c, 1] return np.asarray(out), energy @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) def compute_cc_backward_step_2d(floating[:, :, :] grad_moving, floating[:, :, :] factors, cnp.npy_intp radius): r"""Gradient of the CC Metric w.r.t. the backward transformation Computes the gradient of the Cross Correlation metric for symmetric registration (SyN) [Avants2008]_ w.r.t. the displacement associated to the static image ('forward' step) as in [Avants2011]_ Parameters ---------- grad_moving : array, shape (R, C, 2) the gradient of the moving image factors : array, shape (R, C, 5) the precomputed cross correlation terms obtained via precompute_cc_factors_2d Returns ------- out : array, shape (R, C, 2) the gradient of the cross correlation metric with respect to the displacement associated to the static image energy : the cross correlation energy (data term) at this iteration References ---------- .. [Avants2008]_ Avants, B. B., Epstein, C. L., Grossman, M., & Gee, J. C. (2008). Symmetric Diffeomorphic Image Registration with Cross-Correlation: Evaluating Automated Labeling of Elderly and Neurodegenerative Brain, Med Image Anal. 12(1), 26-41. .. [Avants2011]_ Avants, B. B., Tustison, N., & Song, G. (2011). Advanced Normalization Tools ( ANTS ), 1-35. 
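    Examples
    --------
    A minimal sketch on small random images (illustrative only): it assumes
    this module is importable as ``dipy.align.crosscorr`` and uses
    ``numpy.gradient`` merely as a stand-in for the moving image gradient.

    >>> import numpy as np
    >>> from dipy.align.crosscorr import (precompute_cc_factors_2d,
    ...                                   compute_cc_backward_step_2d)
    >>> static = np.random.rand(32, 32)
    >>> moving = np.random.rand(32, 32)
    >>> radius = 4
    >>> factors = precompute_cc_factors_2d(static, moving, radius)
    >>> gr, gc = np.gradient(moving)
    >>> grad_moving = np.dstack((gr, gc))
    >>> step, energy = compute_cc_backward_step_2d(grad_moving, factors, radius)
    >>> step.shape
    (32, 32, 2)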
""" ftype = np.asarray(grad_moving).dtype cdef: cnp.npy_intp nr = grad_moving.shape[0] cnp.npy_intp nc = grad_moving.shape[1] cnp.npy_intp r, c double energy = 0 double Ii, Ji, sfm, sff, smm, localCorrelation, temp floating[:, :, :] out = np.zeros((nr, nc, 2), dtype=ftype) with nogil: for r in range(radius, nr-radius): for c in range(radius, nc-radius): Ii = factors[r, c, 0] Ji = factors[r, c, 1] sfm = factors[r, c, 2] sff = factors[r, c, 3] smm = factors[r, c, 4] if(sff == 0.0 or smm == 0.0): continue localCorrelation = 0 if(sff * smm > 1e-5): localCorrelation = sfm * sfm / (sff * smm) if(localCorrelation < 1): # avoid bad values... energy -= localCorrelation temp = 2.0 * sfm / (sff * smm) * (Ii - sfm / smm * Ji) out[r, c, 0] -= temp * grad_moving[r, c, 0] out[r, c, 1] -= temp * grad_moving[r, c, 1] return np.asarray(out), energy dipy-0.13.0/dipy/align/expectmax.pyx000066400000000000000000000543741317371701200173560ustar00rootroot00000000000000#!python #cython: boundscheck=False #cython: wraparound=False #cython: cdivision=True import numpy as np cimport cython cimport numpy as cnp from .fused_types cimport floating, number cdef extern from "dpy_math.h" nogil: int dpy_isinf(double) double floor(double) cdef inline int ifloor(double x) nogil: return int(floor(x)) def quantize_positive_2d(floating[:, :] v, int num_levels): r"""Quantizes a 2D image to num_levels quantization levels Quantizes the input image at num_levels intensity levels considering <=0 as a special value. Those input pixels <=0, and only those, will be assigned a quantization level of 0. The positive values are divided into the remaining num_levels-1 uniform quanization levels. The following are undefined, and raise a ValueError: * Quantizing at zero levels because at least one level must be assigned * Quantizing at one level because positive values should be assigned a level different from the secial level 0 (at least 2 levels are needed) Parameters ---------- v : array, shape (R, C) the image to be quantized num_levels : int the number of levels Returns ------- out : array, shape (R, C), same shape as v the quantized image levels: array, shape (num_levels,) the quantization values: levels[0]=0, and levels[i] is the mid-point of the interval of intensities that are assigned to quantization level i, i=1, ..., num_levels-1. 
hist: array, shape (num_levels,) histogram: the number of pixels that were assigned to each quantization level """ ftype = np.asarray(v).dtype cdef: cnp.npy_intp nrows = v.shape[0] cnp.npy_intp ncols = v.shape[1] cnp.npy_intp npix = nrows * ncols cnp.npy_intp i, j, l double epsilon, delta double min_val = -1 double max_val = -1 int[:] hist = np.zeros(shape=(num_levels,), dtype=np.int32) int[:, :] out = np.zeros(shape=(nrows, ncols,), dtype=np.int32) floating[:] levels = np.zeros(shape=(num_levels,), dtype=ftype) #Quantizing at zero levels is undefined #Quantizing at one level is not supported because we want to make sure the #maximum level in the quantization is never greater than num_levels-1 if(num_levels < 2): raise ValueError('Quantization levels must be at least 2') num_levels -= 1 # zero is one of the levels with nogil: for i in range(nrows): for j in range(ncols): if(v[i, j] > 0): if((min_val < 0) or (v[i, j] < min_val)): min_val = v[i, j] if(v[i, j] > max_val): max_val = v[i, j] epsilon = 1e-8 delta = (max_val - min_val + epsilon) / num_levels # notice that we decreased num_levels, so levels[0..num_levels] are well # defined if((num_levels < 2) or (delta < epsilon)): for i in range(nrows): for j in range(ncols): if(v[i, j] > 0): out[i, j] = 1 else: out[i, j] = 0 hist[0] += 1 levels[0] = 0 levels[1] = 0.5 * (min_val + max_val) hist[1] = npix - hist[0] with gil: return out, levels, hist levels[0] = 0 levels[1] = min_val + delta * 0.5 for i in range(2, 1 + num_levels): levels[i] = levels[i - 1] + delta for i in range(nrows): for j in range(ncols): if(v[i, j] > 0): l = ifloor((v[i, j] - min_val) / delta) out[i, j] = l + 1 hist[l + 1] += 1 else: out[i, j] = 0 hist[0] += 1 return np.asarray(out), np.array(levels), np.array(hist) def quantize_positive_3d(floating[:, :, :] v, int num_levels): r"""Quantizes a 3D volume to num_levels quantization levels Quantizes the input volume at num_levels intensity levels considering <=0 as a special value. Those input voxels <=0, and only those, will be assigned a quantization level of 0. The positive values are divided into the remaining num_levels-1 uniform quanization levels. The following are undefined, and raise a ValueError: * Quantizing at zero levels because at least one level must be assigned * Quantizing at one level because positive values should be assigned a level different from the secial level 0 (at least 2 levels are needed) Parameters ---------- v : array, shape (S, R, C) the volume to be quantized num_levels : int the number of levels Returns ------- out : array, shape (S, R, C), same shape as v the quantized volume levels: array, shape (num_levels,) the quantization values: levels[0]=0, and levels[i] is the mid-point of the interval of intensities that are assigned to quantization level i, i=1, ..., num_levels-1. 
hist: array, shape (num_levels,) histogram: the number of voxels that were assigned to each quantization level """ ftype = np.asarray(v).dtype cdef: cnp.npy_intp nslices = v.shape[0] cnp.npy_intp nrows = v.shape[1] cnp.npy_intp ncols = v.shape[2] cnp.npy_intp nvox = nrows * ncols * nslices cnp.npy_intp i, j, k, l double epsilon, delta double min_val = -1 double max_val = -1 int[:] hist = np.zeros(shape=(num_levels,), dtype=np.int32) int[:, :, :] out = np.zeros(shape=(nslices, nrows, ncols), dtype=np.int32) floating[:] levels = np.zeros(shape=(num_levels,), dtype=ftype) #Quantizing at zero levels is undefined #Quantizing at one level is not supported because we want to make sure the #maximum level in the quantization is never greater than num_levels-1 if(num_levels < 2): raise ValueError('Quantization levels must be at least 2') num_levels -= 1 # zero is one of the levels with nogil: for k in range(nslices): for i in range(nrows): for j in range(ncols): if(v[k, i, j] > 0): if((min_val < 0) or (v[k, i, j] < min_val)): min_val = v[k, i, j] if(v[k, i, j] > max_val): max_val = v[k, i, j] epsilon = 1e-8 delta = (max_val - min_val + epsilon) / num_levels # notice that we decreased num_levels, so levels[0..num_levels] are well # defined if((num_levels < 2) or (delta < epsilon)): for k in range(nslices): for i in range(nrows): for j in range(ncols): if(v[k, i, j] > 0): out[k, i, j] = 1 else: out[k, i, j] = 0 hist[0] += 1 levels[0] = 0 levels[1] = 0.5 * (min_val + max_val) hist[1] = nvox - hist[0] with gil: return out, levels, hist levels[0] = 0 levels[1] = min_val + delta * 0.5 for i in range(2, 1 + num_levels): levels[i] = levels[i - 1] + delta for k in range(nslices): for i in range(nrows): for j in range(ncols): if(v[k, i, j] > 0): l = ifloor((v[k, i, j] - min_val) / delta) out[k, i, j] = l + 1 hist[l + 1] += 1 else: out[k, i, j] = 0 hist[0] += 1 return np.asarray(out), np.asarray(levels), np.asarray(hist) def compute_masked_class_stats_2d(int[:, :] mask, floating[:, :] v, int num_labels, int[:, :] labels): r"""Computes the mean and std. for each quantization level. Computes the mean and standard deviation of the intensities in 'v' for each corresponding label in 'labels'. In other words, for each label L, it computes the mean and standard deviation of the intensities in 'v' at pixels whose label in 'labels' is L. This is used by the EM metric to compute statistics for each hidden variable represented by the labels. Parameters ---------- mask : array, shape (R, C) the mask of pixels that will be taken into account for computing the statistics. All zero pixels in mask will be ignored v : array, shape (R, C) the image which the statistics will be computed from num_labels : int the number of different labels in 'labels' (equal to the number of hidden variables in the EM metric) labels : array, shape (R, C) the label assigned to each pixel Returns ------- means : array, shape (num_labels,) means[i], 0<=i 0): means[i] /= counts[i] for i in range(nrows): for j in range(ncols): if(mask[i, j] != 0): diff = v[i, j] - means[labels[i, j]] variances[labels[i, j]] += diff ** 2 for i in range(num_labels): if(counts[i] > 1): variances[i] /= counts[i] else: variances[i] = INF64 return np.asarray(means), np.asarray(variances) def compute_masked_class_stats_3d(int[:, :, :] mask, floating[:, :, :] v, int num_labels, int[:, :, :] labels): r"""Computes the mean and std. for each quantization level. Computes the mean and standard deviation of the intensities in 'v' for each corresponding label in 'labels'. 
In other words, for each label L, it computes the mean and standard deviation of the intensities in 'v' at voxels whose label in 'labels' is L. This is used by the EM metric to compute statistics for each hidden variable represented by the labels. Parameters ---------- mask : array, shape (S, R, C) the mask of voxels that will be taken into account for computing the statistics. All zero voxels in mask will be ignored v : array, shape (S, R, C) the volume which the statistics will be computed from num_labels : int the number of different labels in 'labels' (equal to the number of hidden variables in the EM metric) labels : array, shape (S, R, C) the label assigned to each pixel Returns ------- means : array, shape (num_labels,) means[i], 0<=i 0): means[i] /= counts[i] for k in range(nslices): for i in range(nrows): for j in range(ncols): if(mask[k, i, j] != 0): diff = means[labels[k, i, j]] - v[k, i, j] variances[labels[k, i, j]] += diff ** 2 for i in range(num_labels): if(counts[i] > 1): variances[i] /= counts[i] else: variances[i] = INF64 return np.asarray(means), np.asarray(variances) @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) def compute_em_demons_step_2d(floating[:,:] delta_field, floating[:,:] sigma_sq_field, floating[:,:,:] gradient_moving, double sigma_sq_x, floating[:,:,:] out): r"""Demons step for EM metric in 2D Computes the demons step [Vercauteren09] for SSD-driven registration ( eq. 4 in [Vercauteren09] ) using the EM algorithm [Arce14] to handle multi-modality images. In this case, $\sigma_i$ in eq. 4 of [Vercauteren] is estimated using the EM algorithm, while in the original version of diffeomorphic demons it is estimated by the difference between the image values at each pixel. Parameters ---------- delta_field : array, shape (R, C) contains, at each pixel, the difference between the moving image (warped under the current deformation s(. , .) ) J and the static image I: delta_field[i,j] = J(s(i,j)) - I(i,j). The order is important, changing to delta_field[i,j] = I(i,j) - J(s(i,j)) yields the backward demons step warping the static image towards the moving, which may not be the intended behavior unless the 'gradient_moving' passed corresponds to the gradient of the static image sigma_sq_field : array, shape (R, C) contains, at each pixel (i, j), the estimated variance (not std) of the hidden variable associated to the intensity at static[i,j] (which must have been previously quantized) gradient_moving : array, shape (R, C, 2) the gradient of the moving image sigma_sq_x : float parameter controlling the amount of regularization. It corresponds to $\sigma_x^2$ in algorithm 1 of Vercauteren et al.[2] out : array, shape (R, C, 2) the resulting demons step will be written to this array Returns ------- demons_step : array, shape (R, C, 2) the demons step to be applied for updating the current displacement field energy : float the current em energy (before applying the returned demons_step) References ---------- [Arce14] Arce-santana, E., Campos-delgado, D. U., & Vigueras-g, F. (2014). Non-rigid Multimodal Image Registration Based on the Expectation-Maximization Algorithm, (168140), 36-47. [Vercauteren09] Vercauteren, T., Pennec, X., Perchant, A., & Ayache, N. (2009). Diffeomorphic demons: efficient non-parametric image registration. NeuroImage, 45(1 Suppl), S61-72. 
doi:10.1016/j.neuroimage.2008.10.040 """ cdef: cnp.npy_intp nr = delta_field.shape[0] cnp.npy_intp nc = delta_field.shape[1] cnp.npy_intp i, j double delta, sigma_sq_i, nrm2, energy, den, prod if out is None: out = np.zeros((nr, nc, 2), dtype=np.asarray(delta_field).dtype) with nogil: energy = 0 for i in range(nr): for j in range(nc): sigma_sq_i = sigma_sq_field[i,j] delta = delta_field[i,j] energy += (delta**2) if dpy_isinf(sigma_sq_i) != 0: out[i, j, 0], out[i, j, 1] = 0, 0 else: nrm2 = (gradient_moving[i, j, 0]**2 + gradient_moving[i, j, 1]**2) if(sigma_sq_i == 0): if nrm2 == 0: out[i, j, 0], out[i, j, 1] = 0, 0 else: out[i, j, 0] = (delta * gradient_moving[i, j, 0] / nrm2) out[i, j, 1] = (delta * gradient_moving[i, j, 1] / nrm2) else: den = (sigma_sq_x * nrm2 + sigma_sq_i) prod = sigma_sq_x * delta out[i, j, 0] = prod * gradient_moving[i, j, 0] / den out[i, j, 1] = prod * gradient_moving[i, j, 1] / den return np.asarray(out), energy @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) def compute_em_demons_step_3d(floating[:,:,:] delta_field, floating[:,:,:] sigma_sq_field, floating[:,:,:,:] gradient_moving, double sigma_sq_x, floating[:,:,:,:] out): r"""Demons step for EM metric in 3D Computes the demons step [Vercauteren09] for SSD-driven registration ( eq. 4 in [Vercauteren09] ) using the EM algorithm [Arce14] to handle multi-modality images. In this case, $\sigma_i$ in eq. 4 of [Vercauteren09] is estimated using the EM algorithm, while in the original version of diffeomorphic demons it is estimated by the difference between the image values at each pixel. Parameters ---------- delta_field : array, shape (S, R, C) contains, at each pixel, the difference between the moving image (warped under the current deformation s ) J and the static image I: delta_field[k,i,j] = J(s(k,i,j)) - I(k,i,j). The order is important, changing to delta_field[k,i,j] = I(k,i,j) - J(s(k,i,j)) yields the backward demons step warping the static image towards the moving, which may not be the intended behavior unless the 'gradient_moving' passed corresponds to the gradient of the static image sigma_sq_field : array, shape (S, R, C) contains, at each pixel (k, i, j), the estimated variance (not std) of the hidden variable associated to the intensity at static[k,i,j] (which must have been previously quantized) gradient_moving : array, shape (S, R, C, 2) the gradient of the moving image sigma_sq_x : float parameter controlling the amount of regularization. It corresponds to $\sigma_x^2$ in algorithm 1 of Vercauteren et al.[2]. out : array, shape (S, R, C, 2) the resulting demons step will be written to this array Returns ------- demons_step : array, shape (S, R, C, 3) the demons step to be applied for updating the current displacement field energy : float the current em energy (before applying the returned demons_step) References ---------- [Arce14] Arce-santana, E., Campos-delgado, D. U., & Vigueras-g, F. (2014). Non-rigid Multimodal Image Registration Based on the Expectation-Maximization Algorithm, (168140), 36-47. [Vercauteren09] Vercauteren, T., Pennec, X., Perchant, A., & Ayache, N. (2009). Diffeomorphic demons: efficient non-parametric image registration. NeuroImage, 45(1 Suppl), S61-72. 
doi:10.1016/j.neuroimage.2008.10.040 """ cdef: cnp.npy_intp ns = delta_field.shape[0] cnp.npy_intp nr = delta_field.shape[1] cnp.npy_intp nc = delta_field.shape[2] cnp.npy_intp i, j, k double delta, sigma_sq_i, nrm2, energy, den if out is None: out = np.zeros((ns, nr, nc, 3), dtype=np.asarray(delta_field).dtype) with nogil: energy = 0 for k in range(ns): for i in range(nr): for j in range(nc): sigma_sq_i = sigma_sq_field[k,i,j] delta = delta_field[k,i,j] energy += (delta**2) if dpy_isinf(sigma_sq_i) != 0: out[k, i, j, 0] = 0 out[k, i, j, 1] = 0 out[k, i, j, 2] = 0 else: nrm2 = (gradient_moving[k, i, j, 0]**2 + gradient_moving[k, i, j, 1]**2 + gradient_moving[k, i, j, 2]**2) if(sigma_sq_i == 0): if nrm2 == 0: out[k, i, j, 0] = 0 out[k, i, j, 1] = 0 out[k, i, j, 2] = 0 else: out[k, i, j, 0] = (delta * gradient_moving[k, i, j, 0] / nrm2) out[k, i, j, 1] = (delta * gradient_moving[k, i, j, 1] / nrm2) out[k, i, j, 2] = (delta * gradient_moving[k, i, j, 2] / nrm2) else: den = (sigma_sq_x * nrm2 + sigma_sq_i) out[k, i, j, 0] = (sigma_sq_x * delta * gradient_moving[k, i, j, 0] / den) out[k, i, j, 1] = (sigma_sq_x * delta * gradient_moving[k, i, j, 1] / den) out[k, i, j, 2] = (sigma_sq_x * delta * gradient_moving[k, i, j, 2] / den) return np.asarray(out), energy dipy-0.13.0/dipy/align/fused_types.pxd000066400000000000000000000001201317371701200176410ustar00rootroot00000000000000cimport cython ctypedef cython.floating floating ctypedef cython.numeric number dipy-0.13.0/dipy/align/imaffine.py000066400000000000000000001516221317371701200167400ustar00rootroot00000000000000""" Affine image registration module consisting of the following classes: AffineMap: encapsulates the necessary information to perform affine transforms between two domains, defined by a `static` and a `moving` image. The `domain` of the transform is the set of points in the `static` image's grid, and the `codomain` is the set of points in the `moving` image. When we call the `transform` method, `AffineMap` maps each point `x` of the domain (`static` grid) to the codomain (`moving` grid) and interpolates the `moving` image at that point to obtain the intensity value to be placed at `x` in the resulting grid. The `transform_inverse` method performs the opposite operation mapping points in the codomain to points in the domain. ParzenJointHistogram: computes the marginal and joint distributions of intensities of a pair of images, using Parzen windows [Parzen62] with a cubic spline kernel, as proposed by Mattes et al. [Mattes03]. It also computes the gradient of the joint histogram w.r.t. the parameters of a given transform. MutualInformationMetric: computes the value and gradient of the mutual information metric the way `Optimizer` needs them. That is, given a set of transform parameters, it will use `ParzenJointHistogram` to compute the value and gradient of the joint intensity histogram evaluated at the given parameters, and evaluate the the value and gradient of the histogram's mutual information. AffineRegistration: it runs the multi-resolution registration, putting all the pieces together. It needs to create the scale space of the images and run the multi-resolution registration by using the Metric and the Optimizer at each level of the Gaussian pyramid. At each level, it will setup the metric to compute value and gradient of the metric with the input images with different levels of smoothing. References ---------- [Parzen62] E. Parzen. On the estimation of a probability density function and the mode. 
Annals of Mathematical Statistics, 33(3), 1065-1076, 1962. [Mattes03] Mattes, D., Haynor, D. R., Vesselle, H., Lewellen, T. K., & Eubank, W. PET-CT image registration in the chest using free-form deformations. IEEE Transactions on Medical Imaging, 22(1), 120-8, 2003. """ import numpy as np import numpy.linalg as npl import scipy.ndimage as ndimage from dipy.core.optimize import Optimizer from dipy.core.optimize import SCIPY_LESS_0_12 from dipy.align import vector_fields as vf from dipy.align import VerbosityLevels from dipy.align.parzenhist import (ParzenJointHistogram, sample_domain_regular, compute_parzen_mi) from dipy.align.imwarp import (get_direction_and_spacings, ScaleSpace) from dipy.align.scalespace import IsotropicScaleSpace from warnings import warn _interp_options = ['nearest', 'linear'] _transform_method = {} _transform_method[(2, 'nearest')] = vf.transform_2d_affine_nn _transform_method[(3, 'nearest')] = vf.transform_3d_affine_nn _transform_method[(2, 'linear')] = vf.transform_2d_affine _transform_method[(3, 'linear')] = vf.transform_3d_affine class AffineInversionError(Exception): pass class AffineMap(object): def __init__(self, affine, domain_grid_shape=None, domain_grid2world=None, codomain_grid_shape=None, codomain_grid2world=None): """ AffineMap Implements an affine transformation whose domain is given by `domain_grid` and `domain_grid2world`, and whose co-domain is given by `codomain_grid` and `codomain_grid2world`. The actual transform is represented by the `affine` matrix, which operate in world coordinates. Therefore, to transform a moving image towards a static image, we first map each voxel (i,j,k) of the static image to world coordinates (x,y,z) by applying `domain_grid2world`. Then we apply the `affine` transform to (x,y,z) obtaining (x', y', z') in moving image's world coordinates. Finally, (x', y', z') is mapped to voxel coordinates (i', j', k') in the moving image by multiplying (x', y', z') by the inverse of `codomain_grid2world`. The `codomain_grid_shape` is used analogously to transform the static image towards the moving image when calling `transform_inverse`. If the domain/co-domain information is not provided (None) then the sampling information needs to be specified each time the `transform` or `transform_inverse` is called to transform images. Note that such sampling information is not necessary to transform points defined in physical space, such as stream lines. Parameters ---------- affine : array, shape (dim + 1, dim + 1) the matrix defining the affine transform, where `dim` is the dimension of the space this map operates in (2 for 2D images, 3 for 3D images). If None, then `self` represents the identity transformation. domain_grid_shape : sequence, shape (dim,), optional the shape of the default domain sampling grid. When `transform` is called to transform an image, the resulting image will have this shape, unless a different sampling information is provided. If None, then the sampling grid shape must be specified each time the `transform` method is called. domain_grid2world : array, shape (dim + 1, dim + 1), optional the grid-to-world transform associated with the domain grid. If None (the default), then the grid-to-world transform is assumed to be the identity. codomain_grid_shape : sequence of integers, shape (dim,) the shape of the default co-domain sampling grid. When `transform_inverse` is called to transform an image, the resulting image will have this shape, unless a different sampling information is provided. 
If None (the default), then the sampling grid shape must be specified each time the `transform_inverse` method is called. codomain_grid2world : array, shape (dim + 1, dim + 1) the grid-to-world transform associated with the co-domain grid. If None (the default), then the grid-to-world transform is assumed to be the identity. """ self.set_affine(affine) self.domain_shape = domain_grid_shape self.domain_grid2world = domain_grid2world self.codomain_shape = codomain_grid_shape self.codomain_grid2world = codomain_grid2world def set_affine(self, affine): """ Sets the affine transform (operating in physical space) Also sets `self.affine_inv` - the inverse of `affine`, or None if there is no inverse. Parameters ---------- affine : array, shape (dim + 1, dim + 1) the matrix representing the affine transform operating in physical space. The domain and co-domain information remains unchanged. If None, then `self` represents the identity transformation. """ self.affine = affine self.affine_inv = None if self.affine is None: return if not np.all(np.isfinite(affine)): raise AffineInversionError('Affine contains invalid elements') try: self.affine_inv = npl.inv(affine) except npl.LinAlgError: raise AffineInversionError('Affine cannot be inverted') def _apply_transform(self, image, interp='linear', image_grid2world=None, sampling_grid_shape=None, sampling_grid2world=None, resample_only=False, apply_inverse=False): """ Transforms the input image applying this affine transform This is a generic function to transform images using either this (direct) transform or its inverse. If applying the direct transform (`apply_inverse=False`): by default, the transformed image is sampled at a grid defined by `self.domain_shape` and `self.domain_grid2world`. If applying the inverse transform (`apply_inverse=True`): by default, the transformed image is sampled at a grid defined by `self.codomain_shape` and `self.codomain_grid2world`. If the sampling information was not provided at initialization of this transform then `sampling_grid_shape` is mandatory. Parameters ---------- image : array, shape (X, Y) or (X, Y, Z) the image to be transformed interp : string, either 'linear' or 'nearest' the type of interpolation to be used, either 'linear' (for k-linear interpolation) or 'nearest' for nearest neighbor image_grid2world : array, shape (dim + 1, dim + 1), optional the grid-to-world transform associated with `image`. If None (the default), then the grid-to-world transform is assumed to be the identity. sampling_grid_shape : sequence, shape (dim,), optional the shape of the grid where the transformed image must be sampled. If None (the default), then `self.domain_shape` is used instead (which must have been set at initialization, otherwise an exception will be raised). sampling_grid2world : array, shape (dim + 1, dim + 1), optional the grid-to-world transform associated with the sampling grid (specified by `sampling_grid_shape`, or by default `self.domain_shape`). If None (the default), then the grid-to-world transform is assumed to be the identity. resample_only : Boolean, optional If False (the default) the affine transform is applied normally. If True, then the affine transform is not applied, and the input image is just re-sampled on the domain grid of this transform. apply_inverse : Boolean, optional If False (the default) the image is transformed from the codomain of this transform to its domain using the (direct) affine transform. 
Otherwise, the image is transformed from the domain of this transform to its codomain using the (inverse) affine transform. Returns ------- transformed : array, shape `sampling_grid_shape` or `self.domain_shape` the transformed image, sampled at the requested grid """ # Verify valid interpolation requested if interp not in _interp_options: raise ValueError('Unknown interpolation method: %s' % (interp,)) # Obtain sampling grid if sampling_grid_shape is None: if apply_inverse: sampling_grid_shape = self.codomain_shape else: sampling_grid_shape = self.domain_shape if sampling_grid_shape is None: msg = 'Unknown sampling info. Provide a valid sampling_grid_shape' raise ValueError(msg) dim = len(sampling_grid_shape) shape = np.array(sampling_grid_shape, dtype=np.int32) # Verify valid image dimension img_dim = len(image.shape) if img_dim < 2 or img_dim > 3: raise ValueError('Undefined transform for dim: %d' % (img_dim,)) # Obtain grid-to-world transform for sampling grid if sampling_grid2world is None: if apply_inverse: sampling_grid2world = self.codomain_grid2world else: sampling_grid2world = self.domain_grid2world if sampling_grid2world is None: sampling_grid2world = np.eye(dim + 1) # Obtain world-to-grid transform for input image if image_grid2world is None: if apply_inverse: image_grid2world = self.domain_grid2world else: image_grid2world = self.codomain_grid2world if image_grid2world is None: image_grid2world = np.eye(dim + 1) image_world2grid = npl.inv(image_grid2world) # Compute the transform from sampling grid to input image grid if apply_inverse: aff = self.affine_inv else: aff = self.affine if (aff is None) or resample_only: comp = image_world2grid.dot(sampling_grid2world) else: comp = image_world2grid.dot(aff.dot(sampling_grid2world)) # Transform the input image if interp == 'linear': image = image.astype(np.float64) transformed = _transform_method[(dim, interp)](image, shape, comp) return transformed def transform(self, image, interp='linear', image_grid2world=None, sampling_grid_shape=None, sampling_grid2world=None, resample_only=False): """ Transforms the input image from co-domain to domain space By default, the transformed image is sampled at a grid defined by `self.domain_shape` and `self.domain_grid2world`. If such information was not provided then `sampling_grid_shape` is mandatory. Parameters ---------- image : array, shape (X, Y) or (X, Y, Z) the image to be transformed interp : string, either 'linear' or 'nearest' the type of interpolation to be used, either 'linear' (for k-linear interpolation) or 'nearest' for nearest neighbor image_grid2world : array, shape (dim + 1, dim + 1), optional the grid-to-world transform associated with `image`. If None (the default), then the grid-to-world transform is assumed to be the identity. sampling_grid_shape : sequence, shape (dim,), optional the shape of the grid where the transformed image must be sampled. If None (the default), then `self.codomain_shape` is used instead (which must have been set at initialization, otherwise an exception will be raised). sampling_grid2world : array, shape (dim + 1, dim + 1), optional the grid-to-world transform associated with the sampling grid (specified by `sampling_grid_shape`, or by default `self.codomain_shape`). If None (the default), then the grid-to-world transform is assumed to be the identity. resample_only : Boolean, optional If False (the default) the affine transform is applied normally. 
If True, then the affine transform is not applied, and the input image is just re-sampled on the domain grid of this transform. Returns ------- transformed : array, shape `sampling_grid_shape` or `self.codomain_shape` the transformed image, sampled at the requested grid """ transformed = self._apply_transform(image, interp, image_grid2world, sampling_grid_shape, sampling_grid2world, resample_only, apply_inverse=False) return np.array(transformed) def transform_inverse(self, image, interp='linear', image_grid2world=None, sampling_grid_shape=None, sampling_grid2world=None, resample_only=False): """ Transforms the input image from domain to co-domain space By default, the transformed image is sampled at a grid defined by `self.codomain_shape` and `self.codomain_grid2world`. If such information was not provided then `sampling_grid_shape` is mandatory. Parameters ---------- image : array, shape (X, Y) or (X, Y, Z) the image to be transformed interp : string, either 'linear' or 'nearest' the type of interpolation to be used, either 'linear' (for k-linear interpolation) or 'nearest' for nearest neighbor image_grid2world : array, shape (dim + 1, dim + 1), optional the grid-to-world transform associated with `image`. If None (the default), then the grid-to-world transform is assumed to be the identity. sampling_grid_shape : sequence, shape (dim,), optional the shape of the grid where the transformed image must be sampled. If None (the default), then `self.codomain_shape` is used instead (which must have been set at initialization, otherwise an exception will be raised). sampling_grid2world : array, shape (dim + 1, dim + 1), optional the grid-to-world transform associated with the sampling grid (specified by `sampling_grid_shape`, or by default `self.codomain_shape`). If None (the default), then the grid-to-world transform is assumed to be the identity. resample_only : Boolean, optional If False (the default) the affine transform is applied normally. If True, then the affine transform is not applied, and the input image is just re-sampled on the domain grid of this transform. Returns ------- transformed : array, shape `sampling_grid_shape` or `self.codomain_shape` the transformed image, sampled at the requested grid """ transformed = self._apply_transform(image, interp, image_grid2world, sampling_grid_shape, sampling_grid2world, resample_only, apply_inverse=True) return np.array(transformed) class MutualInformationMetric(object): def __init__(self, nbins=32, sampling_proportion=None): r""" Initializes an instance of the Mutual Information metric This class implements the methods required by Optimizer to drive the registration process. Parameters ---------- nbins : int, optional the number of bins to be used for computing the intensity histograms. The default is 32. sampling_proportion : None or float in interval (0, 1], optional There are two types of sampling: dense and sparse. Dense sampling uses all voxels for estimating the (joint and marginal) intensity histograms, while sparse sampling uses a subset of them. If `sampling_proportion` is None, then dense sampling is used. If `sampling_proportion` is a floating point value in (0,1] then sparse sampling is used, where `sampling_proportion` specifies the proportion of voxels to be used. The default is None. Notes ----- Since we use linear interpolation, images are not, in general, differentiable at exact voxel coordinates, but they are differentiable between voxel coordinates. 
When using sparse sampling, selected voxels are slightly moved by adding a small random displacement within one voxel to prevent sampling points from being located exactly at voxel coordinates. When using dense sampling, this random displacement is not applied. """ self.histogram = ParzenJointHistogram(nbins) self.sampling_proportion = sampling_proportion self.metric_val = None self.metric_grad = None def setup(self, transform, static, moving, static_grid2world=None, moving_grid2world=None, starting_affine=None): r""" Prepares the metric to compute intensity densities and gradients The histograms will be setup to compute probability densities of intensities within the minimum and maximum values of `static` and `moving` Parameters ---------- transform: instance of Transform the transformation with respect to whose parameters the gradient must be computed static : array, shape (S, R, C) or (R, C) static image moving : array, shape (S', R', C') or (R', C') moving image. The dimensions of the static (S, R, C) and moving (S', R', C') images do not need to be the same. static_grid2world : array (dim+1, dim+1), optional the grid-to-space transform of the static image. The default is None, implying the transform is the identity. moving_grid2world : array (dim+1, dim+1) the grid-to-space transform of the moving image. The default is None, implying the spacing along all axes is 1. starting_affine : array, shape (dim+1, dim+1), optional the pre-aligning matrix (an affine transform) that roughly aligns the moving image towards the static image. If None, no pre-alignment is performed. If a pre-alignment matrix is available, it is recommended to provide this matrix as `starting_affine` instead of manually transforming the moving image to reduce interpolation artifacts. The default is None, implying no pre-alignment is performed. 
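        Examples
        --------
        A minimal sketch (illustrative only): it assumes
        ``dipy.align.transforms.TranslationTransform3D`` is available and
        uses two small random volumes, so the resulting value is not
        meaningful by itself.

        >>> import numpy as np
        >>> from dipy.align.transforms import TranslationTransform3D
        >>> static = np.random.rand(20, 20, 20)
        >>> moving = np.random.rand(20, 20, 20)
        >>> metric = MutualInformationMetric(nbins=32)
        >>> metric.setup(TranslationTransform3D(), static, moving)
        >>> neg_mi = metric.distance(np.zeros(3))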
""" n = transform.get_number_of_parameters() self.metric_grad = np.zeros(n, dtype=np.float64) self.dim = len(static.shape) if moving_grid2world is None: moving_grid2world = np.eye(self.dim + 1) if static_grid2world is None: static_grid2world = np.eye(self.dim + 1) self.transform = transform self.static = np.array(static).astype(np.float64) self.moving = np.array(moving).astype(np.float64) self.static_grid2world = static_grid2world self.static_world2grid = npl.inv(static_grid2world) self.moving_grid2world = moving_grid2world self.moving_world2grid = npl.inv(moving_grid2world) self.static_direction, self.static_spacing = \ get_direction_and_spacings(static_grid2world, self.dim) self.moving_direction, self.moving_spacing = \ get_direction_and_spacings(moving_grid2world, self.dim) self.starting_affine = starting_affine P = np.eye(self.dim + 1) if self.starting_affine is not None: P = self.starting_affine self.affine_map = AffineMap(P, static.shape, static_grid2world, moving.shape, moving_grid2world) if self.dim == 2: self.interp_method = vf.interpolate_scalar_2d else: self.interp_method = vf.interpolate_scalar_3d if self.sampling_proportion is None: self.samples = None self.ns = 0 else: k = int(np.ceil(1.0 / self.sampling_proportion)) shape = np.array(static.shape, dtype=np.int32) self.samples = sample_domain_regular(k, shape, static_grid2world) self.samples = np.array(self.samples) self.ns = self.samples.shape[0] # Add a column of ones (homogeneous coordinates) self.samples = np.hstack((self.samples, np.ones(self.ns)[:, None])) if self.starting_affine is None: self.samples_prealigned = self.samples else: self.samples_prealigned =\ self.starting_affine.dot(self.samples.T).T # Sample the static image static_p = self.static_world2grid.dot(self.samples.T).T static_p = static_p[..., :self.dim] self.static_vals, inside = self.interp_method(static, static_p) self.static_vals = np.array(self.static_vals, dtype=np.float64) self.histogram.setup(self.static, self.moving) def _update_histogram(self): r""" Updates the histogram according to the current affine transform The current affine transform is given by `self.affine_map`, which must be set before calling this method. Returns ------- static_values: array, shape(n,) if sparse sampling is being used, array, shape(S, R, C) or (R, C) if dense sampling the intensity values corresponding to the static image used to update the histogram. If sparse sampling is being used, then it is simply a sequence of scalars, obtained by sampling the static image at the `n` sampling points. If dense sampling is being used, then the intensities are given directly by the static image, whose shape is (S, R, C) in the 3D case or (R, C) in the 2D case. moving_values: array, shape(n,) if sparse sampling is being used, array, shape(S, R, C) or (R, C) if dense sampling the intensity values corresponding to the moving image used to update the histogram. If sparse sampling is being used, then it is simply a sequence of scalars, obtained by sampling the moving image at the `n` sampling points (mapped to the moving space by the current affine transform). If dense sampling is being used, then the intensities are given by the moving imaged linearly transformed towards the static image by the current affine, which results in an image of the same shape as the static image. 
""" static_values = None moving_values = None if self.sampling_proportion is None: # Dense case static_values = self.static moving_values = self.affine_map.transform(self.moving) self.histogram.update_pdfs_dense(static_values, moving_values) else: # Sparse case sp_to_moving = self.moving_world2grid.dot(self.affine_map.affine) pts = sp_to_moving.dot(self.samples.T).T # Points on moving grid pts = pts[..., :self.dim] self.moving_vals, inside = self.interp_method(self.moving, pts) self.moving_vals = np.array(self.moving_vals) static_values = self.static_vals moving_values = self.moving_vals self.histogram.update_pdfs_sparse(static_values, moving_values) return static_values, moving_values def _update_mutual_information(self, params, update_gradient=True): r""" Updates marginal and joint distributions and the joint gradient The distributions are updated according to the static and transformed images. The transformed image is precisely the moving image after transforming it by the transform defined by the `params` parameters. The gradient of the joint PDF is computed only if update_gradient is True. Parameters ---------- params : array, shape (n,) the parameter vector of the transform currently used by the metric (the transform name is provided when self.setup is called), n is the number of parameters of the transform update_gradient : Boolean, optional if True, the gradient of the joint PDF will also be computed, otherwise, only the marginal and joint PDFs will be computed. The default is True. """ # Get the matrix associated with the `params` parameter vector current_affine = self.transform.param_to_matrix(params) # Get the static-to-prealigned matrix (only needed for the MI gradient) static2prealigned = self.static_grid2world if self.starting_affine is not None: current_affine = current_affine.dot(self.starting_affine) static2prealigned = self.starting_affine.dot(static2prealigned) self.affine_map.set_affine(current_affine) # Update the histogram with the current joint intensities static_values, moving_values = self._update_histogram() H = self.histogram # Shortcut to `self.histogram` grad = None # Buffer to write the MI gradient into (if needed) if update_gradient: grad = self.metric_grad # Compute the gradient of the joint PDF w.r.t. parameters if self.sampling_proportion is None: # Dense case # Compute the gradient of moving img. at physical points # associated with the >>static image's grid<< cells # The image gradient must be eval. 
at current moved points grid_to_world = current_affine.dot(self.static_grid2world) mgrad, inside = vf.gradient(self.moving, self.moving_world2grid, self.moving_spacing, self.static.shape, grid_to_world) # The Jacobian must be evaluated at the pre-aligned points H.update_gradient_dense( params, self.transform, static_values, moving_values, static2prealigned, mgrad) else: # Sparse case # Compute the gradient of moving at the sampling points # which are already given in physical space coordinates pts = current_affine.dot(self.samples.T).T # Moved points mgrad, inside = vf.sparse_gradient(self.moving, self.moving_world2grid, self.moving_spacing, pts) # The Jacobian must be evaluated at the pre-aligned points pts = self.samples_prealigned[..., :self.dim] H.update_gradient_sparse(params, self.transform, static_values, moving_values, pts, mgrad) # Call the cythonized MI computation with self.histogram fields self.metric_val = compute_parzen_mi(H.joint, H.joint_grad, H.smarginal, H.mmarginal, grad) def distance(self, params): r""" Numeric value of the negative Mutual Information We need to change the sign so we can use standard minimization algorithms. Parameters ---------- params : array, shape (n,) the parameter vector of the transform currently used by the metric (the transform name is provided when self.setup is called), n is the number of parameters of the transform Returns ------- neg_mi : float the negative mutual information of the input images after transforming the moving image by the currently set transform with `params` parameters """ try: self._update_mutual_information(params, False) except AffineInversionError: return np.inf return -1 * self.metric_val def gradient(self, params): r""" Numeric value of the metric's gradient at the given parameters Parameters ---------- params : array, shape (n,) the parameter vector of the transform currently used by the metric (the transform name is provided when self.setup is called), n is the number of parameters of the transform Returns ------- grad : array, shape (n,) the gradient of the negative Mutual Information """ try: self._update_mutual_information(params, True) except AffineInversionError: return 0 * self.metric_grad return -1 * self.metric_grad def distance_and_gradient(self, params): r""" Numeric value of the metric and its gradient at given parameters Parameters ---------- params : array, shape (n,) the parameter vector of the transform currently used by the metric (the transform name is provided when self.setup is called), n is the number of parameters of the transform Returns ------- neg_mi : float the negative mutual information of the input images after transforming the moving image by the currently set transform with `params` parameters neg_mi_grad : array, shape (n,) the gradient of the negative Mutual Information """ try: self._update_mutual_information(params, True) except AffineInversionError: return np.inf, 0 * self.metric_grad return -1 * self.metric_val, -1 * self.metric_grad class AffineRegistration(object): def __init__(self, metric=None, level_iters=None, sigmas=None, factors=None, method='L-BFGS-B', ss_sigma_factor=None, options=None, verbosity=VerbosityLevels.STATUS): """ Initializes an instance of the AffineRegistration class Parameters ---------- metric : None or object, optional an instance of a metric. The default is None, implying the Mutual Information metric with default settings. level_iters : sequence, optional the number of iterations at each scale of the scale space. 
`level_iters[0]` corresponds to the coarsest scale, `level_iters[-1]` the finest, where n is the length of the sequence. By default, a 3-level scale space with iterations sequence equal to [10000, 1000, 100] will be used. sigmas : sequence of floats, optional custom smoothing parameter to build the scale space (one parameter for each scale). By default, the sequence of sigmas will be [3, 1, 0]. factors : sequence of floats, optional custom scale factors to build the scale space (one factor for each scale). By default, the sequence of factors will be [4, 2, 1]. method : string, optional optimization method to be used. If Scipy version < 0.12, then only L-BFGS-B is available. Otherwise, `method` can be any gradient-based method available in `dipy.core.Optimize`: CG, BFGS, Newton-CG, dogleg or trust-ncg. The default is 'L-BFGS-B'. ss_sigma_factor : float, optional If None, this parameter is not used and an isotropic scale space with the given `factors` and `sigmas` will be built. If not None, an anisotropic scale space will be used by automatically selecting the smoothing sigmas along each axis according to the voxel dimensions of the given image. The `ss_sigma_factor` is used to scale the automatically computed sigmas. For example, in the isotropic case, the sigma of the kernel will be $factor * (2 ^ i)$ where $i = 1, 2, ..., n_scales - 1$ is the scale (the finest resolution image $i=0$ is never smoothed). The default is None. options : dict, optional extra optimization options. The default is None, implying no extra options are passed to the optimizer. """ self.metric = metric if self.metric is None: self.metric = MutualInformationMetric() if level_iters is None: level_iters = [10000, 1000, 100] self.level_iters = level_iters self.levels = len(level_iters) if self.levels == 0: raise ValueError('The iterations sequence cannot be empty') self.options = options self.method = method if ss_sigma_factor is not None: self.use_isotropic = False self.ss_sigma_factor = ss_sigma_factor else: self.use_isotropic = True if factors is None: factors = [4, 2, 1] if sigmas is None: sigmas = [3, 1, 0] self.factors = factors self.sigmas = sigmas self.verbosity = verbosity # Separately add a string that tells about the verbosity kwarg. This needs # to be separate, because it is set as a module-wide option in __init__: docstring_addendum =\ """verbosity: int (one of {0, 1, 2, 3}), optional Set the verbosity level of the algorithm: 0 : do not print anything 1 : print information about the current status of the algorithm 2 : print high level information of the components involved in the registration that can be used to detect a failing component. 3 : print as much information as possible to isolate the cause of a bug. Default: % s """ % VerbosityLevels.STATUS __init__.__doc__ = __init__.__doc__ + docstring_addendum def _init_optimizer(self, static, moving, transform, params0, static_grid2world, moving_grid2world, starting_affine): r"""Initializes the registration optimizer Initializes the optimizer by computing the scale space of the input images Parameters ---------- static : array, shape (S, R, C) or (R, C) the image to be used as reference during optimization. moving : array, shape (S', R', C') or (R', C') the image to be used as "moving" during optimization. The dimensions of the static (S, R, C) and moving (S', R', C') images do not need to be the same. 
transform : instance of Transform the transformation with respect to whose parameters the gradient must be computed params0 : array, shape (n,) parameters from which to start the optimization. If None, the optimization will start at the identity transform. n is the number of parameters of the specified transformation. static_grid2world : array, shape (dim+1, dim+1) the voxel-to-space transformation associated with the static image moving_grid2world : array, shape (dim+1, dim+1) the voxel-to-space transformation associated with the moving image starting_affine : string, or matrix, or None If string: 'mass': align centers of gravity 'voxel-origin': align physical coordinates of voxel (0,0,0) 'centers': align physical coordinates of central voxels If matrix: array, shape (dim+1, dim+1) If None: Start from identity """ self.dim = len(static.shape) self.transform = transform n = transform.get_number_of_parameters() self.nparams = n if params0 is None: params0 = self.transform.get_identity_parameters() self.params0 = params0 if starting_affine is None: self.starting_affine = np.eye(self.dim + 1) elif isinstance(starting_affine, str): if starting_affine == 'mass': affine_map = transform_centers_of_mass(static, static_grid2world, moving, moving_grid2world) self.starting_affine = affine_map.affine elif starting_affine == 'voxel-origin': affine_map = transform_origins(static, static_grid2world, moving, moving_grid2world) self.starting_affine = affine_map.affine elif starting_affine == 'centers': affine_map = transform_geometric_centers(static, static_grid2world, moving, moving_grid2world) self.starting_affine = affine_map.affine else: raise ValueError('Invalid starting_affine strategy') elif (isinstance(starting_affine, np.ndarray) and starting_affine.shape >= (self.dim, self.dim + 1)): self.starting_affine = starting_affine else: raise ValueError('Invalid starting_affine matrix') # Extract information from affine matrices to create the scale space static_direction, static_spacing = \ get_direction_and_spacings(static_grid2world, self.dim) moving_direction, moving_spacing = \ get_direction_and_spacings(moving_grid2world, self.dim) static = ((static.astype(np.float64) - static.min()) / (static.max() - static.min())) moving = ((moving.astype(np.float64) - moving.min()) / (moving.max() - moving.min())) # Build the scale space of the input images if self.use_isotropic: self.moving_ss = IsotropicScaleSpace(moving, self.factors, self.sigmas, moving_grid2world, moving_spacing, False) self.static_ss = IsotropicScaleSpace(static, self.factors, self.sigmas, static_grid2world, static_spacing, False) else: self.moving_ss = ScaleSpace(moving, self.levels, moving_grid2world, moving_spacing, self.ss_sigma_factor, False) self.static_ss = ScaleSpace(static, self.levels, static_grid2world, static_spacing, self.ss_sigma_factor, False) def optimize(self, static, moving, transform, params0, static_grid2world=None, moving_grid2world=None, starting_affine=None): r''' Starts the optimization process Parameters ---------- static : array, shape (S, R, C) or (R, C) the image to be used as reference during optimization. moving : array, shape (S', R', C') or (R', C') the image to be used as "moving" during optimization. It is necessary to pre-align the moving image to ensure its domain lies inside the domain of the deformation fields. 
This is assumed to be accomplished by "pre-aligning" the moving image towards the static using an affine transformation given by the 'starting_affine' matrix transform : instance of Transform the transformation with respect to whose parameters the gradient must be computed params0 : array, shape (n,) parameters from which to start the optimization. If None, the optimization will start at the identity transform. n is the number of parameters of the specified transformation. static_grid2world : array, shape (dim+1, dim+1), optional the voxel-to-space transformation associated with the static image. The default is None, implying the transform is the identity. moving_grid2world : array, shape (dim+1, dim+1), optional the voxel-to-space transformation associated with the moving image. The default is None, implying the transform is the identity. starting_affine : string, or matrix, or None, optional If string: 'mass': align centers of gravity 'voxel-origin': align physical coordinates of voxel (0,0,0) 'centers': align physical coordinates of central voxels If matrix: array, shape (dim+1, dim+1). If None: Start from identity. The default is None. Returns ------- affine_map : instance of AffineMap the affine resulting affine transformation ''' self._init_optimizer(static, moving, transform, params0, static_grid2world, moving_grid2world, starting_affine) del starting_affine # Now we must refer to self.starting_affine # Multi-resolution iterations original_static_shape = self.static_ss.get_image(0).shape original_static_grid2world = self.static_ss.get_affine(0) original_moving_shape = self.moving_ss.get_image(0).shape original_moving_grid2world = self.moving_ss.get_affine(0) affine_map = AffineMap(None, original_static_shape, original_static_grid2world, original_moving_shape, original_moving_grid2world) for level in range(self.levels - 1, -1, -1): self.current_level = level max_iter = self.level_iters[-1 - level] if self.verbosity >= VerbosityLevels.STATUS: print('Optimizing level %d [max iter: %d]' % (level, max_iter)) # Resample the smooth static image to the shape of this level smooth_static = self.static_ss.get_image(level) current_static_shape = self.static_ss.get_domain_shape(level) current_static_grid2world = self.static_ss.get_affine(level) current_affine_map = AffineMap(None, current_static_shape, current_static_grid2world, original_static_shape, original_static_grid2world) current_static = current_affine_map.transform(smooth_static) # The moving image is full resolution current_moving_grid2world = original_moving_grid2world current_moving = self.moving_ss.get_image(level) # Prepare the metric for iterations at this resolution self.metric.setup(transform, current_static, current_moving, current_static_grid2world, current_moving_grid2world, self.starting_affine) # Optimize this level if self.options is None: self.options = {'gtol': 1e-4, 'disp': False} if self.method == 'L-BFGS-B': self.options['maxfun'] = max_iter else: self.options['maxiter'] = max_iter if SCIPY_LESS_0_12: # Older versions don't expect value and gradient from # the same function opt = Optimizer(self.metric.distance, self.params0, method=self.method, jac=self.metric.gradient, options=self.options) else: opt = Optimizer(self.metric.distance_and_gradient, self.params0, method=self.method, jac=True, options=self.options) params = opt.xopt # Update starting_affine matrix with optimal parameters T = self.transform.param_to_matrix(params) self.starting_affine = T.dot(self.starting_affine) # Start next iteration at identity 
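            # (the optimal transform found at this level has already been
            # composed into self.starting_affine above, so the search at the
            # next, finer level restarts from the identity parameters)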
self.params0 = self.transform.get_identity_parameters() affine_map.set_affine(self.starting_affine) return affine_map def align_centers_of_mass(static, static_grid2world, moving, moving_grid2world): msg = "This function is deprecated please use" msg += " dipy.align.imaffine.transform_centers_of_mass instead." warn(msg) return transform_centers_of_mass(static, static_grid2world, moving, moving_grid2world) def align_geometric_centers(static, static_grid2world, moving, moving_grid2world): msg = "This function is deprecated please use" msg += " dipy.align.imaffine.transform_geometric_centers instead." warn(msg) return transform_geometric_centers(static, static_grid2world, moving, moving_grid2world) def align_origins(static, static_grid2world, moving, moving_grid2world): msg = "This function is deprecated please use" msg += " dipy.align.imaffine.transform_origins instead." warn(msg) return transform_origins(static, static_grid2world, moving, moving_grid2world) def transform_centers_of_mass(static, static_grid2world, moving, moving_grid2world): r""" Transformation to align the center of mass of the input images Parameters ---------- static : array, shape (S, R, C) static image static_grid2world : array, shape (dim+1, dim+1) the voxel-to-space transformation of the static image moving : array, shape (S, R, C) moving image moving_grid2world : array, shape (dim+1, dim+1) the voxel-to-space transformation of the moving image Returns ------- affine_map : instance of AffineMap the affine transformation (translation only, in this case) aligning the center of mass of the moving image towards the one of the static image """ dim = len(static.shape) if static_grid2world is None: static_grid2world = np.eye(dim + 1) if moving_grid2world is None: moving_grid2world = np.eye(dim + 1) c_static = ndimage.measurements.center_of_mass(np.array(static)) c_static = static_grid2world.dot(c_static + (1,)) c_moving = ndimage.measurements.center_of_mass(np.array(moving)) c_moving = moving_grid2world.dot(c_moving + (1,)) transform = np.eye(dim + 1) transform[:dim, dim] = (c_moving - c_static)[:dim] affine_map = AffineMap(transform, static.shape, static_grid2world, moving.shape, moving_grid2world) return affine_map def transform_geometric_centers(static, static_grid2world, moving, moving_grid2world): r""" Transformation to align the geometric center of the input images With "geometric center" of a volume we mean the physical coordinates of its central voxel Parameters ---------- static : array, shape (S, R, C) static image static_grid2world : array, shape (dim+1, dim+1) the voxel-to-space transformation of the static image moving : array, shape (S, R, C) moving image moving_grid2world : array, shape (dim+1, dim+1) the voxel-to-space transformation of the moving image Returns ------- affine_map : instance of AffineMap the affine transformation (translation only, in this case) aligning the geometric center of the moving image towards the one of the static image """ dim = len(static.shape) if static_grid2world is None: static_grid2world = np.eye(dim + 1) if moving_grid2world is None: moving_grid2world = np.eye(dim + 1) c_static = tuple((np.array(static.shape, dtype=np.float64)) * 0.5) c_static = static_grid2world.dot(c_static + (1,)) c_moving = tuple((np.array(moving.shape, dtype=np.float64)) * 0.5) c_moving = moving_grid2world.dot(c_moving + (1,)) transform = np.eye(dim + 1) transform[:dim, dim] = (c_moving - c_static)[:dim] affine_map = AffineMap(transform, static.shape, static_grid2world, moving.shape, moving_grid2world) 
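    # Return the translation wrapped in an AffineMap: its ``affine`` attribute
    # is what AffineRegistration uses when ``starting_affine='centers'`` is
    # requested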
return affine_map def transform_origins(static, static_grid2world, moving, moving_grid2world): r""" Transformation to align the origins of the input images With "origin" of a volume we mean the physical coordinates of voxel (0,0,0) Parameters ---------- static : array, shape (S, R, C) static image static_grid2world : array, shape (dim+1, dim+1) the voxel-to-space transformation of the static image moving : array, shape (S, R, C) moving image moving_grid2world : array, shape (dim+1, dim+1) the voxel-to-space transformation of the moving image Returns ------- affine_map : instance of AffineMap the affine transformation (translation only, in this case) aligning the origin of the moving image towards the one of the static image """ dim = len(static.shape) if static_grid2world is None: static_grid2world = np.eye(dim + 1) if moving_grid2world is None: moving_grid2world = np.eye(dim + 1) c_static = static_grid2world[:dim, dim] c_moving = moving_grid2world[:dim, dim] transform = np.eye(dim + 1) transform[:dim, dim] = (c_moving - c_static)[:dim] affine_map = AffineMap(transform, static.shape, static_grid2world, moving.shape, moving_grid2world) return affine_map dipy-0.13.0/dipy/align/imwarp.py000066400000000000000000001716601317371701200164650ustar00rootroot00000000000000""" Classes and functions for Symmetric Diffeomorphic Registration """ from __future__ import print_function import abc from dipy.utils.six import with_metaclass import numpy as np import numpy.linalg as npl import scipy as sp import nibabel as nib from dipy.align import vector_fields as vfu from dipy.align import floating from dipy.align import VerbosityLevels from dipy.align import Bunch from dipy.align.scalespace import ScaleSpace RegistrationStages = Bunch(INIT_START=0, INIT_END=1, OPT_START=2, OPT_END=3, SCALE_START=4, SCALE_END=5, ITER_START=6, ITER_END=7) r"""Registration Stages This enum defines the different stages which the Volumetric Registration may be in. The value of the stage is passed as a parameter to the call-back function so that it can react accordingly. INIT_START: optimizer initialization starts INIT_END: optimizer initialization ends OPT_START: optimization starts OPT_END: optimization ends SCALE_START: optimization at a new scale space resolution starts SCALE_END: optimization at the current scale space resolution ends ITER_START: a new iteration starts ITER_END: the current iteration ends """ def mult_aff(A, B): r"""Returns the matrix product A.dot(B) considering None as the identity Parameters ---------- A : array, shape (n,k) B : array, shape (k,m) Returns ------- The matrix product A.dot(B). If any of the input matrices is None, it is treated as the identity matrix. If both matrices are None, None is returned """ if A is None: return B elif B is None: return A return A.dot(B) def get_direction_and_spacings(affine, dim): r"""Extracts the rotational and spacing components from a matrix Extracts the rotational and spacing (voxel dimensions) components from a matrix. An image gradient represents the local variation of the image's gray values per voxel. Since we are iterating on the physical space, we need to compute the gradients as variation per millimeter, so we need to divide each gradient's component by the voxel size along the corresponding axis, that's what the spacings are used for. Since the image's gradients are oriented along the grid axes, we also need to re-orient the gradients to be given in physical space coordinates. 
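    Concretely, if $A$ is the top-left dim-by-dim block of `affine` and $s$ is
    the vector of voxel sizes extracted from it, the function returns the
    direction matrix $A * diag(1/s)$ together with the spacings $s$.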
Parameters ---------- affine : array, shape (k, k), k = 3, 4 the matrix transforming grid coordinates to physical space. Returns ------- direction : array, shape (k-1, k-1) the rotational component of the input matrix spacings : array, shape (k-1,) the scaling component (voxel size) of the matrix """ if affine is None: return np.eye(dim), np.ones(dim) dim = affine.shape[1]-1 # Temporary hack: get the zooms by building a nifti image affine4x4 = np.eye(4) empty_volume = np.zeros((0, 0, 0)) affine4x4[:dim, :dim] = affine[:dim, :dim] affine4x4[:dim, 3] = affine[:dim, dim-1] nib_nifti = nib.Nifti1Image(empty_volume, affine4x4) scalings = np.asarray(nib_nifti.header.get_zooms()) scalings = np.asarray(scalings[:dim], dtype=np.float64) A = affine[:dim, :dim] return A.dot(np.diag(1.0/scalings)), scalings class DiffeomorphicMap(object): def __init__(self, dim, disp_shape, disp_grid2world=None, domain_shape=None, domain_grid2world=None, codomain_shape=None, codomain_grid2world=None, prealign=None): r""" DiffeomorphicMap Implements a diffeomorphic transformation on the physical space. The deformation fields encoding the direct and inverse transformations share the same domain discretization (both the discretization grid shape and voxel-to-space matrix). The input coordinates (physical coordinates) are first aligned using prealign, and then displaced using the corresponding vector field interpolated at the aligned coordinates. Parameters ---------- dim : int, 2 or 3 the transformation's dimension disp_shape : array, shape (dim,) the number of slices (if 3D), rows and columns of the deformation field's discretization disp_grid2world : the voxel-to-space transform between the def. fields grid and space domain_shape : array, shape (dim,) the number of slices (if 3D), rows and columns of the default discretizatio of this map's domain domain_grid2world : array, shape (dim+1, dim+1) the default voxel-to-space transformation between this map's discretization and physical space codomain_shape : array, shape (dim,) the number of slices (if 3D), rows and columns of the images that are 'normally' warped using this transformation in the forward direction (this will provide default transformation parameters to warp images under this transformation). By default, we assume that the inverse transformation is 'normally' used to warp images with the same discretization and voxel-to-space transformation as the deformation field grid. codomain_grid2world : array, shape (dim+1, dim+1) the voxel-to-space transformation of images that are 'normally' warped using this transformation (in the forward direction). prealign : array, shape (dim+1, dim+1) the linear transformation to be applied to align input images to the reference space before warping under the deformation field. 
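        Notes
        -----
        A minimal construction sketch (the shapes and affines below are
        illustrative assumptions, not values required by the class)::

            import numpy as np
            # identity map over a 64x64x64 grid, no pre-alignment
            dmap = DiffeomorphicMap(3, (64, 64, 64), np.eye(4),
                                    (64, 64, 64), np.eye(4),
                                    (64, 64, 64), np.eye(4))
            dmap.allocate()  # zero displacement fields, i.e. the identity warp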
""" self.dim = dim if(disp_shape is None): raise ValueError("Invalid displacement field discretization") self.disp_shape = np.asarray(disp_shape, dtype=np.int32) # If the discretization affine is None, we assume it's the identity self.disp_grid2world = disp_grid2world if(self.disp_grid2world is None): self.disp_world2grid = None else: self.disp_world2grid = npl.inv(self.disp_grid2world) # If domain_shape isn't provided, we use the map's discretization shape if(domain_shape is None): self.domain_shape = self.disp_shape else: self.domain_shape = np.asarray(domain_shape, dtype=np.int32) self.domain_grid2world = domain_grid2world if(domain_grid2world is None): self.domain_world2grid = None else: self.domain_world2grid = npl.inv(domain_grid2world) # If codomain shape was not provided, we assume it is an endomorphism: # use the same domain_shape and codomain_grid2world as the field domain if codomain_shape is None: self.codomain_shape = self.domain_shape else: self.codomain_shape = np.asarray(codomain_shape, dtype=np.int32) self.codomain_grid2world = codomain_grid2world if codomain_grid2world is None: self.codomain_world2grid = None else: self.codomain_world2grid = npl.inv(codomain_grid2world) self.prealign = prealign if prealign is None: self.prealign_inv = None else: self.prealign_inv = npl.inv(prealign) self.is_inverse = False self.forward = None self.backward = None def interpret_matrix(self, obj): ''' Try to interpret `obj` as a matrix Some operations are performed faster if we know in advance if a matrix is the identity (so we can skip the actual matrix-vector multiplication). This function returns None if the given object is None or the 'identity' string. It returns the same object if it is a numpy array. It raises an exception otherwise. Parameters ---------- obj : object any object Returns ---------- obj : object the same object given as argument if `obj` is None or a numpy array. None if `obj` is the 'identity' string. ''' if (obj is None) or isinstance(obj, np.ndarray): return obj if isinstance(obj, str) and (obj == 'identity'): return None raise ValueError('Invalid matrix') def get_forward_field(self): r"""Deformation field to transform an image in the forward direction Returns the deformation field that must be used to warp an image under this transformation in the forward direction (note the 'is_inverse' flag). """ if self.is_inverse: return self.backward else: return self.forward def get_backward_field(self): r"""Deformation field to transform an image in the backward direction Returns the deformation field that must be used to warp an image under this transformation in the backward direction (note the 'is_inverse' flag). """ if self.is_inverse: return self.forward else: return self.backward def allocate(self): r"""Creates a zero displacement field Creates a zero displacement field (the identity transformation). 
""" self.forward = np.zeros(tuple(self.disp_shape) + (self.dim,), dtype=floating) self.backward = np.zeros(tuple(self.disp_shape) + (self.dim,), dtype=floating) def _get_warping_function(self, interpolation): r"""Appropriate warping function for the given interpolation type Returns the right warping function from vector_fields that must be called for the specified data dimension and interpolation type """ if self.dim == 2: if interpolation == 'linear': return vfu.warp_2d else: return vfu.warp_2d_nn else: if interpolation == 'linear': return vfu.warp_3d else: return vfu.warp_3d_nn def _warp_forward(self, image, interpolation='linear', image_world2grid=None, out_shape=None, out_grid2world=None): r"""Warps an image in the forward direction Deforms the input image under this diffeomorphic map in the forward direction. Since the mapping is defined in the physical space, the user must specify the sampling grid shape and its space-to-voxel mapping. By default, the transformation will use the discretization information given at initialization. Parameters ---------- image : array, shape (s, r, c) if dim = 3 or (r, c) if dim = 2 the image to be warped under this transformation in the forward direction interpolation : string, either 'linear' or 'nearest' the type of interpolation to be used for warping, either 'linear' (for k-linear interpolation) or 'nearest' for nearest neighbor image_world2grid : array, shape (dim+1, dim+1) the transformation bringing world (space) coordinates to voxel coordinates of the image given as input out_shape : array, shape (dim,) the number of slices, rows and columns of the desired warped image out_grid2world : the transformation bringing voxel coordinates of the warped image to physical space Returns ------- warped : array, shape = out_shape or self.codomain_shape if None the warped image under this transformation in the forward direction Notes ----- A diffeomorphic map must be thought as a mapping between points in space. Warping an image J towards an image I means transforming each voxel with (discrete) coordinates i in I to (floating-point) voxel coordinates j in J. The transformation we consider 'forward' is precisely mapping coordinates i from the input image to coordinates j from reference image, which has the effect of warping an image with reference discretization (typically, the "static image") "towards" an image with input discretization (typically, the "moving image"). More precisely, the warped image is produced by the following interpolation: warped[i] = image[W * forward[Dinv * P * S * i] + W * P * S * i )] where i denotes the coordinates of a voxel in the input grid, W is the world-to-grid transformation of the image given as input, Dinv is the world-to-grid transformation of the deformation field discretization, P is the pre-aligning matrix (transforming input points to reference points), S is the voxel-to-space transformation of the sampling grid (see comment below) and forward is the forward deformation field. If we want to warp an image, we also must specify on what grid we want to sample the resulting warped image (the images are considered as points in space and its representation on a grid depends on its grid-to-space transform telling us for each grid voxel what point in space we need to bring via interpolation). So, S is the matrix that converts the sampling grid (whose shape is given as parameter 'out_shape' ) to space coordinates. 
""" # if no world-to-image transform is provided, we use the codomain info if image_world2grid is None: image_world2grid = self.codomain_world2grid # if no sampling info is provided, we use the domain info if out_shape is None: if self.domain_shape is None: raise ValueError('Unable to infer sampling info. ' 'Provide a valid out_shape.') out_shape = self.domain_shape else: out_shape = np.asarray(out_shape, dtype=np.int32) if out_grid2world is None: out_grid2world = self.domain_grid2world W = self.interpret_matrix(image_world2grid) Dinv = self.disp_world2grid P = self.prealign S = self.interpret_matrix(out_grid2world) # this is the matrix which we need to multiply the voxel coordinates # to interpolate on the forward displacement field ("in"side the # 'forward' brackets in the expression above) affine_idx_in = mult_aff(Dinv, mult_aff(P, S)) # this is the matrix which we need to multiply the voxel coordinates # to add to the displacement ("out"side the 'forward' brackets in the # expression above) affine_idx_out = mult_aff(W, mult_aff(P, S)) # this is the matrix which we need to multiply the displacement vector # prior to adding to the transformed input point affine_disp = W # Convert the data to required types to use the cythonized functions if interpolation == 'nearest': if image.dtype is np.dtype('float64') and floating is np.float32: image = image.astype(floating) elif image.dtype is np.dtype('int64'): image = image.astype(np.int32) else: image = np.asarray(image, dtype=floating) warp_f = self._get_warping_function(interpolation) warped = warp_f(image, self.forward, affine_idx_in, affine_idx_out, affine_disp, out_shape) return warped def _warp_backward(self, image, interpolation='linear', image_world2grid=None, out_shape=None, out_grid2world=None): r"""Warps an image in the backward direction Deforms the input image under this diffeomorphic map in the backward direction. Since the mapping is defined in the physical space, the user must specify the sampling grid shape and its space-to-voxel mapping. By default, the transformation will use the discretization information given at initialization. Parameters ---------- image : array, shape (s, r, c) if dim = 3 or (r, c) if dim = 2 the image to be warped under this transformation in the backward direction interpolation : string, either 'linear' or 'nearest' the type of interpolation to be used for warping, either 'linear' (for k-linear interpolation) or 'nearest' for nearest neighbor image_world2grid : array, shape (dim+1, dim+1) the transformation bringing world (space) coordinates to voxel coordinates of the image given as input out_shape : array, shape (dim,) the number of slices, rows and columns of the desired warped image out_grid2world : the transformation bringing voxel coordinates of the warped image to physical space Returns ------- warped : array, shape = out_shape or self.domain_shape if None the warped image under this transformation in the backward direction Notes ----- A diffeomorphic map must be thought as a mapping between points in space. Warping an image J towards an image I means transforming each voxel with (discrete) coordinates i in I to (floating-point) voxel coordinates j in J. The transformation we consider 'backward' is precisely mapping coordinates i from the reference grid to coordinates j from the input image (that's why it's "backward"), which has the effect of warping the input image (moving) "towards" the reference. 
More precisely, the warped image is produced by the following interpolation: warped[i]=image[W * Pinv * backward[Dinv * S * i] + W * Pinv * S * i )] where i denotes the coordinates of a voxel in the input grid, W is the world-to-grid transformation of the image given as input, Dinv is the world-to-grid transformation of the deformation field discretization, Pinv is the pre-aligning matrix's inverse (transforming reference points to input points), S is the grid-to-space transformation of the sampling grid (see comment below) and backward is the backward deformation field. If we want to warp an image, we also must specify on what grid we want to sample the resulting warped image (the images are considered as points in space and its representation on a grid depends on its grid-to-space transform telling us for each grid voxel what point in space we need to bring via interpolation). So, S is the matrix that converts the sampling grid (whose shape is given as parameter 'out_shape' ) to space coordinates. """ # if no world-to-image transform is provided, we use the domain info if image_world2grid is None: image_world2grid = self.domain_world2grid # if no sampling info is provided, we use the codomain info if out_shape is None: if self.codomain_shape is None: msg = 'Unknown sampling info. Provide a valid out_shape.' raise ValueError(msg) out_shape = self.codomain_shape if out_grid2world is None: out_grid2world = self.codomain_grid2world W = self.interpret_matrix(image_world2grid) Dinv = self.disp_world2grid Pinv = self.prealign_inv S = self.interpret_matrix(out_grid2world) # this is the matrix which we need to multiply the voxel coordinates # to interpolate on the backward displacement field ("in"side the # 'backward' brackets in the expression above) affine_idx_in = mult_aff(Dinv, S) # this is the matrix which we need to multiply the voxel coordinates # to add to the displacement ("out"side the 'backward' brackets in the # expression above) affine_idx_out = mult_aff(W, mult_aff(Pinv, S)) # this is the matrix which we need to multiply the displacement vector # prior to adding to the transformed input point affine_disp = mult_aff(W, Pinv) if interpolation == 'nearest': if image.dtype is np.dtype('float64') and floating is np.float32: image = image.astype(floating) elif image.dtype is np.dtype('int64'): image = image.astype(np.int32) else: image = np.asarray(image, dtype=floating) warp_f = self._get_warping_function(interpolation) warped = warp_f(image, self.backward, affine_idx_in, affine_idx_out, affine_disp, out_shape) return warped def transform(self, image, interpolation='linear', image_world2grid=None, out_shape=None, out_grid2world=None): r"""Warps an image in the forward direction Transforms the input image under this transformation in the forward direction. It uses the "is_inverse" flag to switch between "forward" and "backward" (if is_inverse is False, then transform(...) warps the image forwards, else it warps the image backwards). 
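        As a usage sketch (assuming ``mapping`` is a DiffeomorphicMap returned
        by a registration, with ``is_inverse`` False, and ``moving`` /
        ``static`` are the corresponding images): ``mapping.transform(moving)``
        brings the moving image towards the static one, while
        ``mapping.transform_inverse(static)`` brings the static image towards
        the moving one.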
Parameters ---------- image : array, shape (s, r, c) if dim = 3 or (r, c) if dim = 2 the image to be warped under this transformation in the forward direction interpolation : string, either 'linear' or 'nearest' the type of interpolation to be used for warping, either 'linear' (for k-linear interpolation) or 'nearest' for nearest neighbor image_world2grid : array, shape (dim+1, dim+1) the transformation bringing world (space) coordinates to voxel coordinates of the image given as input out_shape : array, shape (dim,) the number of slices, rows and columns of the desired warped image out_grid2world : the transformation bringing voxel coordinates of the warped image to physical space Returns ------- warped : array, shape = out_shape or self.codomain_shape if None the warped image under this transformation in the forward direction Notes ----- See _warp_forward and _warp_backward documentation for further information. """ if out_shape is not None: out_shape = np.asarray(out_shape, dtype=np.int32) if self.is_inverse: warped = self._warp_backward(image, interpolation, image_world2grid, out_shape, out_grid2world) else: warped = self._warp_forward(image, interpolation, image_world2grid, out_shape, out_grid2world) return np.asarray(warped) def transform_inverse(self, image, interpolation='linear', image_world2grid=None, out_shape=None, out_grid2world=None): r"""Warps an image in the backward direction Transforms the input image under this transformation in the backward direction. It uses the "is_inverse" flag to switch between "forward" and "backward" (if is_inverse is False, then transform_inverse(...) warps the image backwards, else it warps the image forwards) Parameters ---------- image : array, shape (s, r, c) if dim = 3 or (r, c) if dim = 2 the image to be warped under this transformation in the forward direction interpolation : string, either 'linear' or 'nearest' the type of interpolation to be used for warping, either 'linear' (for k-linear interpolation) or 'nearest' for nearest neighbor image_world2grid : array, shape (dim+1, dim+1) the transformation bringing world (space) coordinates to voxel coordinates of the image given as input out_shape : array, shape (dim,) the number of slices, rows and columns of the desired warped image out_grid2world : the transformation bringing voxel coordinates of the warped image to physical space Returns ------- warped : array, shape = out_shape or self.codomain_shape if None warped image under this transformation in the backward direction Notes ----- See _warp_forward and _warp_backward documentation for further information. """ if self.is_inverse: warped = self._warp_forward(image, interpolation, image_world2grid, out_shape, out_grid2world) else: warped = self._warp_backward(image, interpolation, image_world2grid, out_shape, out_grid2world) return np.asarray(warped) def inverse(self): r"""Inverse of this DiffeomorphicMap instance Returns a diffeomorphic map object representing the inverse of this transformation. The internal arrays are not copied but just referenced. Returns ------- inv : DiffeomorphicMap object the inverse of this diffeomorphic map. 
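        Notes
        -----
        A sketch (assuming ``dmap`` is a DiffeomorphicMap whose ``is_inverse``
        flag is False): ``dmap.inverse().transform(img)`` produces the same
        result as ``dmap.transform_inverse(img)``, since only the
        ``is_inverse`` flag is flipped and the displacement fields are shared.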
""" inv = DiffeomorphicMap(self.dim, self.disp_shape, self.disp_grid2world, self.domain_shape, self.domain_grid2world, self.codomain_shape, self.codomain_grid2world, self.prealign) inv.forward = self.forward inv.backward = self.backward inv.is_inverse = True return inv def expand_fields(self, expand_factors, new_shape): r"""Expands the displacement fields from current shape to new_shape Up-samples the discretization of the displacement fields to be of new_shape shape. Parameters ---------- expand_factors : array, shape (dim,) the factors scaling current spacings (voxel sizes) to spacings in the expanded discretization. new_shape : array, shape (dim,) the shape of the arrays holding the up-sampled discretization """ if self.dim == 2: expand_f = vfu.resample_displacement_field_2d else: expand_f = vfu.resample_displacement_field_3d expanded_forward = expand_f(self.forward, expand_factors, new_shape) expanded_backward = expand_f(self.backward, expand_factors, new_shape) expand_factors = np.append(expand_factors, [1]) expanded_grid2world = mult_aff(self.disp_grid2world, np.diag(expand_factors)) expanded_world2grid = npl.inv(expanded_grid2world) self.forward = expanded_forward self.backward = expanded_backward self.disp_shape = new_shape self.disp_grid2world = expanded_grid2world self.disp_world2grid = expanded_world2grid def compute_inversion_error(self): r"""Inversion error of the displacement fields Estimates the inversion error of the displacement fields by computing statistics of the residual vectors obtained after composing the forward and backward displacement fields. Returns ------- residual : array, shape (R, C) or (S, R, C) the displacement field resulting from composing the forward and backward displacement fields of this transformation (the residual should be zero for a perfect diffeomorphism) stats : array, shape (3,) statistics from the norms of the vectors of the residual displacement field: maximum, mean and standard deviation Notes ----- Since the forward and backward displacement fields have the same discretization, the final composition is given by comp[i] = forward[ i + Dinv * backward[i]] where Dinv is the space-to-grid transformation of the displacement fields """ Dinv = self.disp_world2grid if self.dim == 2: compose_f = vfu.compose_vector_fields_2d else: compose_f = vfu.compose_vector_fields_3d residual, stats = compose_f(self.backward, self.forward, None, Dinv, 1.0, None) return np.asarray(residual), np.asarray(stats) def shallow_copy(self): r"""Shallow copy of this DiffeomorphicMap instance Creates a shallow copy of this diffeomorphic map (the arrays are not copied but just referenced) Returns ------- new_map : DiffeomorphicMap object the shallow copy of this diffeomorphic map """ new_map = DiffeomorphicMap(self.dim, self.disp_shape, self.disp_grid2world, self.domain_shape, self.domain_grid2world, self.codomain_shape, self.codomain_grid2world, self.prealign) new_map.forward = self.forward new_map.backward = self.backward new_map.is_inverse = self.is_inverse return new_map def warp_endomorphism(self, phi): r"""Composition of this DiffeomorphicMap with a given endomorphism Creates a new DiffeomorphicMap C with the same properties as self and composes its displacement fields with phi's corresponding fields. The resulting diffeomorphism is of the form C(x) = phi(self(x)) with inverse C^{-1}(y) = self^{-1}(phi^{-1}(y)). 
We assume that phi is an endomorphism with the same discretization and domain affine as self to ensure that the composition inherits self's properties (we also assume that the pre-aligning matrix of phi is None or identity). Parameters ---------- phi : DiffeomorphicMap object the endomorphism to be warped by this diffeomorphic map Returns ------- composition : the composition of this diffeomorphic map with the endomorphism given as input Notes ----- The problem with our current representation of a DiffeomorphicMap is that the set of Diffeomorphism that can be represented this way (a pre-aligning matrix followed by a non-linear endomorphism given as a displacement field) is not closed under the composition operation. Supporting a general DiffeomorphicMap class, closed under composition, may be extremely costly computationally, and the kind of transformations we actually need for Avants' mid-point algorithm (SyN) are much simpler. """ # Compose the forward deformation fields d1 = self.get_forward_field() d2 = phi.get_forward_field() d1_inv = self.get_backward_field() d2_inv = phi.get_backward_field() premult_disp = self.disp_world2grid if self.dim == 2: compose_f = vfu.compose_vector_fields_2d else: compose_f = vfu.compose_vector_fields_3d forward, stats = compose_f(d1, d2, None, premult_disp, 1.0, None) backward, stats, = compose_f(d2_inv, d1_inv, None, premult_disp, 1.0, None) composition = self.shallow_copy() composition.forward = forward composition.backward = backward return composition def get_simplified_transform(self): r""" Constructs a simplified version of this Diffeomorhic Map The simplified version incorporates the pre-align transform, as well as the domain and codomain affine transforms into the displacement field. The resulting transformation may be regarded as operating on the image spaces given by the domain and codomain discretization. As a result, self.prealign, self.disp_grid2world, self.domain_grid2world and self.codomain affine will be None (denoting Identity) in the resulting diffeomorphic map. 
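        A usage sketch (assuming ``dmap`` was obtained from a registration):
        ``simple = dmap.get_simplified_transform()`` yields a map that can
        warp images without supplying pre-align or grid-to-world information,
        because those transforms have been folded into the new displacement
        fields.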
""" if self.dim == 2: simplify_f = vfu.simplify_warp_function_2d else: simplify_f = vfu.simplify_warp_function_3d # Simplify the forward transform D = self.domain_grid2world P = self.prealign Rinv = self.disp_world2grid Cinv = self.codomain_world2grid # this is the matrix which we need to multiply the voxel coordinates # to interpolate on the forward displacement field ("in"side the # 'forward' brackets in the expression above) affine_idx_in = mult_aff(Rinv, mult_aff(P, D)) # this is the matrix which we need to multiply the voxel coordinates # to add to the displacement ("out"side the 'forward' brackets in the # expression above) affine_idx_out = mult_aff(Cinv, mult_aff(P, D)) # this is the matrix which we need to multiply the displacement vector # prior to adding to the transformed input point affine_disp = Cinv new_forward = simplify_f(self.forward, affine_idx_in, affine_idx_out, affine_disp, self.domain_shape) # Simplify the backward transform C = self.codomain_world2grid Pinv = self.prealign_inv Dinv = self.domain_world2grid affine_idx_in = mult_aff(Rinv, C) affine_idx_out = mult_aff(Dinv, mult_aff(Pinv, C)) affine_disp = mult_aff(Dinv, Pinv) new_backward = simplify_f(self.backward, affine_idx_in, affine_idx_out, affine_disp, self.codomain_shape) simplified = DiffeomorphicMap(self.dim, self.disp_shape, None, self.domain_shape, None, self.codomain_shape, None, None) simplified.forward = new_forward simplified.backward = new_backward return simplified class DiffeomorphicRegistration(with_metaclass(abc.ABCMeta, object)): def __init__(self, metric=None): r""" Diffeomorphic Registration This abstract class defines the interface to be implemented by any optimization algorithm for diffeomorphic registration. Parameters ---------- metric : SimilarityMetric object the object measuring the similarity of the two images. The registration algorithm will minimize (or maximize) the provided similarity. """ if metric is None: raise ValueError('The metric cannot be None') self.metric = metric self.dim = metric.dim def set_level_iters(self, level_iters): r"""Sets the number of iterations at each pyramid level Establishes the maximum number of iterations to be performed at each level of the Gaussian pyramid, similar to ANTS. Parameters ---------- level_iters : list the number of iterations at each level of the Gaussian pyramid. level_iters[0] corresponds to the finest level, level_iters[n-1] the coarsest, where n is the length of the list """ self.levels = len(level_iters) if level_iters else 0 self.level_iters = level_iters @abc.abstractmethod def optimize(self): r"""Starts the metric optimization This is the main function each specialized class derived from this must implement. Upon completion, the deformation field must be available from the forward transformation model. """ @abc.abstractmethod def get_map(self): r""" Returns the resulting diffeomorphic map after optimization """ class SymmetricDiffeomorphicRegistration(DiffeomorphicRegistration): def __init__(self, metric, level_iters=None, step_length=0.25, ss_sigma_factor=0.2, opt_tol=1e-5, inv_iter=20, inv_tol=1e-3, callback=None): r""" Symmetric Diffeomorphic Registration (SyN) Algorithm Performs the multi-resolution optimization algorithm for non-linear registration using a given similarity metric. 
Parameters ---------- metric : SimilarityMetric object the metric to be optimized level_iters : list of int the number of iterations at each level of the Gaussian Pyramid (the length of the list defines the number of pyramid levels to be used) opt_tol : float the optimization will stop when the estimated derivative of the energy profile w.r.t. time falls below this threshold inv_iter : int the number of iterations to be performed by the displacement field inversion algorithm step_length : float the length of the maximum displacement vector of the update displacement field at each iteration ss_sigma_factor : float parameter of the scale-space smoothing kernel. For example, the std. dev. of the kernel will be factor*(2^i) in the isotropic case where i = 0, 1, ..., n_scales is the scale inv_tol : float the displacement field inversion algorithm will stop iterating when the inversion error falls below this threshold callback : function(SymmetricDiffeomorphicRegistration) a function receiving a SymmetricDiffeomorphicRegistration object to be called after each iteration (this optimizer will call this function passing self as parameter) """ super(SymmetricDiffeomorphicRegistration, self).__init__(metric) if level_iters is None: level_iters = [100, 100, 25] if len(level_iters) == 0: raise ValueError('The iterations list cannot be empty') self.set_level_iters(level_iters) self.step_length = step_length self.ss_sigma_factor = ss_sigma_factor self.opt_tol = opt_tol self.inv_tol = inv_tol self.inv_iter = inv_iter self.energy_window = 12 self.energy_list = [] self.full_energy_profile = [] self.verbosity = VerbosityLevels.STATUS self.callback = callback self.moving_ss = None self.static_ss = None self.static_direction = None self.moving_direction = None self.mask0 = metric.mask0 def update(self, current_displacement, new_displacement, disp_world2grid, time_scaling): r"""Composition of the current displacement field with the given field Interpolates new displacement at the locations defined by current_displacement. Equivalently, computes the composition C of the given displacement fields as C(x) = B(A(x)), where A is current_displacement and B is new_displacement. This function is intended to be used with deformation fields of the same sampling (e.g. to be called by a registration algorithm). Parameters ---------- current_displacement : array, shape (R', C', 2) or (S', R', C', 3) the displacement field defining where to interpolate new_displacement new_displacement : array, shape (R, C, 2) or (S, R, C, 3) the displacement field to be warped by current_displacement disp_world2grid : array, shape (dim+1, dim+1) the space-to-grid transform associated with the displacements' grid (we assume that both displacements are discretized over the same grid) time_scaling : float scaling factor applied to d2. The effect may be interpreted as moving d1 displacements along a factor (`time_scaling`) of d2. 
Returns ------- updated : array, shape (the same as new_displacement) the warped displacement field mean_norm : the mean norm of all vectors in current_displacement """ sq_field = np.sum((np.array(current_displacement) ** 2), -1) mean_norm = np.sqrt(sq_field).mean() # We assume that both displacement fields have the same # grid2world transform, which implies premult_index=Identity # and premult_disp is the world2grid transform associated with # the displacements' grid self.compose(current_displacement, new_displacement, None, disp_world2grid, time_scaling, current_displacement) return np.array(current_displacement), np.array(mean_norm) def get_map(self): r"""Returns the resulting diffeomorphic map Returns the DiffeomorphicMap registering the moving image towards the static image. """ return self.static_to_ref def _connect_functions(self): r"""Assign the methods to be called according to the image dimension Assigns the appropriate functions to be called for displacement field inversion, Gaussian pyramid, and affine / dense deformation composition according to the dimension of the input images e.g. 2D or 3D. """ if self.dim == 2: self.invert_vector_field = vfu.invert_vector_field_fixed_point_2d self.compose = vfu.compose_vector_fields_2d else: self.invert_vector_field = vfu.invert_vector_field_fixed_point_3d self.compose = vfu.compose_vector_fields_3d def _init_optimizer(self, static, moving, static_grid2world, moving_grid2world, prealign): r"""Initializes the registration optimizer Initializes the optimizer by computing the scale space of the input images and allocating the required memory for the transformation models at the coarsest scale. Parameters ---------- static : array, shape (S, R, C) or (R, C) the image to be used as reference during optimization. The displacement fields will have the same discretization as the static image. moving : array, shape (S, R, C) or (R, C) the image to be used as "moving" during optimization. Since the deformation fields' discretization is the same as the static image, it is necessary to pre-align the moving image to ensure its domain lies inside the domain of the deformation fields. This is assumed to be accomplished by "pre-aligning" the moving image towards the static using an affine transformation given by the 'prealign' matrix static_grid2world : array, shape (dim+1, dim+1) the voxel-to-space transformation associated to the static image moving_grid2world : array, shape (dim+1, dim+1) the voxel-to-space transformation associated to the moving image prealign : array, shape (dim+1, dim+1) the affine transformation (operating on the physical space) pre-aligning the moving image towards the static """ self._connect_functions() # Extract information from affine matrices to create the scale space static_direction, static_spacing = \ get_direction_and_spacings(static_grid2world, self.dim) moving_direction, moving_spacing = \ get_direction_and_spacings(moving_grid2world, self.dim) # the images' directions don't change with scale self.static_direction = np.eye(self.dim + 1) self.moving_direction = np.eye(self.dim + 1) self.static_direction[:self.dim, :self.dim] = static_direction self.moving_direction[:self.dim, :self.dim] = moving_direction # Build the scale space of the input images if self.verbosity >= VerbosityLevels.DIAGNOSE: print('Applying zero mask: ' + str(self.mask0)) if self.verbosity >= VerbosityLevels.STATUS: print('Creating scale space from the moving image. Levels: %d. ' 'Sigma factor: %f.' 
% (self.levels, self.ss_sigma_factor)) self.moving_ss = ScaleSpace(moving, self.levels, moving_grid2world, moving_spacing, self.ss_sigma_factor, self.mask0) if self.verbosity >= VerbosityLevels.STATUS: print('Creating scale space from the static image. Levels: %d. ' 'Sigma factor: %f.' % (self.levels, self.ss_sigma_factor)) self.static_ss = ScaleSpace(static, self.levels, static_grid2world, static_spacing, self.ss_sigma_factor, self.mask0) if self.verbosity >= VerbosityLevels.DEBUG: print('Moving scale space:') for level in range(self.levels): self.moving_ss.print_level(level) print('Static scale space:') for level in range(self.levels): self.static_ss.print_level(level) # Get the properties of the coarsest level from the static image. These # properties will be taken as the reference discretization. disp_shape = self.static_ss.get_domain_shape(self.levels-1) disp_grid2world = self.static_ss.get_affine(self.levels-1) # The codomain discretization of both diffeomorphic maps is # precisely the discretization of the static image codomain_shape = static.shape codomain_grid2world = static_grid2world # The forward model transforms points from the static image # to points on the reference (which is the static as well). So the # domain properties are taken from the static image. Since its the same # as the reference, we don't need to pre-align. domain_shape = static.shape domain_grid2world = static_grid2world self.static_to_ref = DiffeomorphicMap(self.dim, disp_shape, disp_grid2world, domain_shape, domain_grid2world, codomain_shape, codomain_grid2world, None) self.static_to_ref.allocate() # The backward model transforms points from the moving image # to points on the reference (which is the static). So the input # properties are taken from the moving image, and we need to pre-align # points on the moving physical space to the reference physical space # by applying the inverse of pre-align. This is done this way to make # it clear for the user: the pre-align matrix is usually obtained by # doing affine registration of the moving image towards the static # image, which results in a matrix transforming points in the static # physical space to points in the moving physical space prealign_inv = None if prealign is None else npl.inv(prealign) domain_shape = moving.shape domain_grid2world = moving_grid2world self.moving_to_ref = DiffeomorphicMap(self.dim, disp_shape, disp_grid2world, domain_shape, domain_grid2world, codomain_shape, codomain_grid2world, prealign_inv) self.moving_to_ref.allocate() def _end_optimizer(self): r"""Frees the resources allocated during initialization """ del self.moving_ss del self.static_ss def _iterate(self): r"""Performs one symmetric iteration Performs one iteration of the SyN algorithm: 1.Compute forward 2.Compute backward 3.Update forward 4.Update backward 5.Compute inverses 6.Invert the inverses Returns ------- der : float the derivative of the energy profile, computed by fitting a quadratic function to the energy values at the latest T iterations, where T = self.energy_window. If the current iteration is less than T then np.inf is returned instead. 
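        Notes
        -----
        The forward and backward steps returned by the metric are rescaled so
        that their largest displacement (measured in voxels) is one, and the
        composition performed by ``self.update`` then applies them scaled by
        ``self.step_length``.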
""" # Acquire current resolution information from scale spaces current_moving = self.moving_ss.get_image(self.current_level) current_static = self.static_ss.get_image(self.current_level) current_disp_shape = \ self.static_ss.get_domain_shape(self.current_level) current_disp_grid2world = \ self.static_ss.get_affine(self.current_level) current_disp_world2grid = \ self.static_ss.get_affine_inv(self.current_level) current_disp_spacing = \ self.static_ss.get_spacing(self.current_level) # Warp the input images (smoothed to the current scale) to the common # (reference) space at the current resolution wstatic = self.static_to_ref.transform_inverse(current_static, 'linear', None, current_disp_shape, current_disp_grid2world) wmoving = self.moving_to_ref.transform_inverse(current_moving, 'linear', None, current_disp_shape, current_disp_grid2world) # Pass both images to the metric. Now both images are sampled on the # reference grid (equal to the static image's grid) and the direction # doesn't change across scales self.metric.set_moving_image(wmoving, current_disp_grid2world, current_disp_spacing, self.static_direction) self.metric.use_moving_image_dynamics( current_moving, self.moving_to_ref.inverse()) self.metric.set_static_image(wstatic, current_disp_grid2world, current_disp_spacing, self.static_direction) self.metric.use_static_image_dynamics( current_static, self.static_to_ref.inverse()) # Initialize the metric for a new iteration self.metric.initialize_iteration() if self.callback is not None: self.callback(self, RegistrationStages.ITER_START) # Compute the forward step (to be used to update the forward transform) fw_step = np.array(self.metric.compute_forward()) # set zero displacements at the boundary fw_step[0, ...] = 0 fw_step[:, 0, ...] = 0 fw_step[-1, ...] = 0 fw_step[:, -1, ...] = 0 if(self.dim == 3): fw_step[:, :, 0, ...] = 0 fw_step[:, :, -1, ...] = 0 # Normalize the forward step nrm = np.sqrt(np.sum((fw_step/current_disp_spacing)**2, -1)).max() if nrm > 0: fw_step /= nrm # Add to current total field self.static_to_ref.forward, md_forward = self.update( self.static_to_ref.forward, fw_step, current_disp_world2grid, self.step_length) del fw_step # Keep track of the forward energy fw_energy = self.metric.get_energy() # Compose backward step (to be used to update the backward transform) bw_step = np.array(self.metric.compute_backward()) # set zero displacements at the boundary bw_step[0, ...] = 0 bw_step[:, 0, ...] = 0 if(self.dim == 3): bw_step[:, :, 0, ...] 
= 0 # Normalize the backward step nrm = np.sqrt(np.sum((bw_step/current_disp_spacing) ** 2, -1)).max() if nrm > 0: bw_step /= nrm # Add to current total field self.moving_to_ref.forward, md_backward = self.update( self.moving_to_ref.forward, bw_step, current_disp_world2grid, self.step_length) del bw_step # Keep track of the energy bw_energy = self.metric.get_energy() der = np.inf n_iter = len(self.energy_list) if len(self.energy_list) >= self.energy_window: der = self._get_energy_derivative() if self.verbosity >= VerbosityLevels.DIAGNOSE: ch = '-' if np.isnan(der) else der print('%d:\t%0.6f\t%0.6f\t%0.6f\t%s' % (n_iter, fw_energy, bw_energy, fw_energy + bw_energy, ch)) self.energy_list.append(fw_energy + bw_energy) # Invert the forward model's forward field self.static_to_ref.backward = np.array( self.invert_vector_field( self.static_to_ref.forward, current_disp_world2grid, current_disp_spacing, self.inv_iter, self.inv_tol, self.static_to_ref.backward)) # Invert the backward model's forward field self.moving_to_ref.backward = np.array( self.invert_vector_field( self.moving_to_ref.forward, current_disp_world2grid, current_disp_spacing, self.inv_iter, self.inv_tol, self.moving_to_ref.backward)) # Invert the forward model's backward field self.static_to_ref.forward = np.array( self.invert_vector_field( self.static_to_ref.backward, current_disp_world2grid, current_disp_spacing, self.inv_iter, self.inv_tol, self.static_to_ref.forward)) # Invert the backward model's backward field self.moving_to_ref.forward = np.array( self.invert_vector_field( self.moving_to_ref.backward, current_disp_world2grid, current_disp_spacing, self.inv_iter, self.inv_tol, self.moving_to_ref.forward)) # Free resources no longer needed to compute the forward and backward # steps if self.callback is not None: self.callback(self, RegistrationStages.ITER_END) self.metric.free_iteration() return der def _approximate_derivative_direct(self, x, y): r"""Derivative of the degree-2 polynomial fit of the given x, y pairs Directly computes the derivative of the least-squares-fit quadratic function estimated from (x[...],y[...]) pairs. 
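        The fit is $y = beta_0 * x^2 + beta_1 * x + beta_2$ (solved through
        the normal equations), and the reported derivative is
        $2 * beta_0 * x_0 + beta_1$ evaluated at $x_0 = 0.5 * len(x)$.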
Parameters ---------- x : array, shape (n,) increasing array representing the x-coordinates of the points to be fit y : array, shape (n,) array representing the y-coordinates of the points to be fit Returns ------- y0 : float the estimated derivative at x0 = 0.5*len(x) """ x = np.asarray(x) y = np.asarray(y) X = np.row_stack((x**2, x, np.ones_like(x))) XX = (X).dot(X.T) b = X.dot(y) beta = npl.solve(XX, b) x0 = 0.5 * len(x) y0 = 2.0 * beta[0] * (x0) + beta[1] return y0 def _get_energy_derivative(self): r"""Approximate derivative of the energy profile Returns the derivative of the estimated energy as a function of "time" (iterations) at the last iteration """ n_iter = len(self.energy_list) if n_iter < self.energy_window: raise ValueError('Not enough data to fit the energy profile') x = range(self.energy_window) y = self.energy_list[(n_iter - self.energy_window):n_iter] ss = sum(y) if(ss > 0): ss *= -1 y = [v / ss for v in y] der = self._approximate_derivative_direct(x, y) return der def _optimize(self): r"""Starts the optimization The main multi-scale symmetric optimization algorithm """ self.full_energy_profile = [] if self.callback is not None: self.callback(self, RegistrationStages.OPT_START) for level in range(self.levels - 1, -1, -1): if self.verbosity >= VerbosityLevels.STATUS: print('Optimizing level %d' % level) self.current_level = level self.metric.set_levels_below(self.levels - level) self.metric.set_levels_above(level) if level < self.levels - 1: expand_factors = \ self.static_ss.get_expand_factors(level+1, level) new_shape = self.static_ss.get_domain_shape(level) self.static_to_ref.expand_fields(expand_factors, new_shape) self.moving_to_ref.expand_fields(expand_factors, new_shape) self.niter = 0 self.energy_list = [] derivative = np.inf if self.callback is not None: self.callback(self, RegistrationStages.SCALE_START) while ((self.niter < self.level_iters[self.levels - 1 - level]) and (self.opt_tol < derivative)): derivative = self._iterate() self.niter += 1 self.full_energy_profile.extend(self.energy_list) if self.callback is not None: self.callback(self, RegistrationStages.SCALE_END) # Reporting mean and std in stats[1] and stats[2] residual, stats = self.static_to_ref.compute_inversion_error() if self.verbosity >= VerbosityLevels.DIAGNOSE: print('Static-Reference Residual error: %0.6f (%0.6f)' % (stats[1], stats[2])) residual, stats = self.moving_to_ref.compute_inversion_error() if self.verbosity >= VerbosityLevels.DIAGNOSE: print('Moving-Reference Residual error :%0.6f (%0.6f)' % (stats[1], stats[2])) # Compose the two partial transformations self.static_to_ref = self.moving_to_ref.warp_endomorphism( self.static_to_ref.inverse()).inverse() # Report mean and std for the composed deformation field residual, stats = self.static_to_ref.compute_inversion_error() if self.verbosity >= VerbosityLevels.DIAGNOSE: print('Final residual error: %0.6f (%0.6f)' % (stats[1], stats[2])) if self.callback is not None: self.callback(self, RegistrationStages.OPT_END) def optimize(self, static, moving, static_grid2world=None, moving_grid2world=None, prealign=None): r""" Starts the optimization Parameters ---------- static : array, shape (S, R, C) or (R, C) the image to be used as reference during optimization. The displacement fields will have the same discretization as the static image. moving : array, shape (S, R, C) or (R, C) the image to be used as "moving" during optimization. 
Since the deformation fields' discretization is the same as the static image, it is necessary to pre-align the moving image to ensure its domain lies inside the domain of the deformation fields. This is assumed to be accomplished by "pre-aligning" the moving image towards the static using an affine transformation given by the 'prealign' matrix static_grid2world : array, shape (dim+1, dim+1) the voxel-to-space transformation associated to the static image moving_grid2world : array, shape (dim+1, dim+1) the voxel-to-space transformation associated to the moving image prealign : array, shape (dim+1, dim+1) the affine transformation (operating on the physical space) pre-aligning the moving image towards the static Returns ------- static_to_ref : DiffeomorphicMap object the diffeomorphic map that brings the moving image towards the static one in the forward direction (i.e. by calling static_to_ref.transform) and the static image towards the moving one in the backward direction (i.e. by calling static_to_ref.transform_inverse). """ if self.verbosity >= VerbosityLevels.DEBUG: print("Pre-align:", prealign) self._init_optimizer(static.astype(floating), moving.astype(floating), static_grid2world, moving_grid2world, prealign) self._optimize() self._end_optimizer() self.static_to_ref.forward = np.array(self.static_to_ref.forward) self.static_to_ref.backward = np.array(self.static_to_ref.backward) return self.static_to_ref dipy-0.13.0/dipy/align/metrics.py000066400000000000000000001327201317371701200166260ustar00rootroot00000000000000""" Metrics for Symmetric Diffeomorphic Registration """ from __future__ import print_function import abc import numpy as np import scipy as sp from scipy import gradient, ndimage from dipy.utils.six import with_metaclass from dipy.align import vector_fields as vfu from dipy.align import sumsqdiff as ssd from dipy.align import crosscorr as cc from dipy.align import expectmax as em from dipy.align import floating class SimilarityMetric(with_metaclass(abc.ABCMeta, object)): def __init__(self, dim): r""" Similarity Metric abstract class A similarity metric is in charge of keeping track of the numerical value of the similarity (or distance) between the two given images. It also computes the update field for the forward and inverse displacement fields to be used in a gradient-based optimization algorithm. Note that this metric does not depend on any transformation (affine or non-linear) so it assumes the static and moving images are already warped Parameters ---------- dim : int (either 2 or 3) the dimension of the image domain """ self.dim = dim self.levels_above = None self.levels_below = None self.static_image = None self.static_affine = None self.static_spacing = None self.static_direction = None self.moving_image = None self.moving_affine = None self.moving_spacing = None self.moving_direction = None self.mask0 = False def set_levels_below(self, levels): r"""Informs the metric how many pyramid levels are below the current one Informs this metric the number of pyramid levels below the current one. The metric may change its behavior (e.g. number of inner iterations) accordingly Parameters ---------- levels : int the number of levels below the current Gaussian Pyramid level """ self.levels_below = levels def set_levels_above(self, levels): r"""Informs the metric how many pyramid levels are above the current one Informs this metric the number of pyramid levels above the current one. The metric may change its behavior (e.g. 
number of inner iterations) accordingly Parameters ---------- levels : int the number of levels above the current Gaussian Pyramid level """ self.levels_above = levels def set_static_image(self, static_image, static_affine, static_spacing, static_direction): r"""Sets the static image being compared against the moving one. Sets the static image. The default behavior (of this abstract class) is simply to assign the reference to an attribute, but generalizations of the metric may need to perform other operations Parameters ---------- static_image : array, shape (R, C) or (S, R, C) the static image """ self.static_image = static_image self.static_affine = static_affine self.static_spacing = static_spacing self.static_direction = static_direction def use_static_image_dynamics(self, original_static_image, transformation): r"""This is called by the optimizer just after setting the static image. This method allows the metric to compute any useful information from knowing how the current static image was generated (as the transformation of an original static image). This method is called by the optimizer just after it sets the static image. Transformation will be an instance of DiffeomorficMap or None if the original_static_image equals self.moving_image. Parameters ---------- original_static_image : array, shape (R, C) or (S, R, C) original image from which the current static image was generated transformation : DiffeomorphicMap object the transformation that was applied to original image to generate the current static image """ pass def set_moving_image(self, moving_image, moving_affine, moving_spacing, moving_direction): r"""Sets the moving image being compared against the static one. Sets the moving image. The default behavior (of this abstract class) is simply to assign the reference to an attribute, but generalizations of the metric may need to perform other operations Parameters ---------- moving_image : array, shape (R, C) or (S, R, C) the moving image """ self.moving_image = moving_image self.moving_affine = moving_affine self.moving_spacing = moving_spacing self.moving_direction = moving_direction def use_moving_image_dynamics(self, original_moving_image, transformation): r"""This is called by the optimizer just after setting the moving image This method allows the metric to compute any useful information from knowing how the current static image was generated (as the transformation of an original static image). This method is called by the optimizer just after it sets the static image. Transformation will be an instance of DiffeomorficMap or None if the original_moving_image equals self.moving_image. Parameters ---------- original_moving_image : array, shape (R, C) or (S, R, C) original image from which the current moving image was generated transformation : DiffeomorphicMap object the transformation that was applied to original image to generate the current moving image """ pass @abc.abstractmethod def initialize_iteration(self): r"""Prepares the metric to compute one displacement field iteration. This method will be called before any compute_forward or compute_backward call, this allows the Metric to pre-compute any useful information for speeding up the update computations. This initialization was needed in ANTS because the updates are called once per voxel. In Python this is unpractical, though. 
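# Hedged illustration (not part of dipy): a minimal SimilarityMetric subclass
# showing the contract the registration optimizer relies on (the remaining
# abstract methods are declared just below).  The update rule is a naive,
# unregularized SSD gradient step chosen only to keep the example short.
import numpy as np
from dipy.align.metrics import SimilarityMetric

class NaiveSSD(SimilarityMetric):
    def initialize_iteration(self):
        # Pre-compute what compute_forward / compute_backward will need.
        self._diff = self.static_image - self.moving_image
        self._grad_moving = np.stack(np.gradient(self.moving_image), axis=-1)
        self._grad_static = np.stack(np.gradient(self.static_image), axis=-1)

    def free_iteration(self):
        del self._diff, self._grad_moving, self._grad_static

    def compute_forward(self):
        # Pull the moving image toward the static one.
        return self._diff[..., None] * self._grad_moving

    def compute_backward(self):
        # Pull the static image toward the moving one.
        return -self._diff[..., None] * self._grad_static

    def get_energy(self):
        return float(np.sum(self._diff ** 2))

metric = NaiveSSD(dim=2)
static = np.random.rand(16, 16)
moving = np.random.rand(16, 16)
# The optimizer would normally pass real affines, spacings and directions.
metric.set_static_image(static, None, np.ones(2), None)
metric.set_moving_image(moving, None, np.ones(2), None)
metric.initialize_iteration()
step = metric.compute_forward()      # displacement update, shape (16, 16, 2)
energy = metric.get_energy()
metric.free_iteration()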
""" @abc.abstractmethod def free_iteration(self): r"""Releases the resources no longer needed by the metric This method is called by the RegistrationOptimizer after the required iterations have been computed (forward and / or backward) so that the SimilarityMetric can safely delete any data it computed as part of the initialization """ @abc.abstractmethod def compute_forward(self): r"""Computes one step bringing the reference image towards the static. Computes the forward update field to register the moving image towards the static image in a gradient-based optimization algorithm """ @abc.abstractmethod def compute_backward(self): r"""Computes one step bringing the static image towards the moving. Computes the backward update field to register the static image towards the moving image in a gradient-based optimization algorithm """ @abc.abstractmethod def get_energy(self): r"""Numerical value assigned by this metric to the current image pair Must return the numeric value of the similarity between the given static and moving images """ class CCMetric(SimilarityMetric): def __init__(self, dim, sigma_diff=2.0, radius=4): r"""Normalized Cross-Correlation Similarity metric. Parameters ---------- dim : int (either 2 or 3) the dimension of the image domain sigma_diff : the standard deviation of the Gaussian smoothing kernel to be applied to the update field at each iteration radius : int the radius of the squared (cubic) neighborhood at each voxel to be considered to compute the cross correlation """ super(CCMetric, self).__init__(dim) self.sigma_diff = sigma_diff self.radius = radius self._connect_functions() def _connect_functions(self): r"""Assign the methods to be called according to the image dimension Assigns the appropriate functions to be called for precomputing the cross-correlation factors according to the dimension of the input images """ if self.dim == 2: self.precompute_factors = cc.precompute_cc_factors_2d self.compute_forward_step = cc.compute_cc_forward_step_2d self.compute_backward_step = cc.compute_cc_backward_step_2d self.reorient_vector_field = vfu.reorient_vector_field_2d elif self.dim == 3: self.precompute_factors = cc.precompute_cc_factors_3d self.compute_forward_step = cc.compute_cc_forward_step_3d self.compute_backward_step = cc.compute_cc_backward_step_3d self.reorient_vector_field = vfu.reorient_vector_field_3d else: raise ValueError('CC Metric not defined for dim. %d' % (self.dim)) def initialize_iteration(self): r"""Prepares the metric to compute one displacement field iteration. Pre-computes the cross-correlation factors for efficient computation of the gradient of the Cross Correlation w.r.t. the displacement field. It also pre-computes the image gradients in the physical space by re-orienting the gradients in the voxel space using the corresponding affine transformations. 
""" self.factors = self.precompute_factors(self.static_image, self.moving_image, self.radius) self.factors = np.array(self.factors) self.gradient_moving = np.empty( shape=(self.moving_image.shape)+(self.dim,), dtype=floating) for i, grad in enumerate(sp.gradient(self.moving_image)): self.gradient_moving[..., i] = grad # Convert moving image's gradient field from voxel to physical space if self.moving_spacing is not None: self.gradient_moving /= self.moving_spacing if self.moving_direction is not None: self.reorient_vector_field(self.gradient_moving, self.moving_direction) self.gradient_static = np.empty( shape=(self.static_image.shape)+(self.dim,), dtype=floating) for i, grad in enumerate(sp.gradient(self.static_image)): self.gradient_static[..., i] = grad # Convert moving image's gradient field from voxel to physical space if self.static_spacing is not None: self.gradient_static /= self.static_spacing if self.static_direction is not None: self.reorient_vector_field(self.gradient_static, self.static_direction) def free_iteration(self): r"""Frees the resources allocated during initialization """ del self.factors del self.gradient_moving del self.gradient_static def compute_forward(self): r"""Computes one step bringing the moving image towards the static. Computes the update displacement field to be used for registration of the moving image towards the static image """ displacement, self.energy = self.compute_forward_step( self.gradient_static, self.factors, self.radius) displacement = np.array(displacement) for i in range(self.dim): displacement[..., i] = ndimage.filters.gaussian_filter( displacement[..., i], self.sigma_diff) return displacement def compute_backward(self): r"""Computes one step bringing the static image towards the moving. Computes the update displacement field to be used for registration of the static image towards the moving image """ displacement, energy = self.compute_backward_step(self.gradient_moving, self.factors, self.radius) displacement = np.array(displacement) for i in range(self.dim): displacement[..., i] = ndimage.filters.gaussian_filter( displacement[..., i], self.sigma_diff) return displacement def get_energy(self): r"""Numerical value assigned by this metric to the current image pair Returns the Cross Correlation (data term) energy computed at the largest iteration """ return self.energy class EMMetric(SimilarityMetric): def __init__(self, dim, smooth=1.0, inner_iter=5, q_levels=256, double_gradient=True, step_type='gauss_newton'): r"""Expectation-Maximization Metric Similarity metric based on the Expectation-Maximization algorithm to handle multi-modal images. The transfer function is modeled as a set of hidden random variables that are estimated at each iteration of the algorithm. 
Parameters ---------- dim : int (either 2 or 3) the dimension of the image domain smooth : float smoothness parameter, the larger the value the smoother the deformation field inner_iter : int number of iterations to be performed at each level of the multi- resolution Gauss-Seidel optimization algorithm (this is not the number of steps per Gaussian Pyramid level, that parameter must be set for the optimizer, not the metric) q_levels : number of quantization levels (equal to the number of hidden variables in the EM algorithm) double_gradient : boolean if True, the gradient of the expected static image under the moving modality will be added to the gradient of the moving image, similarly, the gradient of the expected moving image under the static modality will be added to the gradient of the static image. step_type : string ('gauss_newton', 'demons') the optimization schedule to be used in the multi-resolution Gauss-Seidel optimization algorithm (not used if Demons Step is selected) """ super(EMMetric, self).__init__(dim) self.smooth = smooth self.inner_iter = inner_iter self.q_levels = q_levels self.use_double_gradient = double_gradient self.step_type = step_type self.static_image_mask = None self.moving_image_mask = None self.staticq_means_field = None self.movingq_means_field = None self.movingq_levels = None self.staticq_levels = None self._connect_functions() def _connect_functions(self): r"""Assign the methods to be called according to the image dimension Assigns the appropriate functions to be called for image quantization, statistics computation and multi-resolution iterations according to the dimension of the input images """ if self.dim == 2: self.quantize = em.quantize_positive_2d self.compute_stats = em.compute_masked_class_stats_2d self.reorient_vector_field = vfu.reorient_vector_field_2d elif self.dim == 3: self.quantize = em.quantize_positive_3d self.compute_stats = em.compute_masked_class_stats_3d self.reorient_vector_field = vfu.reorient_vector_field_3d else: raise ValueError('EM Metric not defined for dim. %d' % (self.dim)) if self.step_type == 'demons': self.compute_step = self.compute_demons_step elif self.step_type == 'gauss_newton': self.compute_step = self.compute_gauss_newton_step else: raise ValueError('Opt. step %s not defined' % (self.step_type)) def initialize_iteration(self): r"""Prepares the metric to compute one displacement field iteration. Pre-computes the transfer functions (hidden random variables) and variances of the estimators. Also pre-computes the gradient of both input images. Note that once the images are transformed to the opposite modality, the gradient of the transformed images can be used with the gradient of the corresponding modality in the same fashion as diff-demons does for mono-modality images. If the flag self.use_double_gradient is True these gradients are averaged. 
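# Hedged illustration (not dipy's cython kernels): the EM metric quantizes one
# image into q_levels classes and, for every class, estimates the mean and
# variance of the *other* image over the voxels of that class (inside the
# sampling mask).  The means indexed by the class labels act as the hidden
# "transfer function" between modalities.  q_levels and the synthetic images
# are arbitrary.
import numpy as np

q_levels = 8
static = np.random.rand(24, 24)
moving = 2.0 * static + 0.1 * np.random.rand(24, 24)   # a second "modality"
mask = np.ones_like(static, dtype=bool)

edges = np.linspace(static[mask].min(), static[mask].max(), q_levels + 1)
labels = np.clip(np.digitize(static, edges) - 1, 0, q_levels - 1)

means = np.zeros(q_levels)
variances = np.zeros(q_levels)
for k in range(q_levels):
    vals = moving[mask & (labels == k)]
    if vals.size > 0:
        means[k] = vals.mean()
        variances[k] = vals.var()

expected_moving = means[labels]     # E[moving | static class], per voxel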
""" sampling_mask = self.static_image_mask*self.moving_image_mask self.sampling_mask = sampling_mask staticq, self.staticq_levels, hist = self.quantize(self.static_image, self.q_levels) staticq = np.array(staticq, dtype=np.int32) self.staticq_levels = np.array(self.staticq_levels) staticq_means, staticq_vars = self.compute_stats(sampling_mask, self.moving_image, self.q_levels, staticq) staticq_means[0] = 0 self.staticq_means = np.array(staticq_means) self.staticq_variances = np.array(staticq_vars) self.staticq_sigma_sq_field = self.staticq_variances[staticq] self.staticq_means_field = self.staticq_means[staticq] self.gradient_moving = np.empty( shape=(self.moving_image.shape)+(self.dim,), dtype=floating) for i, grad in enumerate(sp.gradient(self.moving_image)): self.gradient_moving[..., i] = grad # Convert moving image's gradient field from voxel to physical space if self.moving_spacing is not None: self.gradient_moving /= self.moving_spacing if self.moving_direction is not None: self.reorient_vector_field(self.gradient_moving, self.moving_direction) self.gradient_static = np.empty( shape=(self.static_image.shape)+(self.dim,), dtype=floating) for i, grad in enumerate(sp.gradient(self.static_image)): self.gradient_static[..., i] = grad # Convert moving image's gradient field from voxel to physical space if self.static_spacing is not None: self.gradient_static /= self.static_spacing if self.static_direction is not None: self.reorient_vector_field(self.gradient_static, self.static_direction) movingq, self.movingq_levels, hist = self.quantize(self.moving_image, self.q_levels) movingq = np.array(movingq, dtype=np.int32) self.movingq_levels = np.array(self.movingq_levels) movingq_means, movingq_variances = self.compute_stats( sampling_mask, self.static_image, self.q_levels, movingq) movingq_means[0] = 0 self.movingq_means = np.array(movingq_means) self.movingq_variances = np.array(movingq_variances) self.movingq_sigma_sq_field = self.movingq_variances[movingq] self.movingq_means_field = self.movingq_means[movingq] if self.use_double_gradient: for i, grad in enumerate(sp.gradient(self.staticq_means_field)): self.gradient_moving[..., i] += grad for i, grad in enumerate(sp.gradient(self.movingq_means_field)): self.gradient_static[..., i] += grad def free_iteration(self): r""" Frees the resources allocated during initialization """ del self.sampling_mask del self.staticq_levels del self.movingq_levels del self.staticq_sigma_sq_field del self.staticq_means_field del self.movingq_sigma_sq_field del self.movingq_means_field del self.gradient_moving del self.gradient_static def compute_forward(self): """Computes one step bringing the reference image towards the static. Computes the forward update field to register the moving image towards the static image in a gradient-based optimization algorithm """ return self.compute_step(True) def compute_backward(self): r"""Computes one step bringing the static image towards the moving. Computes the update displacement field to be used for registration of the static image towards the moving image """ return self.compute_step(False) def compute_gauss_newton_step(self, forward_step=True): r"""Computes the Gauss-Newton energy minimization step Computes the Newton step to minimize this energy, i.e., minimizes the linearized energy function with respect to the regularized displacement field (this step does not require post-smoothing, as opposed to the demons step, which does not include regularization). 
To accelerate convergence we use the multi-grid Gauss-Seidel algorithm proposed by Bruhn and Weickert et al [Bruhn05] Parameters ---------- forward_step : boolean if True, computes the Newton step in the forward direction (warping the moving towards the static image). If False, computes the backward step (warping the static image to the moving image) Returns ------- displacement : array, shape (R, C, 2) or (S, R, C, 3) the Newton step References ---------- [Bruhn05] Andres Bruhn and Joachim Weickert, "Towards ultimate motion estimation: combining highest accuracy with real-time performance", 10th IEEE International Conference on Computer Vision, 2005. ICCV 2005. """ reference_shape = self.static_image.shape if forward_step: gradient = self.gradient_static delta = self.staticq_means_field - self.moving_image sigma_sq_field = self.staticq_sigma_sq_field else: gradient = self.gradient_moving delta = self.movingq_means_field - self.static_image sigma_sq_field = self.movingq_sigma_sq_field displacement = np.zeros(shape=(reference_shape)+(self.dim,), dtype=floating) if self.dim == 2: self.energy = v_cycle_2d(self.levels_below, self.inner_iter, delta, sigma_sq_field, gradient, None, self.smooth, displacement) else: self.energy = v_cycle_3d(self.levels_below, self.inner_iter, delta, sigma_sq_field, gradient, None, self.smooth, displacement) return displacement def compute_demons_step(self, forward_step=True): r"""Demons step for EM metric Parameters ---------- forward_step : boolean if True, computes the Demons step in the forward direction (warping the moving towards the static image). If False, computes the backward step (warping the static image to the moving image) Returns ------- displacement : array, shape (R, C, 2) or (S, R, C, 3) the Demons step """ sigma_reg_2 = np.sum(self.static_spacing**2)/self.dim if forward_step: gradient = self.gradient_static delta_field = self.static_image - self.movingq_means_field sigma_sq_field = self.movingq_sigma_sq_field else: gradient = self.gradient_moving delta_field = self.moving_image - self.staticq_means_field sigma_sq_field = self.staticq_sigma_sq_field if self.dim == 2: step, self.energy = em.compute_em_demons_step_2d(delta_field, sigma_sq_field, gradient, sigma_reg_2, None) else: step, self.energy = em.compute_em_demons_step_3d(delta_field, sigma_sq_field, gradient, sigma_reg_2, None) for i in range(self.dim): step[..., i] = ndimage.filters.gaussian_filter(step[..., i], self.smooth) return step def get_energy(self): r"""The numerical value assigned by this metric to the current image pair Returns the EM (data term) energy computed at the largest iteration """ return self.energy def use_static_image_dynamics(self, original_static_image, transformation): r"""This is called by the optimizer just after setting the static image. EMMetric takes advantage of the image dynamics by computing the current static image mask from the originalstaticImage mask (warped by nearest neighbor interpolation) Parameters ---------- original_static_image : array, shape (R, C) or (S, R, C) the original static image from which the current static image was generated, the current static image is the one that was provided via 'set_static_image(...)', which may not be the same as the original static image but a warped version of it (even the static image changes during Symmetric Normalization, not only the moving one). 
transformation : DiffeomorphicMap object the transformation that was applied to the original_static_image to generate the current static image """ self.static_image_mask = (original_static_image > 0).astype(np.int32) if transformation is None: return shape = np.array(self.static_image.shape, dtype=np.int32) affine = self.static_affine self.static_image_mask = transformation.transform( self.static_image_mask, 'nearest', None, shape, affine) def use_moving_image_dynamics(self, original_moving_image, transformation): r"""This is called by the optimizer just after setting the moving image. EMMetric takes advantage of the image dynamics by computing the current moving image mask from the original_moving_image mask (warped by nearest neighbor interpolation) Parameters ---------- original_moving_image : array, shape (R, C) or (S, R, C) the original moving image from which the current moving image was generated, the current moving image is the one that was provided via 'set_moving_image(...)', which may not be the same as the original moving image but a warped version of it. transformation : DiffeomorphicMap object the transformation that was applied to the original_moving_image to generate the current moving image """ self.moving_image_mask = (original_moving_image > 0).astype(np.int32) if transformation is None: return shape = np.array(self.moving_image.shape, dtype=np.int32) affine = self.moving_affine self.moving_image_mask = transformation.transform( self.moving_image_mask, 'nearest', None, shape, affine) class SSDMetric(SimilarityMetric): def __init__(self, dim, smooth=4, inner_iter=10, step_type='demons'): r"""Sum of Squared Differences (SSD) Metric Similarity metric for (mono-modal) nonlinear image registration defined by the sum of squared differences (SSD) Parameters ---------- dim : int (either 2 or 3) the dimension of the image domain smooth : float smoothness parameter, the larger the value the smoother the deformation field inner_iter : int number of iterations to be performed at each level of the multi- resolution Gauss-Seidel optimization algorithm (this is not the number of steps per Gaussian Pyramid level, that parameter must be set for the optimizer, not the metric) step_type : string the displacement field step to be computed when 'compute_forward' and 'compute_backward' are called. Either 'demons' or 'gauss_newton' """ super(SSDMetric, self).__init__(dim) self.smooth = smooth self.inner_iter = inner_iter self.step_type = step_type self.levels_below = 0 self._connect_functions() def _connect_functions(self): r"""Assign the methods to be called according to the image dimension Assigns the appropriate functions to be called for vector field reorientation and displacement field steps according to the dimension of the input images and the select type of step (either Demons or Gauss Newton) """ if self.dim == 2: self.reorient_vector_field = vfu.reorient_vector_field_2d elif self.dim == 3: self.reorient_vector_field = vfu.reorient_vector_field_3d else: raise ValueError('SSD Metric not defined for dim. %d' % (self.dim)) if self.step_type == 'gauss_newton': self.compute_step = self.compute_gauss_newton_step elif self.step_type == 'demons': self.compute_step = self.compute_demons_step else: raise ValueError('Opt. step %s not defined' % (self.step_type)) def initialize_iteration(self): r"""Prepares the metric to compute one displacement field iteration. Pre-computes the gradient of the input images to be used in the computation of the forward and backward steps. 
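# Hedged illustration (not dipy code): the gradient pre-computation described
# above takes per-axis finite differences in voxel space and divides by the
# voxel spacing so the vectors are expressed in physical units (a rotational
# direction component would additionally require reorienting the vectors, as
# reorient_vector_field_* does).  The spacing values are arbitrary.
import numpy as np

image = np.random.rand(10, 12, 14)
spacing = np.array([2.0, 1.0, 1.0])                 # voxel size per axis (mm)

grad_voxel = np.stack(np.gradient(image), axis=-1)  # shape (10, 12, 14, 3)
grad_physical = grad_voxel / spacing                # derivative per mm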
""" self.gradient_moving = np.empty( shape=(self.moving_image.shape)+(self.dim,), dtype=floating) for i, grad in enumerate(gradient(self.moving_image)): self.gradient_moving[..., i] = grad # Convert static image's gradient field from voxel to physical space if self.moving_spacing is not None: self.gradient_moving /= self.moving_spacing if self.moving_direction is not None: self.reorient_vector_field(self.gradient_moving, self.moving_direction) self.gradient_static = np.empty( shape=(self.static_image.shape)+(self.dim,), dtype=floating) for i, grad in enumerate(gradient(self.static_image)): self.gradient_static[..., i] = grad # Convert static image's gradient field from voxel to physical space if self.static_spacing is not None: self.gradient_static /= self.static_spacing if self.static_direction is not None: self.reorient_vector_field(self.gradient_static, self.static_direction) def compute_forward(self): r"""Computes one step bringing the reference image towards the static. Computes the update displacement field to be used for registration of the moving image towards the static image """ return self.compute_step(True) def compute_backward(self): r"""Computes one step bringing the static image towards the moving. Computes the update displacement field to be used for registration of the static image towards the moving image """ return self.compute_step(False) def compute_gauss_newton_step(self, forward_step=True): r"""Computes the Gauss-Newton energy minimization step Minimizes the linearized energy function (Newton step) defined by the sum of squared differences of corresponding pixels of the input images with respect to the displacement field. Parameters ---------- forward_step : boolean if True, computes the Newton step in the forward direction (warping the moving towards the static image). If False, computes the backward step (warping the static image to the moving image) Returns ------- displacement : array, shape = static_image.shape + (3,) if forward_step==True, the forward SSD Gauss-Newton step, else, the backward step """ reference_shape = self.static_image.shape if forward_step: gradient = self.gradient_static delta_field = self.static_image-self.moving_image else: gradient = self.gradient_moving delta_field = self.moving_image - self.static_image displacement = np.zeros(shape=(reference_shape)+(self.dim,), dtype=floating) if self.dim == 2: self.energy = v_cycle_2d(self.levels_below, self.inner_iter, delta_field, None, gradient, None, self.smooth, displacement) else: self.energy = v_cycle_3d(self.levels_below, self.inner_iter, delta_field, None, gradient, None, self.smooth, displacement) return displacement def compute_demons_step(self, forward_step=True): r"""Demons step for SSD metric Computes the demons step proposed by Vercauteren et al.[Vercauteren09] for the SSD metric. Parameters ---------- forward_step : boolean if True, computes the Demons step in the forward direction (warping the moving towards the static image). 
If False, computes the backward step (warping the static image to the moving image) Returns ------- displacement : array, shape (R, C, 2) or (S, R, C, 3) the Demons step References ---------- [Vercauteren09] Tom Vercauteren, Xavier Pennec, Aymeric Perchant, Nicholas Ayache, "Diffeomorphic Demons: Efficient Non-parametric Image Registration", Neuroimage 2009 """ sigma_reg_2 = np.sum(self.static_spacing**2)/self.dim if forward_step: gradient = self.gradient_static delta_field = self.static_image - self.moving_image else: gradient = self.gradient_moving delta_field = self.moving_image - self.static_image if self.dim == 2: step, self.energy = ssd.compute_ssd_demons_step_2d(delta_field, gradient, sigma_reg_2, None) else: step, self.energy = ssd.compute_ssd_demons_step_3d(delta_field, gradient, sigma_reg_2, None) for i in range(self.dim): step[..., i] = ndimage.filters.gaussian_filter(step[..., i], self.smooth) return step def get_energy(self): r"""The numerical value assigned by this metric to the current image pair Returns the Sum of Squared Differences (data term) energy computed at the largest iteration """ return self.energy def free_iteration(self): r""" Nothing to free for the SSD metric """ pass def v_cycle_2d(n, k, delta_field, sigma_sq_field, gradient_field, target, lambda_param, displacement, depth=0): r"""Multi-resolution Gauss-Seidel solver using V-type cycles Multi-resolution Gauss-Seidel solver: solves the Gauss-Newton linear system by first filtering (GS-iterate) the current level, then solves for the residual at a coarser resolution and finally refines the solution at the current resolution. This scheme corresponds to the V-cycle proposed by Bruhn and Weickert[Bruhn05]. Parameters ---------- n : int number of levels of the multi-resolution algorithm (it will be called recursively until level n == 0) k : int the number of iterations at each multi-resolution level delta_field : array, shape (R, C) the difference between the static and moving image (the 'derivative w.r.t. time' in the optical flow model) sigma_sq_field : array, shape (R, C) the variance of the gray level value at each voxel, according to the EM model (for SSD, it is 1 for all voxels). Inf and 0 values are processed specially to support infinite and zero variance. gradient_field : array, shape (R, C, 2) the gradient of the moving image target : array, shape (R, C, 2) right-hand side of the linear system to be solved in the Weickert's multi-resolution algorithm lambda_param : float smoothness parameter, the larger its value the smoother the displacement field displacement : array, shape (R, C, 2) the displacement field to start the optimization from Returns ------- energy : the energy of the EM (or SSD if sigmafield[...]==1) metric at this iteration References ---------- [Bruhn05] Andres Bruhn and Joachim Weickert, "Towards ultimate motion estimation: combining highest accuracy with real-time performance", 10th IEEE International Conference on Computer Vision, 2005. ICCV 2005. 
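# Hedged illustration (not dipy's cython kernel): the SSD demons update of
# [Vercauteren09] referenced by compute_demons_step above, written with numpy
# for a 2D pair.  sigma_reg_2 plays the role of the mean squared voxel spacing
# and the smoothing sigma is arbitrary.
import numpy as np
from scipy.ndimage import gaussian_filter

static = np.random.rand(32, 32)
moving = np.random.rand(32, 32)
sigma_reg_2 = 1.0
smooth = 2.0

delta = static - moving                              # image difference
grad = np.stack(np.gradient(static), axis=-1)        # forward step: static gradient
sq_norm = np.sum(grad ** 2, axis=-1)
denom = sq_norm + (delta ** 2) / sigma_reg_2
step = (delta / (denom + 1e-9))[..., None] * grad
for i in range(2):                                   # smooth each component
    step[..., i] = gaussian_filter(step[..., i], smooth)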
""" # pre-smoothing for i in range(k): ssd.iterate_residual_displacement_field_ssd_2d(delta_field, sigma_sq_field, gradient_field, target, lambda_param, displacement) if n == 0: energy = ssd.compute_energy_ssd_2d(delta_field) return energy # solve at coarser grid residual = None residual = ssd.compute_residual_displacement_field_ssd_2d(delta_field, sigma_sq_field, gradient_field, target, lambda_param, displacement, residual) sub_residual = np.array(vfu.downsample_displacement_field_2d(residual)) del residual subsigma_sq_field = None if sigma_sq_field is not None: subsigma_sq_field = vfu.downsample_scalar_field_2d(sigma_sq_field) subdelta_field = vfu.downsample_scalar_field_2d(delta_field) subgradient_field = np.array( vfu.downsample_displacement_field_2d(gradient_field)) shape = np.array(displacement.shape).astype(np.int32) half_shape = ((shape[0] + 1) // 2, (shape[1] + 1) // 2, 2) sub_displacement = np.zeros(shape=half_shape, dtype=floating) sublambda_param = lambda_param*0.25 v_cycle_2d(n-1, k, subdelta_field, subsigma_sq_field, subgradient_field, sub_residual, sublambda_param, sub_displacement, depth+1) # displacement += np.array( # vfu.upsample_displacement_field(sub_displacement, shape)) displacement += vfu.resample_displacement_field_2d(sub_displacement, np.array([0.5, 0.5]), shape) # post-smoothing for i in range(k): ssd.iterate_residual_displacement_field_ssd_2d(delta_field, sigma_sq_field, gradient_field, target, lambda_param, displacement) energy = ssd.compute_energy_ssd_2d(delta_field) return energy def v_cycle_3d(n, k, delta_field, sigma_sq_field, gradient_field, target, lambda_param, displacement, depth=0): r"""Multi-resolution Gauss-Seidel solver using V-type cycles Multi-resolution Gauss-Seidel solver: solves the linear system by first filtering (GS-iterate) the current level, then solves for the residual at a coarser resolution and finally refines the solution at the current resolution. This scheme corresponds to the V-cycle proposed by Bruhn and Weickert[1]. [1] Andres Bruhn and Joachim Weickert, "Towards ultimate motion estimation: combining highest accuracy with real-time performance", 10th IEEE International Conference on Computer Vision, 2005. ICCV 2005. Parameters ---------- n : int number of levels of the multi-resolution algorithm (it will be called recursively until level n == 0) k : int the number of iterations at each multi-resolution level delta_field : array, shape (S, R, C) the difference between the static and moving image (the 'derivative w.r.t. time' in the optical flow model) sigma_sq_field : array, shape (S, R, C) the variance of the gray level value at each voxel, according to the EM model (for SSD, it is 1 for all voxels). Inf and 0 values are processed specially to support infinite and zero variance. 
gradient_field : array, shape (S, R, C, 3) the gradient of the moving image target : array, shape (S, R, C, 3) right-hand side of the linear system to be solved in the Weickert's multi-resolution algorithm lambda_param : float smoothness parameter, the larger its value the smoother the displacement field displacement : array, shape (S, R, C, 3) the displacement field to start the optimization from Returns ------- energy : the energy of the EM (or SSD if sigmafield[...]==1) metric at this iteration """ # pre-smoothing for i in range(k): ssd.iterate_residual_displacement_field_ssd_3d(delta_field, sigma_sq_field, gradient_field, target, lambda_param, displacement) if n == 0: energy = ssd.compute_energy_ssd_3d(delta_field) return energy # solve at coarser grid residual = ssd.compute_residual_displacement_field_ssd_3d(delta_field, sigma_sq_field, gradient_field, target, lambda_param, displacement, None) sub_residual = np.array(vfu.downsample_displacement_field_3d(residual)) del residual subsigma_sq_field = None if sigma_sq_field is not None: subsigma_sq_field = vfu.downsample_scalar_field_3d(sigma_sq_field) subdelta_field = vfu.downsample_scalar_field_3d(delta_field) subgradient_field = np.array( vfu.downsample_displacement_field_3d(gradient_field)) shape = np.array(displacement.shape).astype(np.int32) sub_displacement = np.zeros( shape=((shape[0]+1)//2, (shape[1]+1)//2, (shape[2]+1)//2, 3), dtype=floating) sublambda_param = lambda_param*0.25 v_cycle_3d(n-1, k, subdelta_field, subsigma_sq_field, subgradient_field, sub_residual, sublambda_param, sub_displacement, depth+1) del subdelta_field del subsigma_sq_field del subgradient_field del sub_residual displacement += vfu.resample_displacement_field_3d(sub_displacement, 0.5 * np.ones(3), shape) del sub_displacement # post-smoothing for i in range(k): ssd.iterate_residual_displacement_field_ssd_3d(delta_field, sigma_sq_field, gradient_field, target, lambda_param, displacement) energy = ssd.compute_energy_ssd_3d(delta_field) return energy dipy-0.13.0/dipy/align/parzenhist.pyx000066400000000000000000001533211317371701200175370ustar00rootroot00000000000000#!python #cython: boundscheck=False #cython: wraparound=False #cython: cdivision=True import numpy as np cimport numpy as cnp cimport cython import numpy.random as random from .fused_types cimport floating from . import vector_fields as vf from dipy.align.vector_fields cimport(_apply_affine_3d_x0, _apply_affine_3d_x1, _apply_affine_3d_x2, _apply_affine_2d_x0, _apply_affine_2d_x1) from dipy.align.transforms cimport (Transform) cdef extern from "dpy_math.h" nogil: double cos(double) double sin(double) double log(double) class ParzenJointHistogram(object): def __init__(self, nbins): r""" Computes joint histogram and derivatives with Parzen windows Base class to compute joint and marginal probability density functions and their derivatives with respect to a transform's parameters. The smooth histograms are computed by using Parzen windows [Parzen62] with a cubic spline kernel, as proposed by Mattes et al. [Mattes03]. This implementation is not tied to any optimization (registration) method, the idea is that information-theoretic matching functionals (such as Mutual Information) can inherit from this class to perform the low-level computations of the joint intensity distributions and its gradient w.r.t. the transform parameters. The derived class can then compute the similarity/dissimilarity measure and gradient, and finally communicate the results to the appropriate optimizer. 
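# Hedged usage sketch: building the Parzen joint and marginal PDFs for a pair
# of images and reading a mutual-information value off them.  The bin count
# and the random test volumes are arbitrary; dipy's affine registration wraps
# these steps inside its own metric.
import numpy as np
from dipy.align.parzenhist import ParzenJointHistogram

static = np.random.rand(32, 32, 32)
moving = np.random.rand(32, 32, 32)

pjh = ParzenJointHistogram(32)
pjh.setup(static, moving)
pjh.update_pdfs_dense(static, moving)

P = np.asarray(pjh.joint)                 # joint PDF, shape (nbins, nbins)
ps = np.asarray(pjh.smarginal)            # static marginal
pm = np.asarray(pjh.mmarginal)            # moving marginal
nz = P > 0
mi = np.sum(P[nz] * np.log(P[nz] / np.outer(ps, pm)[nz]))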
Parameters ---------- nbins : int the number of bins of the joint and marginal probability density functions (the actual number of bins of the joint PDF is nbins**2) References ---------- [Parzen62] E. Parzen. On the estimation of a probability density function and the mode. Annals of Mathematical Statistics, 33(3), 1065-1076, 1962. [Mattes03] Mattes, D., Haynor, D. R., Vesselle, H., Lewellen, T. K., & Eubank, W. PET-CT image registration in the chest using free-form deformations. IEEE Transactions on Medical Imaging, 22(1), 120-8, 2003. Notes -------- We need this class in cython to allow _joint_pdf_gradient_dense_2d and _joint_pdf_gradient_dense_3d to use a nogil Jacobian function (obtained from an instance of the Transform class), which allows us to evaluate Jacobians at all the sampling points (maybe the full grid) inside a nogil loop. The reason we need a class is to encapsulate all the parameters related to the joint and marginal distributions. """ self.nbins = nbins # Since the kernel used to compute the Parzen histogram covers more # than one bin, we need to add extra bins to both sides of the # histogram to account for the contributions of the minimum and maximum # intensities. Padding is the number of extra bins used at each side # of the histogram (a total of [2 * padding] extra bins). Since the # support of the cubic spline is 5 bins (the center plus 2 bins at each # side) we need a padding of 2, in the case of cubic splines. self.padding = 2 self.setup_called = False def setup(self, static, moving, smask=None, mmask=None): r""" Compute histogram settings to store the PDF of input images Parameters ---------- static : array static image moving : array moving image smask : array mask of static object being registered (a binary array with 1's inside the object of interest and 0's along the background). If None, the behaviour is equivalent to smask=ones_like(static) mmask : array mask of moving object being registered (a binary array with 1's inside the object of interest and 0's along the background). 
If None, the behaviour is equivalent to mmask=ones_like(static) """ if smask is None: smask = np.ones_like(static) if mmask is None: mmask = np.ones_like(moving) self.smin = np.min(static[smask != 0]) self.smax = np.max(static[smask != 0]) self.mmin = np.min(moving[mmask != 0]) self.mmax = np.max(moving[mmask != 0]) self.sdelta = (self.smax - self.smin) / (self.nbins - 2 * self.padding) self.mdelta = (self.mmax - self.mmin) / (self.nbins - 2 * self.padding) self.smin = self.smin / self.sdelta - self.padding self.mmin = self.mmin / self.mdelta - self.padding self.joint_grad = None self.metric_grad = None self.metric_val = 0 self.joint = np.zeros(shape=(self.nbins, self.nbins)) self.smarginal = np.zeros(shape=(self.nbins,), dtype=np.float64) self.mmarginal = np.zeros(shape=(self.nbins,), dtype=np.float64) self.setup_called = True def bin_normalize_static(self, x): r""" Maps intensity x to the range covered by the static histogram If the input intensity is in [self.smin, self.smax] then the normalized intensity will be in [self.padding, self.nbins - self.padding] Parameters ---------- x : float the intensity to be normalized Returns ------- xnorm : float normalized intensity to the range covered by the static histogram """ return _bin_normalize(x, self.smin, self.sdelta) def bin_normalize_moving(self, x): r""" Maps intensity x to the range covered by the moving histogram If the input intensity is in [self.mmin, self.mmax] then the normalized intensity will be in [self.padding, self.nbins - self.padding] Parameters ---------- x : float the intensity to be normalized Returns ------- xnorm : float normalized intensity to the range covered by the moving histogram """ return _bin_normalize(x, self.mmin, self.mdelta) def bin_index(self, xnorm): r""" Bin index associated with the given normalized intensity The return value is an integer in [padding, nbins - 1 - padding] Parameters ---------- xnorm : float intensity value normalized to the range covered by the histogram Returns ------- bin : int the bin index associated with the given normalized intensity """ return _bin_index(xnorm, self.nbins, self.padding) def update_pdfs_dense(self, static, moving, smask=None, mmask=None): r''' Computes the Probability Density Functions of two images The joint PDF is stored in self.joint. The marginal distributions corresponding to the static and moving images are computed and stored in self.smarginal and self.mmarginal, respectively. Parameters ---------- static : array, shape (S, R, C) static image moving : array, shape (S, R, C) moving image smask : array, shape (S, R, C) mask of static object being registered (a binary array with 1's inside the object of interest and 0's along the background). If None, ones_like(static) is used as mask. mmask : array, shape (S, R, C) mask of moving object being registered (a binary array with 1's inside the object of interest and 0's along the background). If None, ones_like(moving) is used as mask. ''' if static.shape != moving.shape: raise ValueError("Images must have the same shape") dim = len(static.shape) if not dim in [2, 3]: msg = 'Only dimensions 2 and 3 are supported. 
' +\ str(dim) + ' received' raise ValueError(msg) if not self.setup_called: self.setup(static, moving, smask=None, mmask=None) if dim == 2: _compute_pdfs_dense_2d(static, moving, smask, mmask, self.smin, self.sdelta, self.mmin, self.mdelta, self.nbins, self.padding, self.joint, self.smarginal, self.mmarginal) elif dim == 3: _compute_pdfs_dense_3d(static, moving, smask, mmask, self.smin, self.sdelta, self.mmin, self.mdelta, self.nbins, self.padding, self.joint, self.smarginal, self.mmarginal) def update_pdfs_sparse(self, sval, mval): r''' Computes the Probability Density Functions from a set of samples The list of intensities `sval` and `mval` are assumed to be sampled from the static and moving images, respectively, at the same physical points. Of course, the images may not be perfectly aligned at the moment the sampling was performed. The resulting distributions corresponds to the paired intensities according to the alignment at the moment the images were sampled. The joint PDF is stored in self.joint. The marginal distributions corresponding to the static and moving images are computed and stored in self.smarginal and self.mmarginal, respectively. Parameters ---------- sval : array, shape (n,) sampled intensities from the static image at sampled_points mval : array, shape (n,) sampled intensities from the moving image at sampled_points ''' if not self.setup_called: self.setup(sval, mval) energy = _compute_pdfs_sparse(sval, mval, self.smin, self.sdelta, self.mmin, self.mdelta, self.nbins, self.padding, self.joint, self.smarginal, self.mmarginal) def update_gradient_dense(self, theta, transform, static, moving, grid2world, mgradient, smask=None, mmask=None): r''' Computes the Gradient of the joint PDF w.r.t. transform parameters Computes the vector of partial derivatives of the joint histogram w.r.t. each transformation parameter. The gradient is stored in self.joint_grad. Parameters ---------- theta : array, shape (n,) parameters of the transformation to compute the gradient from transform : instance of Transform the transformation with respect to whose parameters the gradient must be computed static : array, shape (S, R, C) static image moving : array, shape (S, R, C) moving image grid2world : array, shape (4, 4) we assume that both images have already been sampled at a common grid. This transform must map voxel coordinates of this common grid to physical coordinates of its corresponding voxel in the moving image. For example, if the moving image was sampled on the static image's grid (this is the typical setting) using an aligning matrix A, then (1) grid2world = A.dot(static_affine) where static_affine is the transformation mapping static image's grid coordinates to physical space. mgradient : array, shape (S, R, C, 3) the gradient of the moving image smask : array, shape (S, R, C), optional mask of static object being registered (a binary array with 1's inside the object of interest and 0's along the background). The default is None, indicating all voxels are considered. mmask : array, shape (S, R, C), optional mask of moving object being registered (a binary array with 1's inside the object of interest and 0's along the background). The default is None, indicating all voxels are considered. ''' if static.shape != moving.shape: raise ValueError("Images must have the same shape") dim = len(static.shape) if not dim in [2, 3]: msg = 'Only dimensions 2 and 3 are supported. 
' +\ str(dim) + ' received' raise ValueError(msg) if mgradient.shape != moving.shape + (dim,): raise ValueError('Invalid gradient field dimensions.') if not self.setup_called: self.setup(static, moving, smask, mmask) n = theta.shape[0] nbins = self.nbins if (self.joint_grad is None) or (self.joint_grad.shape[2] != n): self.joint_grad = np.zeros((nbins, nbins, n)) if dim == 2: if mgradient.dtype == np.float64: _joint_pdf_gradient_dense_2d[cython.double](theta, transform, static, moving, grid2world, mgradient, smask, mmask, self.smin, self.sdelta, self.mmin, self.mdelta, self.nbins, self.padding, self.joint_grad) elif mgradient.dtype == np.float32: _joint_pdf_gradient_dense_2d[cython.float](theta, transform, static, moving, grid2world, mgradient, smask, mmask, self.smin, self.sdelta, self.mmin, self.mdelta, self.nbins, self.padding, self.joint_grad) else: raise ValueError('Grad. field dtype must be floating point') elif dim == 3: if mgradient.dtype == np.float64: _joint_pdf_gradient_dense_3d[cython.double](theta, transform, static, moving, grid2world, mgradient, smask, mmask, self.smin, self.sdelta, self.mmin, self.mdelta, self.nbins, self.padding, self.joint_grad) elif mgradient.dtype == np.float32: _joint_pdf_gradient_dense_3d[cython.float](theta, transform, static, moving, grid2world, mgradient, smask, mmask, self.smin, self.sdelta, self.mmin, self.mdelta, self.nbins, self.padding, self.joint_grad) else: raise ValueError('Grad. field dtype must be floating point') def update_gradient_sparse(self, theta, transform, sval, mval, sample_points, mgradient): r''' Computes the Gradient of the joint PDF w.r.t. transform parameters Computes the vector of partial derivatives of the joint histogram w.r.t. each transformation parameter. The list of intensities `sval` and `mval` are assumed to be sampled from the static and moving images, respectively, at the same physical points. Of course, the images may not be perfectly aligned at the moment the sampling was performed. The resulting gradient corresponds to the paired intensities according to the alignment at the moment the images were sampled. The gradient is stored in self.joint_grad. 
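# Hedged illustration (not dipy code): the core chain rule the gradient
# kernels below apply for one sample.  J is the Jacobian of the transform
# w.r.t. its parameters evaluated at the sample point, g is the moving-image
# gradient there; the derivative of the cubic B-spline window (see
# _cubic_spline_derivative further down) spreads -(J^T g) into the 5 bins the
# sample touches.  All numbers are made up for illustration.
import numpy as np

def b3_prime(t):
    # Derivative of the cubic B-spline kernel (same piecewise form as below).
    a = abs(t)
    if a < 1.0:
        return -2.0 * t + 1.5 * t * a
    if a < 2.0:
        if t >= 0:
            return -2.0 + 2.0 * t - 0.5 * t * t
        return 2.0 + 2.0 * t + 0.5 * t * t
    return 0.0

nbins, n_params = 32, 6
J = np.random.rand(2, n_params)            # d(mapped point)/d(theta), 2D case
g = np.random.rand(2)                      # moving-image gradient at the sample
prod = J.T.dot(g)                          # shape (n_params,)

r, c, cn = 10, 12, 12.35                   # static bin, moving bin, normalized moving value
grad_pdf = np.zeros((nbins, nbins, n_params))
for offset in range(-2, 3):
    w = b3_prime((c + offset) - cn)
    grad_pdf[r, c + offset, :] -= w * prod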
Parameters ---------- theta : array, shape (n,) parameters to compute the gradient at transform : instance of Transform the transformation with respect to whose parameters the gradient must be computed sval : array, shape (m,) sampled intensities from the static image at sampled_points mval : array, shape (m,) sampled intensities from the moving image at sampled_points sample_points : array, shape (m, 3) coordinates (in physical space) of the points the images were sampled at mgradient : array, shape (m, 3) the gradient of the moving image at the sample points ''' dim = sample_points.shape[1] if mgradient.shape[1] != dim: raise ValueError('Dimensions of gradients and points are different') nsamples = sval.shape[0] if ((mgradient.shape[0] != nsamples) or (mval.shape[0] != nsamples) or sample_points.shape[0] != nsamples): raise ValueError('Number of points and gradients are different.') if not mgradient.dtype in [np.float32, np.float64]: raise ValueError('Gradients dtype must be floating point') n = theta.shape[0] nbins = self.nbins if (self.joint_grad is None) or (self.joint_grad.shape[2] != n): self.joint_grad = np.zeros(shape=(nbins, nbins, n)) if dim == 2: if mgradient.dtype == np.float64: _joint_pdf_gradient_sparse_2d[cython.double](theta, transform, sval, mval, sample_points, mgradient, self.smin, self.sdelta, self.mmin, self.mdelta, self.nbins, self.padding, self.joint_grad) elif mgradient.dtype == np.float32: _joint_pdf_gradient_sparse_2d[cython.float](theta, transform, sval, mval, sample_points, mgradient, self.smin, self.sdelta, self.mmin, self.mdelta, self.nbins, self.padding, self.joint_grad) else: raise ValueError('Gradients dtype must be floating point') elif dim == 3: if mgradient.dtype == np.float64: _joint_pdf_gradient_sparse_3d[cython.double](theta, transform, sval, mval, sample_points, mgradient, self.smin, self.sdelta, self.mmin, self.mdelta, self.nbins, self.padding, self.joint_grad) elif mgradient.dtype == np.float32: _joint_pdf_gradient_sparse_3d[cython.float](theta, transform, sval, mval, sample_points, mgradient, self.smin, self.sdelta, self.mmin, self.mdelta, self.nbins, self.padding, self.joint_grad) else: raise ValueError('Gradients dtype must be floating point') else: msg = 'Only dimensions 2 and 3 are supported. ' + str(dim) +\ ' received' raise ValueError(msg) cdef inline double _bin_normalize(double x, double mval, double delta) nogil: r''' Normalizes intensity x to the range covered by the Parzen histogram We assume that mval was computed as: (1) mval = xmin / delta - padding where xmin is the minimum observed image intensity and delta is the bin size, computed as: (2) delta = (xmax - xmin)/(nbins - 2 * padding) If the minimum and maximum intensities were assigned to the first and last bins (with no padding), it could be possible that samples at the first and last bins contribute to "non-existing" bins beyond the boundary (because the support of the Parzen window may be larger than one bin). The padding bins are used to collect such contributions (i.e. the probability of observing a value beyond the minimum and maximum observed intensities may correctly be assigned a positive value). 
The normalized intensity is (from eq(1) ): (3) nx = (x - xmin) / delta + padding = x / delta - mval This means that normalized intensity nx must lie in the closed interval [padding, nbins-padding], which contains bins with indices padding, padding+1, ..., nbins - 1 - padding (i.e., nbins - 2*padding bins) ''' return x / delta - mval cdef inline cnp.npy_intp _bin_index(double normalized, int nbins, int padding) nogil: r''' Index of the bin in which the normalized intensity `normalized` lies. The intensity is assumed to have been normalized to the range of intensities covered by the histogram: the bin index is the integer part of `normalized`, which must be within the interval [padding, nbins - 1 - padding]. Parameters ---------- normalized : float normalized intensity nbins : int number of histogram bins padding : int number of bins used as padding (the total bins used for padding at both sides of the histogram is actually 2*padding) Returns ------- bin : int index of the bin in which the normalized intensity 'normalized' lies ''' cdef: cnp.npy_intp bin bin = (normalized) if bin < padding: return padding if bin > nbins - 1 - padding: return nbins - 1 - padding return bin def cubic_spline(double[:] x): r''' Evaluates the cubic spline at a set of values Parameters ---------- x : array, shape (n) input values ''' cdef: cnp.npy_intp i cnp.npy_intp n = x.shape[0] double[:] sx = np.zeros(n, dtype=np.float64) with nogil: for i in range(n): sx[i] = _cubic_spline(x[i]) return np.asarray(sx) cdef inline double _cubic_spline(double x) nogil: r''' Cubic B-Spline evaluated at x See eq. (3) of [Matttes03]. References ---------- [Mattes03] Mattes, D., Haynor, D. R., Vesselle, H., Lewellen, T. K., & Eubank, W. PET-CT image registration in the chest using free-form deformations. IEEE Transactions on Medical Imaging, 22(1), 120-8, 2003. ''' cdef: double absx = -x if x < 0.0 else x double sqrx = x * x if absx < 1.0: return (4.0 - 6.0 * sqrx + 3.0 * sqrx * absx) / 6.0 elif absx < 2.0: return (8.0 - 12 * absx + 6.0 * sqrx - sqrx * absx) / 6.0 return 0.0 def cubic_spline_derivative(double[:] x): r''' Evaluates the cubic spline derivative at a set of values Parameters ---------- x : array, shape (n) input values ''' cdef: cnp.npy_intp i cnp.npy_intp n = x.shape[0] double[:] sx = np.zeros(n, dtype=np.float64) with nogil: for i in range(n): sx[i] = _cubic_spline_derivative(x[i]) return np.asarray(sx) cdef inline double _cubic_spline_derivative(double x) nogil: r''' Derivative of cubic B-Spline evaluated at x See eq. (3) of [Mattes03]. References ---------- [Mattes03] Mattes, D., Haynor, D. R., Vesselle, H., Lewellen, T. K., & Eubank, W. PET-CT image registration in the chest using free-form deformations. IEEE Transactions on Medical Imaging, 22(1), 120-8, 2003. 
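# Hedged illustration (not dipy code): the bin machinery above in plain
# numpy/python.  delta, mval and the normalized intensity follow eqs. (1)-(3)
# of the _bin_normalize docstring, and the cubic-spline weights that spread a
# sample over its 5 neighbouring bins sum to 1 (partition of unity).  The
# intensity range, bin count and sample value are arbitrary.
import numpy as np

nbins, padding = 32, 2
xmin, xmax = 10.0, 250.0                        # observed intensity range
delta = (xmax - xmin) / (nbins - 2 * padding)   # eq. (2)
mval = xmin / delta - padding                   # eq. (1)

x = 137.0                                       # a sample intensity
xn = x / delta - mval                           # eq. (3): normalized intensity
c = int(np.clip(int(xn), padding, nbins - 1 - padding))   # bin index

def b3(t):
    # Cubic B-spline, same piecewise form as _cubic_spline above.
    a = abs(t)
    if a < 1.0:
        return (4.0 - 6.0 * a * a + 3.0 * a ** 3) / 6.0
    if a < 2.0:
        return (2.0 - a) ** 3 / 6.0
    return 0.0

weights = [b3((c + offset) - xn) for offset in range(-2, 3)]
assert abs(sum(weights) - 1.0) < 1e-12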
''' cdef: double absx = -x if x < 0.0 else x if absx < 1.0: if x >= 0.0: return -2.0 * x + 1.5 * x * x else: return -2.0 * x - 1.5 * x * x elif absx < 2.0: if x >= 0: return -2.0 + 2.0 * x - 0.5 * x * x else: return 2.0 + 2.0 * x + 0.5 * x * x return 0.0 cdef _compute_pdfs_dense_2d(double[:, :] static, double[:, :] moving, int[:, :] smask, int[:, :] mmask, double smin, double sdelta, double mmin, double mdelta, int nbins, int padding, double[:, :] joint, double[:] smarginal, double[:] mmarginal): r''' Joint Probability Density Function of intensities of two 2D images Parameters ---------- static : array, shape (R, C) static image moving : array, shape (R, C) moving image smask : array, shape (R, C) mask of static object being registered (a binary array with 1's inside the object of interest and 0's along the background) mmask : array, shape (R, C) mask of moving object being registered (a binary array with 1's inside the object of interest and 0's along the background) smin : float the minimum observed intensity associated with the static image, which was used to define the joint PDF sdelta : float bin size associated with the intensities of the static image mmin : float the minimum observed intensity associated with the moving image, which was used to define the joint PDF mdelta : float bin size associated with the intensities of the moving image nbins : int number of histogram bins padding : int number of bins used as padding (the total bins used for padding at both sides of the histogram is actually 2*padding) joint : array, shape (nbins, nbins) the array to write the joint PDF smarginal : array, shape (nbins,) the array to write the marginal PDF associated with the static image mmarginal : array, shape (nbins,) the array to write the marginal PDF associated with the moving image ''' cdef: cnp.npy_intp nrows = static.shape[0] cnp.npy_intp ncols = static.shape[1] cnp.npy_intp offset, valid_points cnp.npy_intp i, j, r, c double rn, cn double val, spline_arg, sum joint[...] 
= 0 sum = 0 valid_points = 0 with nogil: smarginal[:] = 0 for i in range(nrows): for j in range(ncols): if smask is not None and smask[i, j] == 0: continue if mmask is not None and mmask[i, j] == 0: continue valid_points += 1 rn = _bin_normalize(static[i, j], smin, sdelta) r = _bin_index(rn, nbins, padding) cn = _bin_normalize(moving[i, j], mmin, mdelta) c = _bin_index(cn, nbins, padding) spline_arg = (c - 2) - cn smarginal[r] += 1 for offset in range(-2, 3): val = _cubic_spline(spline_arg) joint[r, c + offset] += val sum += val spline_arg += 1.0 if sum > 0: for i in range(nbins): for j in range(nbins): joint[i, j] /= valid_points for i in range(nbins): smarginal[i] /= valid_points for j in range(nbins): mmarginal[j] = 0 for i in range(nbins): mmarginal[j] += joint[i, j] cdef _compute_pdfs_dense_3d(double[:, :, :] static, double[:, :, :] moving, int[:, :, :] smask, int[:, :, :] mmask, double smin, double sdelta, double mmin, double mdelta, int nbins, int padding, double[:, :] joint, double[:] smarginal, double[:] mmarginal): r''' Joint Probability Density Function of intensities of two 3D images Parameters ---------- static : array, shape (S, R, C) static image moving : array, shape (S, R, C) moving image smask : array, shape (S, R, C) mask of static object being registered (a binary array with 1's inside the object of interest and 0's along the background) mmask : array, shape (S, R, C) mask of moving object being registered (a binary array with 1's inside the object of interest and 0's along the background) smin : float the minimum observed intensity associated with the static image, which was used to define the joint PDF sdelta : float bin size associated with the intensities of the static image mmin : float the minimum observed intensity associated with the moving image, which was used to define the joint PDF mdelta : float bin size associated with the intensities of the moving image nbins : int number of histogram bins padding : int number of bins used as padding (the total bins used for padding at both sides of the histogram is actually 2*padding) joint : array, shape (nbins, nbins) the array to write the joint PDF to smarginal : array, shape (nbins,) the array to write the marginal PDF associated with the static image mmarginal : array, shape (nbins,) the array to write the marginal PDF associated with the moving image ''' cdef: cnp.npy_intp nslices = static.shape[0] cnp.npy_intp nrows = static.shape[1] cnp.npy_intp ncols = static.shape[2] cnp.npy_intp offset, valid_points cnp.npy_intp k, i, j, r, c double rn, cn double val, spline_arg, sum joint[...] 
= 0 sum = 0 with nogil: valid_points = 0 smarginal[:] = 0 for k in range(nslices): for i in range(nrows): for j in range(ncols): if smask is not None and smask[k, i, j] == 0: continue if mmask is not None and mmask[k, i, j] == 0: continue valid_points += 1 rn = _bin_normalize(static[k, i, j], smin, sdelta) r = _bin_index(rn, nbins, padding) cn = _bin_normalize(moving[k, i, j], mmin, mdelta) c = _bin_index(cn, nbins, padding) spline_arg = (c - 2) - cn smarginal[r] += 1 for offset in range(-2, 3): val = _cubic_spline(spline_arg) joint[r, c + offset] += val sum += val spline_arg += 1.0 if sum > 0: for i in range(nbins): for j in range(nbins): joint[i, j] /= sum for i in range(nbins): smarginal[i] /= valid_points for j in range(nbins): mmarginal[j] = 0 for i in range(nbins): mmarginal[j] += joint[i, j] cdef _compute_pdfs_sparse(double[:] sval, double[:] mval, double smin, double sdelta, double mmin, double mdelta, int nbins, int padding, double[:, :] joint, double[:] smarginal, double[:] mmarginal): r''' Probability Density Functions of paired intensities Parameters ---------- sval : array, shape (n,) sampled intensities from the static image at sampled_points mval : array, shape (n,) sampled intensities from the moving image at sampled_points smin : float the minimum observed intensity associated with the static image, which was used to define the joint PDF sdelta : float bin size associated with the intensities of the static image mmin : float the minimum observed intensity associated with the moving image, which was used to define the joint PDF mdelta : float bin size associated with the intensities of the moving image nbins : int number of histogram bins padding : int number of bins used as padding (the total bins used for padding at both sides of the histogram is actually 2*padding) joint : array, shape (nbins, nbins) the array to write the joint PDF to smarginal : array, shape (nbins,) the array to write the marginal PDF associated with the static image mmarginal : array, shape (nbins,) the array to write the marginal PDF associated with the moving image ''' cdef: cnp.npy_intp n = sval.shape[0] cnp.npy_intp offset, valid_points cnp.npy_intp i, r, c double rn, cn double val, spline_arg, sum joint[...] = 0 sum = 0 with nogil: valid_points = 0 smarginal[:] = 0 for i in range(n): valid_points += 1 rn = _bin_normalize(sval[i], smin, sdelta) r = _bin_index(rn, nbins, padding) cn = _bin_normalize(mval[i], mmin, mdelta) c = _bin_index(cn, nbins, padding) spline_arg = (c - 2) - cn smarginal[r] += 1 for offset in range(-2, 3): val = _cubic_spline(spline_arg) joint[r, c + offset] += val sum += val spline_arg += 1.0 if sum > 0: for i in range(nbins): for j in range(nbins): joint[i, j] /= sum for i in range(nbins): smarginal[i] /= valid_points for j in range(nbins): mmarginal[j] = 0 for i in range(nbins): mmarginal[j] += joint[i, j] cdef _joint_pdf_gradient_dense_2d(double[:] theta, Transform transform, double[:, :] static, double[:, :] moving, double[:, :] grid2world, floating[:, :, :] mgradient, int[:, :] smask, int[:, :] mmask, double smin, double sdelta, double mmin, double mdelta, int nbins, int padding, double[:, :, :] grad_pdf): r''' Gradient of the joint PDF w.r.t. transform parameters theta Computes the vector of partial derivatives of the joint histogram w.r.t. each transformation parameter. The transformation itself is not necessary to compute the gradient, but only its Jacobian. 
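    Added summary of the accumulation implemented below (my paraphrase, not
    part of the original docstring): for each valid pixel with static bin
    index $r$ and fractional moving bin $c_n$, every entry
    ``grad_pdf[r, c + offset, k]``, offset in {-2, ..., 2}, is decremented by
    $\psi'(c + offset - c_n) (J^T \nabla m)_k$, where $\psi'$ is the cubic
    spline derivative, $J$ is the transform Jacobian at the pixel's physical
    position and $\nabla m$ is the moving image gradient. The accumulated
    values are finally divided by ``valid_points * mdelta``.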
Parameters ---------- theta : array, shape (n,) parameters of the transformation to compute the gradient from transform : instance of Transform the transformation with respect to whose parameters the gradient must be computed static : array, shape (R, C) static image moving : array, shape (R, C) moving image grid2world : array, shape (3, 3) the grid-to-space transform associated with images static and moving (we assume that both images have already been sampled at a common grid) mgradient : array, shape (R, C, 2) the gradient of the moving image smask : array, shape (R, C) mask of static object being registered (a binary array with 1's inside the object of interest and 0's along the background) mmask : array, shape (R, C) mask of moving object being registered (a binary array with 1's inside the object of interest and 0's along the background) smin : float the minimum observed intensity associated with the static image, which was used to define the joint PDF sdelta : float bin size associated with the intensities of the static image mmin : float the minimum observed intensity associated with the moving image, which was used to define the joint PDF mdelta : float bin size associated with the intensities of the moving image nbins : int number of histogram bins padding : int number of bins used as padding (the total bins used for padding at both sides of the histogram is actually 2*padding) grad_pdf : array, shape (nbins, nbins, len(theta)) the array to write the gradient to ''' cdef: cnp.npy_intp nrows = static.shape[0] cnp.npy_intp ncols = static.shape[1] cnp.npy_intp n = theta.shape[0] cnp.npy_intp offset, valid_points int constant_jacobian = 0 cnp.npy_intp k, i, j, r, c double rn, cn double val, spline_arg, norm_factor double[:, :] J = np.empty(shape=(2, n), dtype=np.float64) double[:] prod = np.empty(shape=(n,), dtype=np.float64) double[:] x = np.empty(shape=(2,), dtype=np.float64) grad_pdf[...] = 0 with nogil: valid_points = 0 for i in range(nrows): for j in range(ncols): if smask is not None and smask[i, j] == 0: continue if mmask is not None and mmask[i, j] == 0: continue valid_points += 1 x[0] = _apply_affine_2d_x0(i, j, 1, grid2world) x[1] = _apply_affine_2d_x1(i, j, 1, grid2world) if constant_jacobian == 0: constant_jacobian = transform._jacobian(theta, x, J) for k in range(n): prod[k] = (J[0, k] * mgradient[i, j, 0] + J[1, k] * mgradient[i, j, 1]) rn = _bin_normalize(static[i, j], smin, sdelta) r = _bin_index(rn, nbins, padding) cn = _bin_normalize(moving[i, j], mmin, mdelta) c = _bin_index(cn, nbins, padding) spline_arg = (c - 2) - cn for offset in range(-2, 3): val = _cubic_spline_derivative(spline_arg) for k in range(n): grad_pdf[r, c + offset, k] -= val * prod[k] spline_arg += 1.0 norm_factor = valid_points * mdelta if norm_factor > 0: for i in range(nbins): for j in range(nbins): for k in range(n): grad_pdf[i, j, k] /= norm_factor cdef _joint_pdf_gradient_dense_3d(double[:] theta, Transform transform, double[:, :, :] static, double[:, :, :] moving, double[:, :] grid2world, floating[:, :, :, :] mgradient, int[:, :, :] smask, int[:, :, :] mmask, double smin, double sdelta, double mmin, double mdelta, int nbins, int padding, double[:, :, :] grad_pdf): r''' Gradient of the joint PDF w.r.t. transform parameters theta Computes the vector of partial derivatives of the joint histogram w.r.t. each transformation parameter. The transformation itself is not necessary to compute the gradient, but only its Jacobian. 
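    Added note (not in the original docstring): voxel indices are first
    mapped to physical coordinates as $x = G (k, i, j, 1)^T$ (first three
    components), with $G$ = ``grid2world``, and the Jacobian $J$ (shape
    3 x len(theta)) is evaluated at $x$; the per-voxel weight is then
    $J^T \nabla m$, exactly as in the 2D routine above.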
Parameters ---------- theta : array, shape (n,) parameters of the transformation to compute the gradient from transform : instance of Transform the transformation with respect to whose parameters the gradient must be computed static : array, shape (S, R, C) static image moving : array, shape (S, R, C) moving image grid2world : array, shape (4, 4) the grid-to-space transform associated with images static and moving (we assume that both images have already been sampled at a common grid) mgradient : array, shape (S, R, C, 3) the gradient of the moving image smask : array, shape (S, R, C) mask of static object being registered (a binary array with 1's inside the object of interest and 0's along the background) mmask : array, shape (S, R, C) mask of moving object being registered (a binary array with 1's inside the object of interest and 0's along the background) smin : float the minimum observed intensity associated with the static image, which was used to define the joint PDF sdelta : float bin size associated with the intensities of the static image mmin : float the minimum observed intensity associated with the moving image, which was used to define the joint PDF mdelta : float bin size associated with the intensities of the moving image nbins : int number of histogram bins padding : int number of bins used as padding (the total bins used for padding at both sides of the histogram is actually 2*padding) grad_pdf : array, shape (nbins, nbins, len(theta)) the array to write the gradient to ''' cdef: cnp.npy_intp nslices = static.shape[0] cnp.npy_intp nrows = static.shape[1] cnp.npy_intp ncols = static.shape[2] cnp.npy_intp n = theta.shape[0] cnp.npy_intp offset, valid_points int constant_jacobian = 0 cnp.npy_intp l, k, i, j, r, c double rn, cn double val, spline_arg, norm_factor double[:, :] J = np.empty(shape=(3, n), dtype=np.float64) double[:] prod = np.empty(shape=(n,), dtype=np.float64) double[:] x = np.empty(shape=(3,), dtype=np.float64) grad_pdf[...] = 0 with nogil: valid_points = 0 for k in range(nslices): for i in range(nrows): for j in range(ncols): if smask is not None and smask[k, i, j] == 0: continue if mmask is not None and mmask[k, i, j] == 0: continue valid_points += 1 x[0] = _apply_affine_3d_x0(k, i, j, 1, grid2world) x[1] = _apply_affine_3d_x1(k, i, j, 1, grid2world) x[2] = _apply_affine_3d_x2(k, i, j, 1, grid2world) if constant_jacobian == 0: constant_jacobian = transform._jacobian(theta, x, J) for l in range(n): prod[l] = (J[0, l] * mgradient[k, i, j, 0] + J[1, l] * mgradient[k, i, j, 1] + J[2, l] * mgradient[k, i, j, 2]) rn = _bin_normalize(static[k, i, j], smin, sdelta) r = _bin_index(rn, nbins, padding) cn = _bin_normalize(moving[k, i, j], mmin, mdelta) c = _bin_index(cn, nbins, padding) spline_arg = (c - 2) - cn for offset in range(-2, 3): val = _cubic_spline_derivative(spline_arg) for l in range(n): grad_pdf[r, c + offset, l] -= val * prod[l] spline_arg += 1.0 norm_factor = valid_points * mdelta if norm_factor > 0: for i in range(nbins): for j in range(nbins): for k in range(n): grad_pdf[i, j, k] /= norm_factor cdef _joint_pdf_gradient_sparse_2d(double[:] theta, Transform transform, double[:] sval, double[:] mval, double[:, :] sample_points, floating[:, :] mgradient, double smin, double sdelta, double mmin, double mdelta, int nbins, int padding, double[:, :, :] grad_pdf): r''' Gradient of the joint PDF w.r.t. transform parameters theta Computes the vector of partial derivatives of the joint histogram w.r.t. each transformation parameter. 
The transformation itself is not necessary to compute the gradient, but only its Jacobian. Parameters ---------- theta : array, shape (n,) parameters to compute the gradient at transform : instance of Transform the transformation with respect to whose parameters the gradient must be computed sval : array, shape (m,) sampled intensities from the static image at sampled_points mval : array, shape (m,) sampled intensities from the moving image at sampled_points sample_points : array, shape (m, 2) positions (in physical space) of the points the images were sampled at mgradient : array, shape (m, 2) the gradient of the moving image at the sample points smin : float the minimum observed intensity associated with the static image, which was used to define the joint PDF sdelta : float bin size associated with the intensities of the static image mmin : float the minimum observed intensity associated with the moving image, which was used to define the joint PDF mdelta : float bin size associated with the intensities of the moving image nbins : int number of histogram bins padding : int number of bins used as padding (the total bins used for padding at both sides of the histogram is actually 2*padding) grad_pdf : array, shape (nbins, nbins, len(theta)) the array to write the gradient to ''' cdef: cnp.npy_intp n = theta.shape[0] cnp.npy_intp m = sval.shape[0] cnp.npy_intp offset int constant_jacobian = 0 cnp.npy_intp i, j, r, c, valid_points double rn, cn double val, spline_arg, norm_factor double[:, :] J = np.empty(shape=(2, n), dtype=np.float64) double[:] prod = np.empty(shape=(n,), dtype=np.float64) grad_pdf[...] = 0 with nogil: valid_points = 0 for i in range(m): valid_points += 1 if constant_jacobian == 0: constant_jacobian = transform._jacobian(theta, sample_points[i], J) for j in range(n): prod[j] = (J[0, j] * mgradient[i, 0] + J[1, j] * mgradient[i, 1]) rn = _bin_normalize(sval[i], smin, sdelta) r = _bin_index(rn, nbins, padding) cn = _bin_normalize(mval[i], mmin, mdelta) c = _bin_index(cn, nbins, padding) spline_arg = (c - 2) - cn for offset in range(-2, 3): val = _cubic_spline_derivative(spline_arg) for j in range(n): grad_pdf[r, c + offset, j] -= val * prod[j] spline_arg += 1.0 norm_factor = valid_points * mdelta if norm_factor > 0: for i in range(nbins): for j in range(nbins): for k in range(n): grad_pdf[i, j, k] /= norm_factor cdef _joint_pdf_gradient_sparse_3d(double[:] theta, Transform transform, double[:] sval, double[:] mval, double[:, :] sample_points, floating[:, :] mgradient, double smin, double sdelta, double mmin, double mdelta, int nbins, int padding, double[:, :, :] grad_pdf): r''' Gradient of the joint PDF w.r.t. transform parameters theta Computes the vector of partial derivatives of the joint histogram w.r.t. each transformation parameter. The transformation itself is not necessary to compute the gradient, but only its Jacobian. 
Parameters ---------- theta : array, shape (n,) parameters to compute the gradient at transform : instance of Transform the transformation with respect to whose parameters the gradient must be computed sval : array, shape (m,) sampled intensities from the static image at sampled_points mval : array, shape (m,) sampled intensities from the moving image at sampled_points sample_points : array, shape (m, 3) positions (in physical space) of the points the images were sampled at mgradient : array, shape (m, 3) the gradient of the moving image at the sample points smin : float the minimum observed intensity associated with the static image, which was used to define the joint PDF sdelta : float bin size associated with the intensities of the static image mmin : float the minimum observed intensity associated with the moving image, which was used to define the joint PDF mdelta : float bin size associated with the intensities of the moving image nbins : int number of histogram bins padding : int number of bins used as padding (the total bins used for padding at both sides of the histogram is actually 2*padding) grad_pdf : array, shape (nbins, nbins, len(theta)) the array to write the gradient to ''' cdef: cnp.npy_intp n = theta.shape[0] cnp.npy_intp m = sval.shape[0] cnp.npy_intp offset, valid_points int constant_jacobian = 0 cnp.npy_intp i, j, r, c double rn, cn double val, spline_arg, norm_factor double[:, :] J = np.empty(shape=(3, n), dtype=np.float64) double[:] prod = np.empty(shape=(n,), dtype=np.float64) grad_pdf[...] = 0 with nogil: valid_points = 0 for i in range(m): valid_points += 1 if constant_jacobian == 0: constant_jacobian = transform._jacobian(theta, sample_points[i], J) for j in range(n): prod[j] = (J[0, j] * mgradient[i, 0] + J[1, j] * mgradient[i, 1] + J[2, j] * mgradient[i, 2]) rn = _bin_normalize(sval[i], smin, sdelta) r = _bin_index(rn, nbins, padding) cn = _bin_normalize(mval[i], mmin, mdelta) c = _bin_index(cn, nbins, padding) spline_arg = (c - 2) - cn for offset in range(-2, 3): val = _cubic_spline_derivative(spline_arg) for j in range(n): grad_pdf[r, c + offset, j] -= val * prod[j] spline_arg += 1.0 norm_factor = valid_points * mdelta if norm_factor > 0: for i in range(nbins): for j in range(nbins): for k in range(n): grad_pdf[i, j, k] /= norm_factor def compute_parzen_mi(double[:, :] joint, double[:, :, :] joint_gradient, double[:] smarginal, double[:] mmarginal, double[:] mi_gradient): r""" Computes the mutual information and its gradient (if requested) Parameters ---------- joint : array, shape (nbins, nbins) the joint intensity distribution joint_gradient : array, shape (nbins, nbins, n) the gradient of the joint distribution w.r.t. the transformation parameters smarginal : array, shape (nbins,) the marginal intensity distribution of the static image mmarginal : array, shape (nbins,) the marginal intensity distribution of the moving image mi_gradient : array, shape (n,) the buffer in which to write the gradient of the mutual information. 
If None, the gradient is not computed """ cdef: double epsilon = 2.2204460492503131e-016 double metric_value cnp.npy_intp nrows = joint.shape[0] cnp.npy_intp ncols = joint.shape[1] cnp.npy_intp n = joint_gradient.shape[2] with nogil: mi_gradient[:] = 0 metric_value = 0 for i in range(nrows): for j in range(ncols): if joint[i, j] < epsilon or mmarginal[j] < epsilon: continue factor = log(joint[i, j] / mmarginal[j]) if mi_gradient is not None: for k in range(n): mi_gradient[k] += joint_gradient[i, j, k] * factor if smarginal[i] > epsilon: metric_value += joint[i, j] * (factor - log(smarginal[i])) return metric_value def sample_domain_regular(int k, int[:] shape, double[:, :] grid2world, double sigma=0.25, int seed=1234): r""" Take floor(total_voxels/k) samples from a (2D or 3D) grid The sampling is made by taking all pixels whose index (in lexicographical order) is a multiple of k. Each selected point is slightly perturbed by adding a realization of a normally distributed random variable and then mapped to physical space by the given grid-to-space transform. The lexicographical order of a pixels in a grid of shape (a, b, c) is defined by assigning to each voxel position (i, j, k) the integer index F((i, j, k)) = i * (b * c) + j * (c) + k and sorting increasingly by this index. Parameters ---------- k : int the sampling rate, as described before shape : array, shape (dim,) the shape of the grid to be sampled grid2world : array, shape (dim+1, dim+1) the grid-to-space transform sigma : float the standard deviation of the Normal random distortion to be applied to the sampled points Returns ------- samples : array, shape (total_pixels//k, dim) the matrix whose rows are the sampled points Example ------- >>> from dipy.align.parzenhist import sample_domain_regular >>> import dipy.align.vector_fields as vf >>> shape = np.array((10, 10), dtype=np.int32) >>> sigma = 0 >>> dim = len(shape) >>> grid2world = np.eye(dim+1) >>> n = shape[0]*shape[1] >>> k = 2 >>> samples = sample_domain_regular(k, shape, grid2world, sigma) >>> (samples.shape[0], samples.shape[1]) == (n//k, dim) True >>> isamples = np.array(samples, dtype=np.int32) >>> indices = (isamples[:, 0] * shape[1] + isamples[:, 1]) >>> len(set(indices)) == len(indices) True >>> (indices%k).sum() 0 """ cdef: cnp.npy_intp i, dim, n, m, slice_size double s, r, c double[:, :] samples dim = len(shape) if not vf.is_valid_affine(grid2world, dim): raise ValueError("Invalid grid-to-space matrix") random.seed(seed) if dim == 2: n = shape[0] * shape[1] m = n // k samples = random.randn(m, dim) * sigma with nogil: for i in range(m): r = ((i * k) // shape[1]) + samples[i, 0] c = ((i * k) % shape[1]) + samples[i, 1] samples[i, 0] = _apply_affine_2d_x0(r, c, 1, grid2world) samples[i, 1] = _apply_affine_2d_x1(r, c, 1, grid2world) else: slice_size = shape[1] * shape[2] n = shape[0] * slice_size m = n // k samples = random.randn(m, dim) * sigma with nogil: for i in range(m): s = ((i * k) // slice_size) + samples[i, 0] r = (((i * k) % slice_size) // shape[2]) + samples[i, 1] c = (((i * k) % slice_size) % shape[2]) + samples[i, 2] samples[i, 0] = _apply_affine_3d_x0(s, r, c, 1, grid2world) samples[i, 1] = _apply_affine_3d_x1(s, r, c, 1, grid2world) samples[i, 2] = _apply_affine_3d_x2(s, r, c, 1, grid2world) return np.asarray(samples) dipy-0.13.0/dipy/align/reslice.py000066400000000000000000000075311317371701200166070ustar00rootroot00000000000000from multiprocessing import Pool, cpu_count import warnings import numpy as np from scipy.ndimage import affine_transform def 
_affine_transform(kwargs): return affine_transform(**kwargs) def reslice(data, affine, zooms, new_zooms, order=1, mode='constant', cval=0, num_processes=1): """Reslice data with new voxel resolution defined by ``new_zooms`` Parameters ---------- data : array, shape (I,J,K) or (I,J,K,N) 3d volume or 4d volume with datasets affine : array, shape (4,4) mapping from voxel coordinates to world coordinates zooms : tuple, shape (3,) voxel size for (i,j,k) dimensions new_zooms : tuple, shape (3,) new voxel size for (i,j,k) after resampling order : int, from 0 to 5 order of interpolation for resampling/reslicing, 0 nearest interpolation, 1 trilinear etc.. if you don't want any smoothing 0 is the option you need. mode : string ('constant', 'nearest', 'reflect' or 'wrap') Points outside the boundaries of the input are filled according to the given mode. cval : float Value used for points outside the boundaries of the input if mode='constant'. num_processes : int Split the calculation to a pool of children processes. This only applies to 4D `data` arrays. If a positive integer then it defines the size of the multiprocessing pool that will be used. If 0, then the size of the pool will equal the number of cores available. Returns ------- data2 : array, shape (I,J,K) or (I,J,K,N) datasets resampled into isotropic voxel size affine2 : array, shape (4,4) new affine for the resampled image Examples -------- >>> import nibabel as nib >>> from dipy.align.reslice import reslice >>> from dipy.data import get_data >>> fimg = get_data('aniso_vox') >>> img = nib.load(fimg) >>> data = img.get_data() >>> data.shape == (58, 58, 24) True >>> affine = img.affine >>> zooms = img.header.get_zooms()[:3] >>> zooms (4.0, 4.0, 5.0) >>> new_zooms = (3.,3.,3.) >>> new_zooms (3.0, 3.0, 3.0) >>> data2, affine2 = reslice(data, affine, zooms, new_zooms) >>> data2.shape == (77, 77, 40) True """ # We are suppressing warnings emitted by scipy >= 0.18, # described in https://github.com/nipy/dipy/issues/1107. 
# These warnings are not relevant to us, as long as our offset # input to scipy's affine_transform is [0, 0, 0] warnings.simplefilter("ignore") new_zooms = np.array(new_zooms, dtype='f8') zooms = np.array(zooms, dtype='f8') R = new_zooms / zooms new_shape = zooms / new_zooms * np.array(data.shape[:3]) new_shape = tuple(np.round(new_shape).astype('i8')) kwargs = {'matrix': R, 'output_shape': new_shape, 'order': order, 'mode': mode, 'cval': cval} if data.ndim == 3: data2 = affine_transform(input=data, **kwargs) if data.ndim == 4: data2 = np.zeros(new_shape+(data.shape[-1],), data.dtype) if not num_processes: num_processes = cpu_count() if num_processes < 2: for i in range(data.shape[-1]): affine_transform(input=data[..., i], output=data2[..., i], **kwargs) else: params = [] for i in range(data.shape[-1]): _kwargs = {'input': data[..., i]} _kwargs.update(kwargs) params.append(_kwargs) pool = Pool(num_processes) for i, result in enumerate(pool.imap(_affine_transform, params)): data2[..., i] = result pool.close() Rx = np.eye(4) Rx[:3, :3] = np.diag(R) affine2 = np.dot(affine, Rx) # Turn warnings back on: warnings.filterwarnings('always') return data2, affine2 dipy-0.13.0/dipy/align/scalespace.py000066400000000000000000000414671317371701200172720ustar00rootroot00000000000000from dipy.align import floating import numpy as np import numpy.linalg as npl import scipy.ndimage.filters as filters class ScaleSpace(object): def __init__(self, image, num_levels, image_grid2world=None, input_spacing=None, sigma_factor=0.2, mask0=False): r""" ScaleSpace Computes the Scale Space representation of an image. The scale space is simply a list of images produced by smoothing the input image with a Gaussian kernel with increasing smoothing parameter. If the image's voxels are isotropic, the smoothing will be the same along all directions: at level L = 0, 1, ..., the sigma is given by $s * ( 2^L - 1 )$. If the voxel dimensions are not isotropic, then the smoothing is weaker along low resolution directions. Parameters ---------- image : array, shape (r,c) or (s, r, c) where s is the number of slices, r is the number of rows and c is the number of columns of the input image. num_levels : int the desired number of levels (resolutions) of the scale space image_grid2world : array, shape (dim + 1, dim + 1), optional the grid-to-space transform of the image grid. The default is the identity matrix input_spacing : array, shape (dim,), optional the spacing (voxel size) between voxels in physical space. The default is 1.0 along all axes sigma_factor : float, optional the smoothing factor to be used in the construction of the scale space. The default is 0.2 mask0 : Boolean, optional if True, all smoothed images will be zero at all voxels that are zero in the input image. The default is False. """ self.dim = len(image.shape) self.num_levels = num_levels input_size = np.array(image.shape) if mask0: mask = np.asarray(image > 0, dtype=np.int32) # Normalize input image to [0,1] img = (image - image.min())/(image.max() - image.min()) if mask0: img *= mask # The properties are saved in separate lists. 
Insert input image # properties at the first level of the scale space self.images = [img.astype(floating)] self.domain_shapes = [input_size.astype(np.int32)] if input_spacing is None: input_spacing = np.ones((self.dim,), dtype=np.int32) self.spacings = [input_spacing] self.scalings = [np.ones(self.dim)] self.affines = [image_grid2world] self.sigmas = [np.zeros(self.dim)] if image_grid2world is not None: self.affine_invs = [npl.inv(image_grid2world)] else: self.affine_invs = [None] # Compute the rest of the levels min_spacing = np.min(input_spacing) for i in range(1, num_levels): scaling_factor = 2 ** i scaling = np.ndarray((self.dim + 1,)) # Note: the minimum below is present in ANTS to prevent the scaling # from being too large (making the sub-sampled image to be too # small) this makes the sub-sampled image at least 32 voxels at # each direction it is risky to make this decision based on image # size, though (we need to investigate more the effect of this) # scaling = np.minimum(scaling_factor * min_spacing /input_spacing, # input_size / 32) scaling = scaling_factor * min_spacing / input_spacing output_spacing = input_spacing * scaling extended = np.append(scaling, [1]) if image_grid2world is not None: affine = image_grid2world.dot(np.diag(extended)) else: affine = np.diag(extended) output_size = input_size * (input_spacing / output_spacing) + 0.5 output_size = output_size.astype(np.int32) sigmas = sigma_factor * (output_spacing / input_spacing - 1.0) # Filter along each direction with the appropriate sigma filtered = filters.gaussian_filter(image, sigmas) filtered = ((filtered - filtered.min()) / (filtered.max() - filtered.min())) if mask0: filtered *= mask # Add current level to the scale space self.images.append(filtered.astype(floating)) self.domain_shapes.append(output_size) self.spacings.append(output_spacing) self.scalings.append(scaling) self.affines.append(affine) self.affine_invs.append(npl.inv(affine)) self.sigmas.append(sigmas) def get_expand_factors(self, from_level, to_level): r"""Ratio of voxel size from pyramid level from_level to to_level Given two scale space resolutions a = from_level, b = to_level, returns the ratio of voxels size at level b to voxel size at level a (the factor that must be used to multiply voxels at level a to 'expand' them to level b). Parameters ---------- from_level : int, 0 <= from_level < L, (L = number of resolutions) the resolution to expand voxels from to_level : int, 0 <= to_level < from_level the resolution to expand voxels to Returns ------- factors : array, shape (k,), k = 2, 3 the expand factors (a scalar for each voxel dimension) """ factors = (np.array(self.spacings[to_level]) / np.array(self.spacings[from_level])) return factors def print_level(self, level): r"""Prints properties of a pyramid level Prints the properties of a level of this scale space to standard output Parameters ---------- level : int, 0 <= from_level < L, (L = number of resolutions) the scale space level to be printed """ print('Domain shape: ', self.get_domain_shape(level)) print('Spacing: ', self.get_spacing(level)) print('Scaling: ', self.get_scaling(level)) print('Affine: ', self.get_affine(level)) print('Sigmas: ', self.get_sigmas(level)) def _get_attribute(self, attribute, level): r"""Returns an attribute from the Scale Space at a given level Returns the level-th element of attribute if level is a valid level of this scale space. Otherwise, returns None. 
Parameters ---------- attribute : list the attribute to retrieve the level-th element from level : int, the index of the required element from attribute. Returns ------- attribute[level] : object the requested attribute if level is valid, else it raises a ValueError """ if 0 <= level < self.num_levels: return attribute[level] raise ValueError('Invalid pyramid level: '+str(level)) def get_image(self, level): r"""Smoothed image at a given level Returns the smoothed image at the requested level in the Scale Space. Parameters ---------- level : int, 0 <= from_level < L, (L = number of resolutions) the scale space level to get the smooth image from Returns ------- the smooth image at the requested resolution or None if an invalid level was requested """ return self._get_attribute(self.images, level) def get_domain_shape(self, level): r"""Shape the sub-sampled image must have at a particular level Returns the shape the sub-sampled image must have at a particular resolution of the scale space (note that this object does not explicitly subsample the smoothed images, but only provides the properties the sub-sampled images must have). Parameters ---------- level : int, 0 <= from_level < L, (L = number of resolutions) the scale space level to get the sub-sampled shape from Returns ------- the sub-sampled shape at the requested resolution or None if an invalid level was requested """ return self._get_attribute(self.domain_shapes, level) def get_spacing(self, level): r"""Spacings the sub-sampled image must have at a particular level Returns the spacings (voxel sizes) the sub-sampled image must have at a particular resolution of the scale space (note that this object does not explicitly subsample the smoothed images, but only provides the properties the sub-sampled images must have). Parameters ---------- level : int, 0 <= from_level < L, (L = number of resolutions) the scale space level to get the sub-sampled shape from Returns ------- the spacings (voxel sizes) at the requested resolution or None if an invalid level was requested """ return self._get_attribute(self.spacings, level) def get_scaling(self, level): r"""Adjustment factor for input-spacing to reflect voxel sizes at level Returns the scaling factor that needs to be applied to the input spacing (the voxel sizes of the image at level 0 of the scale space) to transform them to voxel sizes at the requested level. Parameters ---------- level : int, 0 <= from_level < L, (L = number of resolutions) the scale space level to get the scalings from Returns ------- the scaling factors from the original spacing to the spacings at the requested level """ return self._get_attribute(self.scalings, level) def get_affine(self, level): r"""Voxel-to-space transformation at a given level Returns the voxel-to-space transformation associated with the sub-sampled image at a particular resolution of the scale space (note that this object does not explicitly subsample the smoothed images, but only provides the properties the sub-sampled images must have). 
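        For illustration, a minimal sketch I added (the synthetic image,
        level count and level index are arbitrary)::

            import numpy as np
            from dipy.align.scalespace import ScaleSpace

            img = np.random.rand(32, 32, 32)
            ss = ScaleSpace(img, num_levels=3)
            affine1 = ss.get_affine(1)   # 4x4 voxel-to-space matrix of level 1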
Parameters ---------- level : int, 0 <= from_level < L, (L = number of resolutions) the scale space level to get affine transform from Returns ------- the affine (voxel-to-space) transform at the requested resolution or None if an invalid level was requested """ return self._get_attribute(self.affines, level) def get_affine_inv(self, level): r"""Space-to-voxel transformation at a given level Returns the space-to-voxel transformation associated with the sub-sampled image at a particular resolution of the scale space (note that this object does not explicitly subsample the smoothed images, but only provides the properties the sub-sampled images must have). Parameters ---------- level : int, 0 <= from_level < L, (L = number of resolutions) the scale space level to get the inverse transform from Returns ------- the inverse (space-to-voxel) transform at the requested resolution or None if an invalid level was requested """ return self._get_attribute(self.affine_invs, level) def get_sigmas(self, level): r"""Smoothing parameters used at a given level Returns the smoothing parameters (a scalar for each axis) used at the requested level of the scale space Parameters ---------- level : int, 0 <= from_level < L, (L = number of resolutions) the scale space level to get the smoothing parameters from Returns ------- the smoothing parameters at the requested level """ return self._get_attribute(self.sigmas, level) class IsotropicScaleSpace(ScaleSpace): def __init__(self, image, factors, sigmas, image_grid2world=None, input_spacing=None, mask0=False): r""" IsotropicScaleSpace Computes the Scale Space representation of an image using isotropic smoothing kernels for all scales. The scale space is simply a list of images produced by smoothing the input image with a Gaussian kernel with different smoothing parameters. This specialization of ScaleSpace allows the user to provide custom scale and smoothing factors for all scales. Parameters ---------- image : array, shape (r,c) or (s, r, c) where s is the number of slices, r is the number of rows and c is the number of columns of the input image. factors : list of floats custom scale factors to build the scale space (one factor for each scale). sigmas : list of floats custom smoothing parameter to build the scale space (one parameter for each scale). image_grid2world : array, shape (dim + 1, dim + 1), optional the grid-to-space transform of the image grid. The default is the identity matrix. input_spacing : array, shape (dim,), optional the spacing (voxel size) between voxels in physical space. The default if 1.0 along all axes. mask0 : Boolean, optional if True, all smoothed images will be zero at all voxels that are zero in the input image. The default is False. """ self.dim = len(image.shape) self.num_levels = len(factors) if len(sigmas) != self.num_levels: raise ValueError("sigmas and factors must have the same length") input_size = np.array(image.shape) if mask0: mask = np.asarray(image > 0, dtype=np.int32) # Normalize input image to [0,1] img = ((image.astype(np.float64) - image.min()) / (image.max() - image.min())) if mask0: img *= mask # The properties are saved in separate lists. 
Insert input image # properties at the first level of the scale space self.images = [img.astype(floating)] self.domain_shapes = [input_size.astype(np.int32)] if input_spacing is None: input_spacing = np.ones((self.dim,), dtype=np.int32) self.spacings = [input_spacing] self.scalings = [np.ones(self.dim)] self.affines = [image_grid2world] self.sigmas = [np.ones(self.dim) * sigmas[self.num_levels - 1]] if image_grid2world is not None: self.affine_invs = [npl.inv(image_grid2world)] else: self.affine_invs = [None] # Compute the rest of the levels min_index = np.argmin(input_spacing) for i in range(1, self.num_levels): factor = factors[self.num_levels - 1 - i] shrink_factors = np.zeros(self.dim) new_spacing = np.zeros(self.dim) shrink_factors[min_index] = factor new_spacing[min_index] = input_spacing[min_index] * factor for j in range(self.dim): if j != min_index: # Select the factor that maximizes isotropy shrink_factors[j] = factor new_spacing[j] = input_spacing[j] * factor min_diff = np.abs(new_spacing[j] - new_spacing[min_index]) for f in range(1, factor): diff = input_spacing[j] * f - new_spacing[min_index] diff = np.abs(diff) if diff < min_diff: shrink_factors[j] = f new_spacing[j] = input_spacing[j] * f min_diff = diff extended = np.append(shrink_factors, [1]) if image_grid2world is not None: affine = image_grid2world.dot(np.diag(extended)) else: affine = np.diag(extended) output_size = (input_size / shrink_factors).astype(np.int32) new_sigmas = np.ones(self.dim) * sigmas[self.num_levels - i - 1] # Filter along each direction with the appropriate sigma filtered = filters.gaussian_filter(image.astype(np.float64), new_sigmas) filtered = ((filtered.astype(np.float64) - filtered.min()) / (filtered.max() - filtered.min())) if mask0: filtered *= mask # Add current level to the scale space self.images.append(filtered.astype(floating)) self.domain_shapes.append(output_size) self.spacings.append(new_spacing) self.scalings.append(shrink_factors) self.affines.append(affine) self.affine_invs.append(npl.inv(affine)) self.sigmas.append(new_sigmas) dipy-0.13.0/dipy/align/streamlinear.py000066400000000000000000000522471317371701200176530ustar00rootroot00000000000000import abc import numpy as np from dipy.utils.six import with_metaclass from dipy.core.optimize import Optimizer from dipy.align.bundlemin import (_bundle_minimum_distance, distance_matrix_mdf) from dipy.tracking.streamline import (transform_streamlines, unlist_streamlines, center_streamlines) from dipy.core.geometry import (compose_transformations, compose_matrix, decompose_matrix) from dipy.utils.six import string_types MAX_DIST = 1e10 LOG_MAX_DIST = np.log(MAX_DIST) class StreamlineDistanceMetric(with_metaclass(abc.ABCMeta, object)): def __init__(self, num_threads=None): """ An abstract class for the metric used for streamline registration If the two sets of streamlines match exactly then method ``distance`` of this object should be minimum. Parameters ---------- num_threads : int Number of threads. If None (default) then all available threads will be used. Only metrics using OpenMP will use this variable. 
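        A concrete subclass only has to implement ``setup`` and ``distance``.
        An illustrative sketch I added (it simply mirrors the
        sum-of-distances metric defined later in this module and relies on
        the helpers imported at the top of the file)::

            class MySumMetric(StreamlineDistanceMetric):

                def setup(self, static, moving):
                    self.static = static
                    self.moving = moving

                def distance(self, xopt):
                    # apply the candidate affine and sum the MDF distance matrix
                    aff = compose_matrix44(xopt)
                    moved = transform_streamlines(self.moving, aff)
                    return np.sum(distance_matrix_mdf(self.static, moved))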
""" self.static = None self.moving = None self.num_threads = num_threads @abc.abstractmethod def setup(self, static, moving): pass @abc.abstractmethod def distance(self, xopt): """ calculate distance for current set of parameters """ pass class BundleMinDistanceMetric(StreamlineDistanceMetric): """ Bundle-based Minimum Distance aka BMD This is the cost function used by the StreamlineLinearRegistration Methods ------- setup(static, moving) distance(xopt) References ---------- .. [Garyfallidis14] Garyfallidis et al., "Direct native-space fiber bundle alignment for group comparisons", ISMRM, 2014. """ def setup(self, static, moving): """ Setup static and moving sets of streamlines Parameters ---------- static : streamlines Fixed or reference set of streamlines. moving : streamlines Moving streamlines. num_threads : int Number of threads. If None (default) then all available threads will be used. Notes ----- Call this after the object is initiated and before distance. """ self._set_static(static) self._set_moving(moving) def _set_static(self, static): static_centered_pts, st_idx = unlist_streamlines(static) self.static_centered_pts = np.ascontiguousarray(static_centered_pts, dtype=np.float64) self.block_size = st_idx[0] def _set_moving(self, moving): self.moving_centered_pts, _ = unlist_streamlines(moving) def distance(self, xopt): """ Distance calculated from this Metric Parameters ---------- xopt : sequence List of affine parameters as an 1D vector, """ return bundle_min_distance_fast(xopt, self.static_centered_pts, self.moving_centered_pts, self.block_size, self.num_threads) class BundleMinDistanceMatrixMetric(StreamlineDistanceMetric): """ Bundle-based Minimum Distance aka BMD This is the cost function used by the StreamlineLinearRegistration Methods ------- setup(static, moving) distance(xopt) Notes ----- The difference with BundleMinDistanceMetric is that this creates the entire distance matrix and therefore requires more memory. """ def setup(self, static, moving): """ Setup static and moving sets of streamlines Parameters ---------- static : streamlines Fixed or reference set of streamlines. moving : streamlines Moving streamlines. Notes ----- Call this after the object is initiated and before distance. Num_threads is not used in this class. Use ``BundleMinDistanceMetric`` for a faster, threaded and less memory hungry metric """ self.static = static self.moving = moving def distance(self, xopt): """ Distance calculated from this Metric Parameters ---------- xopt : sequence List of affine parameters as an 1D vector """ return bundle_min_distance(xopt, self.static, self.moving) class BundleSumDistanceMatrixMetric(BundleMinDistanceMatrixMetric): """ Bundle-based Sum Distance aka BMD This is a cost function that can be used by the StreamlineLinearRegistration class. Methods ------- setup(static, moving) distance(xopt) Notes ----- The difference with BundleMinDistanceMatrixMetric is that it uses uses the sum of the distance matrix and not the sum of mins. """ def distance(self, xopt): """ Distance calculated from this Metric Parameters ---------- xopt : sequence List of affine parameters as an 1D vector """ return bundle_sum_distance(xopt, self.static, self.moving) class StreamlineLinearRegistration(object): def __init__(self, metric=None, x0="rigid", method='L-BFGS-B', bounds=None, verbose=False, options=None, evolution=False, num_threads=None): r""" Linear registration of 2 sets of streamlines [Garyfallidis14]_. 
Parameters ---------- metric : StreamlineDistanceMetric, If None and fast is False then the BMD distance is used. If fast is True then a faster implementation of BMD is used. Otherwise, use the given distance metric. x0 : array or int or str Initial parametrization for the optimization. If 1D array with: a) 6 elements then only rigid registration is performed with the 3 first elements for translation and 3 for rotation. b) 7 elements also isotropic scaling is performed (similarity). c) 12 elements then translation, rotation (in degrees), scaling and shearing is performed (affine). Here is an example of x0 with 12 elements: ``x0=np.array([0, 10, 0, 40, 0, 0, 2., 1.5, 1, 0.1, -0.5, 0])`` This has translation (0, 10, 0), rotation (40, 0, 0) in degrees, scaling (2., 1.5, 1) and shearing (0.1, -0.5, 0). If int: a) 6 ``x0 = np.array([0, 0, 0, 0, 0, 0])`` b) 7 ``x0 = np.array([0, 0, 0, 0, 0, 0, 1.])`` c) 12 ``x0 = np.array([0, 0, 0, 0, 0, 0, 1., 1., 1, 0, 0, 0])`` If str: a) "rigid" ``x0 = np.array([0, 0, 0, 0, 0, 0])`` b) "similarity" ``x0 = np.array([0, 0, 0, 0, 0, 0, 1.])`` c) "affine" ``x0 = np.array([0, 0, 0, 0, 0, 0, 1., 1., 1, 0, 0, 0])`` method : str, 'L_BFGS_B' or 'Powell' optimizers can be used. Default is 'L_BFGS_B'. bounds : list of tuples or None, If method == 'L_BFGS_B' then we can use bounded optimization. For example for the six parameters of rigid rotation we can set the bounds = [(-30, 30), (-30, 30), (-30, 30), (-45, 45), (-45, 45), (-45, 45)] That means that we have set the bounds for the three translations and three rotation axes (in degrees). verbose : bool, If True then information about the optimization is shown. options : None or dict, Extra options to be used with the selected method. evolution : boolean If True save the transformation for each iteration of the optimizer. Default is False. Supported only with Scipy >= 0.11. num_threads : int Number of threads. If None (default) then all available threads will be used. Only metrics using OpenMP will use this variable. References ---------- .. [Garyfallidis14] Garyfallidis et al., "Direct native-space fiber bundle alignment for group comparisons", ISMRM, 2014. """ self.x0 = self._set_x0(x0) self.metric = metric if self.metric is None: self.metric = BundleMinDistanceMetric(num_threads=num_threads) self.verbose = verbose self.method = method if self.method not in ['Powell', 'L-BFGS-B']: raise ValueError('Only Powell and L-BFGS-B can be used') self.bounds = bounds self.options = options self.evolution = evolution def optimize(self, static, moving, mat=None): """ Find the minimum of the provided metric. Parameters ---------- static : streamlines Reference or fixed set of streamlines. moving : streamlines Moving set of streamlines. mat : array Transformation (4, 4) matrix to start the registration. ``mat`` is applied to moving. Default value None which means that initial transformation will be generated by shifting the centers of moving and static sets of streamlines to the origin. Returns ------- map : StreamlineRegistrationMap """ msg = 'need to have the same number of points. 
Use ' msg += 'set_number_of_points from dipy.tracking.streamline' if not np.all(np.array(list(map(len, static))) == static[0].shape[0]): raise ValueError('Static streamlines ' + msg) if not np.all(np.array(list(map(len, moving))) == moving[0].shape[0]): raise ValueError('Moving streamlines ' + msg) if not np.all(np.array(list(map(len, moving))) == static[0].shape[0]): raise ValueError('Static and moving streamlines ' + msg) if mat is None: static_centered, static_shift = center_streamlines(static) moving_centered, moving_shift = center_streamlines(moving) static_mat = compose_matrix44([static_shift[0], static_shift[1], static_shift[2], 0, 0, 0]) moving_mat = compose_matrix44([-moving_shift[0], -moving_shift[1], -moving_shift[2], 0, 0, 0]) else: static_centered = static moving_centered = transform_streamlines(moving, mat) static_mat = np.eye(4) moving_mat = mat self.metric.setup(static_centered, moving_centered) distance = self.metric.distance if self.method == 'Powell': if self.options is None: self.options = {'xtol': 1e-6, 'ftol': 1e-6, 'maxiter': 1e6} opt = Optimizer(distance, self.x0.tolist(), method=self.method, options=self.options, evolution=self.evolution) if self.method == 'L-BFGS-B': if self.options is None: self.options = {'maxcor': 10, 'ftol': 1e-7, 'gtol': 1e-5, 'eps': 1e-8, 'maxiter': 100} opt = Optimizer(distance, self.x0.tolist(), method=self.method, bounds=self.bounds, options=self.options, evolution=self.evolution) if self.verbose: opt.print_summary() opt_mat = compose_matrix44(opt.xopt) mat = compose_transformations(moving_mat, opt_mat, static_mat) mat_history = [] if opt.evolution is not None: for vecs in opt.evolution: mat_history.append( compose_transformations(moving_mat, compose_matrix44(vecs), static_mat)) srm = StreamlineRegistrationMap(mat, opt.xopt, opt.fopt, mat_history, opt.nfev, opt.nit) del opt return srm def _set_x0(self, x0): """ check if input is of correct type""" if hasattr(x0, 'ndim'): if len(x0) not in [6, 7, 12]: msg = 'Only 1D arrays of 6, 7 and 12 elements are allowed' raise ValueError(msg) if x0.ndim != 1: raise ValueError("Array should have only one dimension") return x0 if isinstance(x0, string_types): if x0.lower() == 'rigid': return np.zeros(6) if x0.lower() == 'similarity': return np.array([0, 0, 0, 0, 0, 0, 1.]) if x0.lower() == 'affine': return np.array([0, 0, 0, 0, 0, 0, 1., 1., 1., 0, 0, 0]) if isinstance(x0, int): if x0 not in [6, 7, 12]: msg = 'Only 6, 7 and 12 are accepted as integers' raise ValueError(msg) else: if x0 == 6: return np.zeros(6) if x0 == 7: return np.array([0, 0, 0, 0, 0, 0, 1.]) if x0 == 12: return np.array([0, 0, 0, 0, 0, 0, 1., 1., 1., 0, 0, 0]) raise ValueError('Wrong input') class StreamlineRegistrationMap(object): def __init__(self, matopt, xopt, fopt, matopt_history, funcs, iterations): r""" A map holding the optimum affine matrix and some other parameters of the optimization Parameters ---------- matrix : array, 4x4 affine matrix which transforms the moving to the static streamlines xopt : array, 1d array with the parameters of the transformation after centering fopt : float, final value of the metric matrix_history : array All transformation matrices created during the optimization funcs : int, Number of function evaluations of the optimizer iterations : int Number of iterations of the optimizer """ self.matrix = matopt self.xopt = xopt self.fopt = fopt self.matrix_history = matopt_history self.funcs = funcs self.iterations = iterations def transform(self, moving): """ Transform moving streamlines to the static. 
Parameters ---------- moving : streamlines Returns ------- moved : streamlines Notes ----- All this does is apply ``self.matrix`` to the input streamlines. """ return transform_streamlines(moving, self.matrix) def bundle_sum_distance(t, static, moving, num_threads=None): """ MDF distance optimization function (SUM) We minimize the distance between moving streamlines as they align with the static streamlines. Parameters ----------- t : ndarray t is a vector of of affine transformation parameters with size at least 6. If size is 6, t is interpreted as translation + rotation. If size is 7, t is interpreted as translation + rotation + isotropic scaling. If size is 12, t is interpreted as translation + rotation + scaling + shearing. static : list Static streamlines moving : list Moving streamlines. These will be transform to align with the static streamlines Returns ------- cost: float """ aff = compose_matrix44(t) moving = transform_streamlines(moving, aff) d01 = distance_matrix_mdf(static, moving) return np.sum(d01) def bundle_min_distance(t, static, moving): """ MDF-based pairwise distance optimization function (MIN) We minimize the distance between moving streamlines as they align with the static streamlines. Parameters ----------- t : ndarray t is a vector of of affine transformation parameters with size at least 6. If size is 6, t is interpreted as translation + rotation. If size is 7, t is interpreted as translation + rotation + isotropic scaling. If size is 12, t is interpreted as translation + rotation + scaling + shearing. static : list Static streamlines moving : list Moving streamlines. num_threads : int Number of threads. If None (default) then all available threads will be used. Returns ------- cost: float """ aff = compose_matrix44(t) moving = transform_streamlines(moving, aff) d01 = distance_matrix_mdf(static, moving) rows, cols = d01.shape return 0.25 * (np.sum(np.min(d01, axis=0)) / float(cols) + np.sum(np.min(d01, axis=1)) / float(rows)) ** 2 def bundle_min_distance_fast(t, static, moving, block_size, num_threads): """ MDF-based pairwise distance optimization function (MIN) We minimize the distance between moving streamlines as they align with the static streamlines. Parameters ----------- t : array 1D array. t is a vector of of affine transformation parameters with size at least 6. If size is 6, t is interpreted as translation + rotation. If size is 7, t is interpreted as translation + rotation + isotropic scaling. If size is 12, t is interpreted as translation + rotation + scaling + shearing. static : array N*M x 3 array. All the points of the static streamlines. With order of streamlines intact. Where N is the number of streamlines and M is the number of points per streamline. moving : array K*M x 3 array. All the points of the moving streamlines. With order of streamlines intact. Where K is the number of streamlines and M is the number of points per streamline. block_size : int Number of points per streamline. All streamlines in static and moving should have the same number of points M. num_threads : int Number of threads. If None (default) then all available threads will be used. Returns ------- cost: float Notes ----- This is a faster implementation of ``bundle_min_distance``, which requires that all the points of each streamline are allocated into an ndarray (of shape N*M by 3, with N the number of points per streamline and M the number of streamlines). This can be done by calling `dipy.tracking.streamlines.unlist_streamlines`. 
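    For example (an added sketch; the shapes are illustrative)::

        import numpy as np
        from dipy.tracking.streamline import unlist_streamlines

        bundle = [np.random.rand(12, 3) for _ in range(4)]  # 4 streamlines, 12 points each
        points, offsets = unlist_streamlines(bundle)        # points has shape (48, 3)
        points = np.ascontiguousarray(points, dtype=np.float64)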
""" aff = compose_matrix44(t) moving = np.dot(aff[:3, :3], moving.T).T + aff[:3, 3] moving = np.ascontiguousarray(moving, dtype=np.float64) rows = static.shape[0] / block_size cols = moving.shape[0] / block_size return _bundle_minimum_distance(static, moving, rows, cols, block_size, num_threads) def _threshold(x, th): return np.maximum(np.minimum(x, th), -th) def compose_matrix44(t, dtype=np.double): """ Compose a 4x4 transformation matrix Parameters ----------- t : ndarray This is a 1D vector of of affine transformation parameters with size at least 6. If size is 6, t is interpreted as translation + rotation. If size is 7, t is interpreted as translation + rotation + isotropic scaling. If size is 12, t is interpreted as translation + rotation + scaling + shearing. Returns ------- T : ndarray Homogeneous transformation matrix of size 4x4. """ if isinstance(t, list): t = np.array(t) size = t.size if size not in [6, 7, 12]: raise ValueError('Accepted number of parameters is 6, 7 and 12') scale, shear, angles, translate = (None, ) * 4 if size in [6, 7, 12]: translate = _threshold(t[0:3], MAX_DIST) angles = np.deg2rad(t[3:6]) if size == 7: scale = np.array((t[6],) * 3) if size == 12: scale = t[6: 9] shear = t[9: 12] return compose_matrix(scale=scale, shear=shear, angles=angles, translate=translate) def decompose_matrix44(mat, size=12): """ Given a 4x4 homogeneous matrix return the parameter vector Parameters ----------- mat : array Homogeneous 4x4 transformation matrix size : int Size of output vector. 6 for rigid, 7 for similarity and 12 for affine. Default is 12. Returns ------- t : ndarray One dimensional ndarray of 6, 7 or 12 affine parameters. """ scale, shear, angles, translate, _ = decompose_matrix(mat) t = np.zeros(12) t[:3] = translate t[3: 6] = np.rad2deg(angles) if size == 6: return t[:6] if size == 7: t[6] = np.mean(scale) return t[:7] if size == 12: t[6: 9] = scale t[9: 12] = shear return t raise ValueError('Size can be 6, 7 or 12') dipy-0.13.0/dipy/align/sumsqdiff.pyx000066400000000000000000001067011317371701200173510ustar00rootroot00000000000000""" Utility functions used by the Sum of Squared Differences (SSD) metric """ import numpy as np cimport cython cimport numpy as cnp from .fused_types cimport floating, number cdef extern from "dpy_math.h" nogil: int dpy_isinf(double) double sqrt(double) @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) cdef void _solve_2d_symmetric_positive_definite(double* A, double* y, double det, double* out) nogil: r"""Solves a 2-variable symmetric positive-definite linear system The C implementation of the public-facing Python function ``solve_2d_symmetric_positive_definite``. 
Solves the symmetric positive-definite linear system $Mx = y$ given by:: M = [[A[0], A[1]], [A[1], A[2]]] Parameters ---------- A : array, shape (3,) the array containing the entries of the symmetric 2x2 matrix y : array, shape (2,) right-hand side of the system to be solved out : array, shape (2,) the array the output will be stored in """ out[1] = (A[0] * y[1] - A[1] * y[0]) / det out[0] = (y[0] - A[1] * out[1]) / A[0] def solve_2d_symmetric_positive_definite(A, y, double det): r"""Solves a 2-variable symmetric positive-definite linear system Solves the symmetric positive-definite linear system $Mx = y$ given by:: M = [[A[0], A[1]], [A[1], A[2]]] Parameters ---------- A : array, shape (3,) the array containing the entries of the symmetric 2x2 matrix y : array, shape (2,) right-hand side of the system to be solved Returns ------- out : array, shape (2,) the array the output will be stored in """ cdef: cnp.ndarray out = np.zeros(2, dtype=float) _solve_2d_symmetric_positive_definite( cnp.PyArray_DATA(np.ascontiguousarray(A, float)), cnp.PyArray_DATA(np.ascontiguousarray(y, float)), det, cnp.PyArray_DATA(out)) return np.asarray(out) @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) cdef int _solve_3d_symmetric_positive_definite(double* g, double* y, double tau, double* out) nogil: r"""Solves a 3-variable symmetric positive-definite linear system Solves the symmetric semi-positive-definite linear system $Mx = y$ given by $M = (g g^{T} + \tau I)$ The C implementation of the public-facing Python function ``solve_3d_symmetric_positive_definite``. Parameters ---------- g : array, shape (3,) the vector in the outer product above y : array, shape (3,) right-hand side of the system to be solved tau : double $\tau$ in $M = (g g^{T} + \tau I)$ out : array, shape (3,) the array the output will be stored in Returns ------- is_singular : int 1 if M is singular, otherwise 0 """ cdef: double a,b,c,d,e,f, y0, y1, y2, sub_det a = g[0] ** 2 + tau if a < 1e-9: return 1 b = g[0] * g[1] sub_det = (a * (g[1] ** 2 + tau) - b * b) if sub_det < 1e-9: return 1 c = g[0] * g[2] d = (a * (g[1] ** 2 + tau) - b * b) / a e = (a * (g[1] * g[2]) - b * c) / a f = (a * (g[2] ** 2 + tau) - c * c) / a - (e * e * a) / sub_det if f < 1e-9: return 1 y0 = y[0] y1 = (y[1] * a - y0 * b) / a y2 = (y[2] * a - c * y0) / a - (e * (y[1] * a - b * y0)) / sub_det out[2] = y2 / f out[1] = (y1 - e * out[2]) / d out[0] = (y0 - b * out[1] - c * out[2]) / a return 0 def solve_3d_symmetric_positive_definite(g, y, double tau): r"""Solves a 3-variable symmetric positive-definite linear system Solves the symmetric semi-positive-definite linear system $Mx = y$ given by $M = (g g^{T} + \tau I)$. 
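    A quick sanity check I added (the values are arbitrary)::

        import numpy as np
        from dipy.align.sumsqdiff import solve_3d_symmetric_positive_definite

        g = np.array([1.0, 2.0, 3.0])
        y = np.array([1.0, 0.0, 0.0])
        x, is_singular = solve_3d_symmetric_positive_definite(g, y, 0.5)
        M = np.outer(g, g) + 0.5 * np.eye(3)
        # expect is_singular == 0 and M.dot(x) close to y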
Parameters ---------- g : array, shape (3,) the vector in the outer product above y : array, shape (3,) right-hand side of the system to be solved tau : double $\tau$ in $M = (g g^{T} + \tau I)$ Returns ------- out : array, shape (3,) the array the output will be stored in is_singular : int 1 if M is singular, otherwise 0 """ cdef: cnp.ndarray out = np.zeros(3, dtype=float) int is_singular is_singular = _solve_3d_symmetric_positive_definite( cnp.PyArray_DATA(np.ascontiguousarray(g, float)), cnp.PyArray_DATA(np.ascontiguousarray(y, float)), tau, cnp.PyArray_DATA(out)) return np.asarray(out), is_singular @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) cpdef double iterate_residual_displacement_field_ssd_2d( floating[:, :] delta_field, floating[:, :] sigmasq_field, floating[:, :, :] grad, floating[:, :, :] target, double lambda_param, floating[:, :, :] displacement_field): r"""One iteration of a large linear system solver for 2D SSD registration Performs one iteration at one level of the Multi-resolution Gauss-Seidel solver proposed by Bruhn and Weickert [Bruhn05]. Parameters ---------- delta_field : array, shape (R, C) the difference between the static and moving image (the 'derivative w.r.t. time' in the optical flow model) sigmasq_field : array, shape (R, C) the variance of the gray level value at each voxel, according to the EM model (for SSD, it is 1 for all voxels). Inf and 0 values are processed specially to support infinite and zero variance. grad : array, shape (R, C, 2) the gradient of the moving image target : array, shape (R, C, 2) right-hand side of the linear system to be solved in the Weickert's multi-resolution algorithm lambda_param : float smoothness parameter of the objective function displacement_field : array, shape (R, C, 2) current displacement field to start the iteration from Returns ------- max_displacement : float the norm of the maximum change in the displacement field after the iteration References ---------- [Bruhn05] Andres Bruhn and Joachim Weickert, "Towards ultimate motion estimation: combining highest accuracy with real-time performance", 10th IEEE International Conference on Computer Vision, 2005. ICCV 2005. 
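    Added note (my reading of the code below): at each pixel the Gauss-Seidel
    step solves the 2x2 symmetric system $M d = y$ with
    $M = \nabla m \nabla m^T + \sigma^2 \lambda |N| I$ and
    $y = b + \sigma^2 \lambda \sum_{neighbors} d$, where $\nabla m$ is the
    moving image gradient at the pixel, $\sigma^2$ is the entry of
    ``sigmasq_field``, $|N|$ is the number of in-grid neighbors, and $b$ is
    the corresponding row of ``target`` (or $\delta \nabla m$ when ``target``
    is None); pixels with infinite variance are simply replaced by the
    average of their neighbors.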
""" ftype = np.asarray(delta_field).dtype cdef: int NUM_NEIGHBORS = 4 int* dRow = [-1, 0, 1, 0] int* dCol = [0, 1, 0, -1] cnp.npy_intp nrows = delta_field.shape[0] cnp.npy_intp ncols = delta_field.shape[1] cnp.npy_intp r, c, dr, dc, nn, k double* b = [0, 0] double* d = [0, 0] double* y = [0, 0] double* A = [0, 0, 0] int yi double xx, yy, opt, nrm2, delta, sigmasq, max_displacement, det max_displacement = 0 with nogil: for r in range(nrows): for c in range(ncols): delta = delta_field[r, c] sigmasq = sigmasq_field[r, c] if sigmasq_field != None else 1 if(target is None): b[0] = delta_field[r, c] * grad[r, c, 0] b[1] = delta_field[r, c] * grad[r, c, 1] else: b[0] = target[r, c, 0] b[1] = target[r, c, 1] nn = 0 y[0] = 0 y[1] = 0 for k in range(NUM_NEIGHBORS): dr = r + dRow[k] if((dr < 0) or (dr >= nrows)): continue dc = c + dCol[k] if((dc < 0) or (dc >= ncols)): continue nn += 1 y[0] += displacement_field[dr, dc, 0] y[1] += displacement_field[dr, dc, 1] if dpy_isinf(sigmasq) != 0: xx = displacement_field[r, c, 0] yy = displacement_field[r, c, 1] displacement_field[r, c, 0] = y[0] / nn displacement_field[r, c, 1] = y[1] / nn xx -= displacement_field[r, c, 0] yy -= displacement_field[r, c, 1] opt = xx * xx + yy * yy if(max_displacement < opt): max_displacement = opt else: A[0] = grad[r, c, 0] ** 2 + sigmasq * lambda_param * nn A[1] = grad[r, c, 0] * grad[r, c, 1] A[2] = grad[r, c, 1] ** 2 + sigmasq * lambda_param * nn det = A[0] * A[2] - A[1] * A[1] if(det < 1e-9): nrm2 = (grad[r, c, 0] ** 2 + grad[r, c, 1] ** 2) if(nrm2 < 1e-9): displacement_field[r, c, 0] = 0 displacement_field[r, c, 1] = 0 else: displacement_field[r, c, 0] = (b[0]) / nrm2 displacement_field[r, c, 1] = (b[1]) / nrm2 else: y[0] = b[0] + sigmasq * lambda_param * y[0] y[1] = b[1] + sigmasq * lambda_param * y[1] _solve_2d_symmetric_positive_definite(A, y, det, d) xx = displacement_field[r, c, 0] - d[0] yy = displacement_field[r, c, 1] - d[1] displacement_field[r, c, 0] = d[0] displacement_field[r, c, 1] = d[1] opt = xx * xx + yy * yy if(max_displacement < opt): max_displacement = opt return sqrt(max_displacement) @cython.boundscheck(False) @cython.wraparound(False) cpdef double compute_energy_ssd_2d(floating[:, :] delta_field): r"""Sum of squared differences between two 2D images Computes the Sum of Squared Differences between the static and moving image. Those differences are given by delta_field Parameters ---------- delta_field : array, shape (R, C) the difference between the static and moving image (the 'derivative w.r.t. time' in the optical flow model) Returns ------- energy : float the SSD energy at this iteration Notes ----- The numeric value of the energy is used only to detect convergence. This function returns only the energy corresponding to the data term (excluding the energy corresponding to the regularization term) because the Greedy-SyN algorithm is an unconstrained gradient descent algorithm in the space of diffeomorphisms: in each iteration it makes a step along the negative smoothed gradient --of the data term-- and then makes sure the resulting diffeomorphisms are invertible using an explicit inversion algorithm. Since it is not clear how to reflect the energy corresponding to this re-projection to the space of diffeomorphisms, a more precise energy computation including the regularization term is useless. Instead, convergence is checked considering the data-term energy only and detecting oscilations in the energy profile. 
""" cdef: cnp.npy_intp nrows = delta_field.shape[0] cnp.npy_intp ncols = delta_field.shape[1] cnp.npy_intp r, c double energy = 0 with nogil: for r in range(nrows): for c in range(ncols): energy += delta_field[r, c] ** 2 return energy @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) cpdef double iterate_residual_displacement_field_ssd_3d( floating[:, :, :] delta_field, floating[:, :, :] sigmasq_field, floating[:, :, :, :] grad, floating[:, :, :, :] target, double lambda_param, floating[:, :, :, :] disp): r"""One iteration of a large linear system solver for 3D SSD registration Performs one iteration at one level of the Multi-resolution Gauss-Seidel solver proposed by Bruhn and Weickert [Bruhn05]. Parameters ---------- delta_field : array, shape (S, R, C) the difference between the static and moving image (the 'derivative w.r.t. time' in the optical flow model) sigmasq_field : array, shape (S, R, C) the variance of the gray level value at each voxel, according to the EM model (for SSD, it is 1 for all voxels). Inf and 0 values are processed specially to support infinite and zero variance. grad : array, shape (S, R, C, 3) the gradient of the moving image target : array, shape (S, R, C, 3) right-hand side of the linear system to be solved in the Weickert's multi-resolution algorithm lambda_param : float smoothness parameter of the objective function disp : array, shape (S, R, C, 3) the displacement field to start the optimization from Returns ------- max_displacement : float the norm of the maximum change in the displacement field after the iteration References ---------- [Bruhn05] Andres Bruhn and Joachim Weickert, "Towards ultimate motion estimation: combining highest accuracy with real-time performance", 10th IEEE International Conference on Computer Vision, 2005. ICCV 2005. 
""" ftype = np.asarray(delta_field).dtype cdef: int NUM_NEIGHBORS = 6 int* dSlice = [-1, 0, 0, 0, 0, 1] int* dRow = [0, -1, 0, 1, 0, 0] int* dCol = [0, 0, 1, 0, -1, 0] cnp.npy_intp nslices = delta_field.shape[0] cnp.npy_intp nrows = delta_field.shape[1] cnp.npy_intp ncols = delta_field.shape[2] int nn double* g = [0, 0, 0] double* b = [0, 0, 0] double* d = [0, 0, 0] double* y = [0, 0, 0] double* A = [0, 0, 0, 0, 0, 0] double xx, yy, zz, opt, nrm2, delta, sigmasq, max_displacement cnp.npy_intp dr, ds, dc, s, r, c max_displacement = 0 with nogil: for s in range(nslices): for r in range(nrows): for c in range(ncols): g[0] = grad[s, r, c, 0] g[1] = grad[s, r, c, 1] g[2] = grad[s, r, c, 2] delta = delta_field[s, r, c] sigmasq = sigmasq_field[s, r, c] if sigmasq_field != None else 1 if(target is None): b[0] = delta_field[s, r, c] * g[0] b[1] = delta_field[s, r, c] * g[1] b[2] = delta_field[s, r, c] * g[2] else: b[0] = target[s, r, c, 0] b[1] = target[s, r, c, 1] b[2] = target[s, r, c, 2] nn = 0 y[0] = 0 y[1] = 0 y[2] = 0 for k in range(NUM_NEIGHBORS): ds = s + dSlice[k] if((ds < 0) or (ds >= nslices)): continue dr = r + dRow[k] if((dr < 0) or (dr >= nrows)): continue dc = c + dCol[k] if((dc < 0) or (dc >= ncols)): continue nn += 1 y[0] += disp[ds, dr, dc, 0] y[1] += disp[ds, dr, dc, 1] y[2] += disp[ds, dr, dc, 2] if dpy_isinf(sigmasq) != 0: xx = disp[s, r, c, 0] yy = disp[s, r, c, 1] zz = disp[s, r, c, 2] disp[s, r, c, 0] = y[0] / nn disp[s, r, c, 1] = y[1] / nn disp[s, r, c, 2] = y[2] / nn xx -= disp[s, r, c, 0] yy -= disp[s, r, c, 1] zz -= disp[s, r, c, 2] opt = xx * xx + yy * yy + zz * zz if(max_displacement < opt): max_displacement = opt elif(sigmasq < 1e-9): nrm2 = g[0] ** 2 + g[1] ** 2 + g[2] ** 2 if(nrm2 < 1e-9): disp[s, r, c, 0] = 0 disp[s, r, c, 1] = 0 disp[s, r, c, 2] = 0 else: disp[s, r, c, 0] = (b[0]) / nrm2 disp[s, r, c, 1] = (b[1]) / nrm2 disp[s, r, c, 2] = (b[2]) / nrm2 else: tau = sigmasq * lambda_param * nn y[0] = b[0] + sigmasq * lambda_param * y[0] y[1] = b[1] + sigmasq * lambda_param * y[1] y[2] = b[2] + sigmasq * lambda_param * y[2] is_singular = _solve_3d_symmetric_positive_definite( g, y, tau, d) if is_singular == 1: nrm2 = g[0] ** 2 + g[1] ** 2 + g[2] ** 2 if(nrm2 < 1e-9): disp[s, r, c, 0] = 0 disp[s, r, c, 1] = 0 disp[s, r, c, 2] = 0 else: disp[s, r, c, 0] = (b[0]) / nrm2 disp[s, r, c, 1] = (b[1]) / nrm2 disp[s, r, c, 2] = (b[2]) / nrm2 xx = disp[s, r, c, 0] - d[0] yy = disp[s, r, c, 1] - d[1] zz = disp[s, r, c, 2] - d[2] disp[s, r, c, 0] = d[0] disp[s, r, c, 1] = d[1] disp[s, r, c, 2] = d[2] opt = xx * xx + yy * yy + zz * zz if(max_displacement < opt): max_displacement = opt return sqrt(max_displacement) @cython.boundscheck(False) @cython.wraparound(False) cpdef double compute_energy_ssd_3d(floating[:, :, :] delta_field): r"""Sum of squared differences between two 3D volumes Computes the Sum of Squared Differences between the static and moving volume Those differences are given by delta_field Parameters ---------- delta_field : array, shape (R, C) the difference between the static and moving image (the 'derivative w.r.t. time' in the optical flow model) Returns ------- energy : float the SSD energy at this iteration Notes ----- The numeric value of the energy is used only to detect convergence. 
This function returns only the energy corresponding to the data term (excluding the energy corresponding to the regularization term) because the Greedy-SyN algorithm is an unconstrained gradient descent algorithm in the space of diffeomorphisms: in each iteration it makes a step along the negative smoothed gradient --of the data term-- and then makes sure the resulting diffeomorphisms are invertible using an explicit inversion algorithm. Since it is not clear how to reflect the energy corresponding to this re-projection to the space of diffeomorphisms, a more precise energy computation including the regularization term is useless. Instead, convergence is checked considering the data-term energy only and detecting oscillations in the energy profile. """ cdef: cnp.npy_intp nslices = delta_field.shape[0] cnp.npy_intp nrows = delta_field.shape[1] cnp.npy_intp ncols = delta_field.shape[2] cnp.npy_intp s, r, c double energy = 0 with nogil: for s in range(nslices): for r in range(nrows): for c in range(ncols): energy += delta_field[s, r, c] ** 2 return energy @cython.boundscheck(False) @cython.wraparound(False) def compute_residual_displacement_field_ssd_3d( floating[:, :, :] delta_field, floating[:, :, :] sigmasq_field, floating[:, :, :, :] gradient_field, floating[:, :, :, :] target, double lambda_param, floating[:, :, :, :] disp, floating[:, :, :, :] residual): r"""The residual displacement field to be fit on the next iteration Computes the residual displacement field corresponding to the current displacement field (given by 'disp') in the Multi-resolution Gauss-Seidel solver proposed by Bruhn and Weickert [Bruhn05]. Parameters ---------- delta_field : array, shape (S, R, C) the difference between the static and moving image (the 'derivative w.r.t. time' in the optical flow model) sigmasq_field : array, shape (S, R, C) the variance of the gray level value at each voxel, according to the EM model (for SSD, it is 1 for all voxels). Inf and 0 values are processed specially to support infinite and zero variance. gradient_field : array, shape (S, R, C, 3) the gradient of the moving image target : array, shape (S, R, C, 3) right-hand side of the linear system to be solved in Bruhn and Weickert's multi-resolution algorithm lambda_param : float smoothness parameter in the objective function disp : array, shape (S, R, C, 3) the current displacement field to compute the residual from residual : array, shape (S, R, C, 3) the displacement field to put the residual to Returns ------- residual : array, shape (S, R, C, 3) the residual displacement field. If residual was None as input, then a new field is returned, otherwise the same array is returned References ---------- [Bruhn05] Andres Bruhn and Joachim Weickert, "Towards ultimate motion estimation: combining highest accuracy with real-time performance", 10th IEEE International Conference on Computer Vision, 2005. ICCV 2005.
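Examples
--------
A minimal sketch (values are arbitrary; the module is assumed to be
importable as ``dipy.align.sumsqdiff``). Passing None as the residual
allocates a new field of the proper shape:

>>> import numpy as np
>>> from dipy.align import floating
>>> from dipy.align import sumsqdiff as ssd
>>> sh = (4, 5, 6)
>>> delta = np.ones(sh, dtype=floating)
>>> sigmasq = np.ones(sh, dtype=floating)
>>> grad = np.zeros(sh + (3,), dtype=floating)
>>> disp = np.zeros(sh + (3,), dtype=floating)
>>> res = ssd.compute_residual_displacement_field_ssd_3d(
...     delta, sigmasq, grad, None, 0.9, disp, None)
>>> res.shape
(4, 5, 6, 3)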
""" ftype = np.asarray(delta_field).dtype cdef: int NUM_NEIGHBORS = 6 int* dSlice = [-1, 0, 0, 0, 0, 1] int* dRow = [0, -1, 0, 1, 0, 0] int* dCol = [0, 0, 1, 0, -1, 0] double* b = [0, 0, 0] double* y = [0, 0, 0] int yi cnp.npy_intp nslices = delta_field.shape[0] cnp.npy_intp nrows = delta_field.shape[1] cnp.npy_intp ncols = delta_field.shape[2] double delta, sigmasq, dotP cnp.npy_intp s, r, c, ds, dr, dc if residual is None: residual = np.empty(shape=(nslices, nrows, ncols, 3), dtype=ftype) with nogil: for s in range(nslices): for r in range(nrows): for c in range(ncols): delta = delta_field[s, r, c] sigmasq = sigmasq_field[s, r, c] if sigmasq_field != None else 1 if(target is None): b[0] = delta * gradient_field[s, r, c, 0] b[1] = delta * gradient_field[s, r, c, 1] b[2] = delta * gradient_field[s, r, c, 2] else: b[0] = target[s, r, c, 0] b[1] = target[s, r, c, 1] b[2] = target[s, r, c, 2] y[0] = 0 y[1] = 0 y[2] = 0 for k in range(NUM_NEIGHBORS): ds = s + dSlice[k] if((ds < 0) or (ds >= nslices)): continue dr = r + dRow[k] if((dr < 0) or (dr >= nrows)): continue dc = c + dCol[k] if((dc < 0) or (dc >= ncols)): continue y[0] += (disp[s, r, c, 0] - disp[ds, dr, dc, 0]) y[1] += (disp[s, r, c, 1] - disp[ds, dr, dc, 1]) y[2] += (disp[s, r, c, 2] - disp[ds, dr, dc, 2]) if dpy_isinf(sigmasq) != 0: residual[s, r, c, 0] = -lambda_param * y[0] residual[s, r, c, 1] = -lambda_param * y[1] residual[s, r, c, 2] = -lambda_param * y[2] else: dotP = (gradient_field[s, r, c, 0] * disp[s, r, c, 0] + gradient_field[s, r, c, 1] * disp[s, r, c, 1] + gradient_field[s, r, c, 2] * disp[s, r, c, 2]) residual[s, r, c, 0] = (b[0] - (gradient_field[s, r, c, 0] * dotP + sigmasq * lambda_param * y[0])) residual[s, r, c, 1] = (b[1] - (gradient_field[s, r, c, 1] * dotP + sigmasq * lambda_param * y[1])) residual[s, r, c, 2] = (b[2] - (gradient_field[s, r, c, 2] * dotP + sigmasq * lambda_param * y[2])) return np.asarray(residual) @cython.boundscheck(False) @cython.wraparound(False) cpdef compute_residual_displacement_field_ssd_2d( floating[:, :] delta_field, floating[:, :] sigmasq_field, floating[:, :, :] gradient_field, floating[:, :, :] target, double lambda_param, floating[:, :, :] d, floating[:, :, :] residual): r"""The residual displacement field to be fit on the next iteration Computes the residual displacement field corresponding to the current displacement field in the Multi-resolution Gauss-Seidel solver proposed by Bruhn and Weickert [Bruhn05]. Parameters ---------- delta_field : array, shape (R, C) the difference between the static and moving image (the 'derivative w.r.t. time' in the optical flow model) sigmasq_field : array, shape (R, C) the variance of the gray level value at each voxel, according to the EM model (for SSD, it is 1 for all voxels). Inf and 0 values are processed specially to support infinite and zero variance. gradient_field : array, shape (R, C, 2) the gradient of the moving image target : array, shape (R, C, 2) right-hand side of the linear system to be solved in the Weickert's multi-resolution algorithm lambda_param : float smoothness parameter in the objective function d : array, shape (R, C, 2) the current displacement field to compute the residual from residual : array, shape (R, C, 2) the displacement field to put the residual to Returns ------- residual : array, shape (R, C, 2) the residual displacement field. 
If residual was None a input, then a new field is returned, otherwise the same array is returned References ---------- [Bruhn05] Andres Bruhn and Joachim Weickert, "Towards ultimate motion estimation: combining highest accuracy with real-time performance", 10th IEEE International Conference on Computer Vision, 2005. ICCV 2005. """ ftype = np.asarray(delta_field).dtype cdef: int NUM_NEIGHBORS = 4 int* dRow = [-1, 0, 1, 0] int* dCol = [0, 1, 0, -1] double* b = [0, 0] double* y = [0, 0] int yi cnp.npy_intp nrows = delta_field.shape[0] cnp.npy_intp ncols = delta_field.shape[1] double delta, sigmasq, dotP cnp.npy_intp r, c, dr, dc if residual is None: residual = np.empty(shape=(nrows, ncols, 2), dtype=ftype) with nogil: for r in range(nrows): for c in range(ncols): delta = delta_field[r, c] sigmasq = sigmasq_field[r, c] if sigmasq_field != None else 1 if target is None: b[0] = delta * gradient_field[r, c, 0] b[1] = delta * gradient_field[r, c, 1] else: b[0] = target[r, c, 0] b[1] = target[r, c, 1] y[0] = 0 # reset y y[1] = 0 nn=0 for k in range(NUM_NEIGHBORS): dr = r + dRow[k] if((dr < 0) or (dr >= nrows)): continue dc = c + dCol[k] if((dc < 0) or (dc >= ncols)): continue y[0] += (d[r, c, 0] - d[dr, dc, 0]) y[1] += (d[r, c, 1] - d[dr, dc, 1]) if dpy_isinf(sigmasq) != 0: residual[r, c, 0] = -lambda_param * y[0] residual[r, c, 1] = -lambda_param * y[1] else: dotP = (gradient_field[r, c, 0] * d[r, c, 0] + gradient_field[r, c, 1] * d[r, c, 1]) residual[r, c, 0] = (b[0] - (gradient_field[r, c, 0] * dotP + sigmasq * lambda_param * y[0])) residual[r, c, 1] = (b[1] - (gradient_field[r, c, 1] * dotP + sigmasq * lambda_param * y[1])) return np.asarray(residual) @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) def compute_ssd_demons_step_2d(floating[:,:] delta_field, floating[:,:,:] gradient_moving, double sigma_sq_x, floating[:,:,:] out): r"""Demons step for 2D SSD-driven registration Computes the demons step for SSD-driven registration ( eq. 4 in [Bruhn05] ) Parameters ---------- delta_field : array, shape (R, C) the difference between the static and moving image (the 'derivative w.r.t. time' in the optical flow model) gradient_field : array, shape (R, C, 2) the gradient of the moving image sigma_sq_x : float parameter controlling the amount of regularization. It corresponds to $\sigma_x^2$ in algorithm 1 of Vercauteren et al.[Vercauteren09] out : array, shape (R, C, 2) if None, a new array will be created to store the demons step. Otherwise the provided array will be used. Returns ------- demons_step : array, shape (R, C, 2) the demons step to be applied for updating the current displacement field energy : float the current ssd energy (before applying the returned demons_step) References ---------- [Bruhn05] Andres Bruhn and Joachim Weickert, "Towards ultimate motion estimation: combining highest accuracy with real-time performance", 10th IEEE International Conference on Computer Vision, 2005. ICCV 2005. [Vercauteren09] Vercauteren, T., Pennec, X., Perchant, A., & Ayache, N. (2009). Diffeomorphic demons: efficient non-parametric image registration. NeuroImage, 45(1 Suppl), S61-72. 
doi:10.1016/j.neuroimage.2008.10.040 """ cdef: cnp.npy_intp nr = delta_field.shape[0] cnp.npy_intp nc = delta_field.shape[1] cnp.npy_intp i, j double delta, delta_2, nrm2, energy, den if out is None: out = np.zeros((nr, nc, 2), dtype=np.asarray(delta_field).dtype) with nogil: energy = 0 for i in range(nr): for j in range(nc): delta = delta_field[i,j] delta_2 = delta**2 energy += delta_2 nrm2 = gradient_moving[i, j, 0]**2 + gradient_moving[i, j, 1]**2 den = delta_2/sigma_sq_x + nrm2 if den <1e-9: out[i, j, 0] = 0 out[i, j, 1] = 0 else: out[i, j, 0] = delta * gradient_moving[i, j, 0] / den out[i, j, 1] = delta * gradient_moving[i, j, 1] / den return np.asarray(out), energy @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) def compute_ssd_demons_step_3d(floating[:,:,:] delta_field, floating[:,:,:,:] gradient_moving, double sigma_sq_x, floating[:,:,:,:] out): r"""Demons step for 3D SSD-driven registration Computes the demons step for SSD-driven registration ( eq. 4 in [Bruhn05] ) Parameters ---------- delta_field : array, shape (S, R, C) the difference between the static and moving image (the 'derivative w.r.t. time' in the optical flow model) gradient_field : array, shape (S, R, C, 2) the gradient of the moving image sigma_sq_x : float parameter controlling the amount of regularization. It corresponds to $\sigma_x^2$ in algorithm 1 of Vercauteren et al.[Vercauteren09] out : array, shape (S, R, C, 2) if None, a new array will be created to store the demons step. Otherwise the provided array will be used. Returns ------- demons_step : array, shape (S, R, C, 3) the demons step to be applied for updating the current displacement field energy : float the current ssd energy (before applying the returned demons_step) References ---------- [Bruhn05] Andres Bruhn and Joachim Weickert, "Towards ultimate motion estimation: combining highest accuracy with real-time performance", 10th IEEE International Conference on Computer Vision, 2005. ICCV 2005. [Vercauteren09] Vercauteren, T., Pennec, X., Perchant, A., & Ayache, N. (2009). Diffeomorphic demons: efficient non-parametric image registration. NeuroImage, 45(1 Suppl), S61-72. 
doi:10.1016/j.neuroimage.2008.10.040 """ cdef: cnp.npy_intp ns = delta_field.shape[0] cnp.npy_intp nr = delta_field.shape[1] cnp.npy_intp nc = delta_field.shape[2] cnp.npy_intp i, j, k double delta, delta_2, nrm2, energy, den if out is None: out = np.zeros((ns, nr, nc, 3), dtype=np.asarray(delta_field).dtype) with nogil: energy = 0 for k in range(ns): for i in range(nr): for j in range(nc): delta = delta_field[k,i,j] delta_2 = delta**2 energy += delta_2 nrm2 = (gradient_moving[k, i, j, 0]**2 + gradient_moving[k, i, j, 1]**2 + gradient_moving[k, i, j, 2]**2) den = delta_2/sigma_sq_x + nrm2 if den < 1e-9: out[k, i, j, 0] = 0 out[k, i, j, 1] = 0 out[k, i, j, 2] = 0 else: out[k, i, j, 0] = (delta * gradient_moving[k, i, j, 0] / den) out[k, i, j, 1] = (delta * gradient_moving[k, i, j, 1] / den) out[k, i, j, 2] = (delta * gradient_moving[k, i, j, 2] / den) return np.asarray(out), energy dipy-0.13.0/dipy/align/tests/000077500000000000000000000000001317371701200157435ustar00rootroot00000000000000dipy-0.13.0/dipy/align/tests/__init__.py000066400000000000000000000000001317371701200200420ustar00rootroot00000000000000dipy-0.13.0/dipy/align/tests/test_crosscorr.py000066400000000000000000000174221317371701200214010ustar00rootroot00000000000000import numpy as np from numpy.testing import assert_array_almost_equal from dipy.align import floating from dipy.align import crosscorr as cc def test_cc_factors_2d(): r""" Compares the output of the optimized function to compute the cross- correlation factors against a direct (not optimized, but less error prone) implementation. """ a = np.array(range(20*20), dtype=floating).reshape(20, 20) b = np.array(range(20*20)[::-1], dtype=floating).reshape(20, 20) a /= a.max() b /= b.max() for radius in [0, 1, 3, 6]: factors = np.asarray(cc.precompute_cc_factors_2d(a, b, radius)) expected = np.asarray(cc.precompute_cc_factors_2d_test(a, b, radius)) assert_array_almost_equal(factors, expected) def test_cc_factors_3d(): r""" Compares the output of the optimized function to compute the cross- correlation factors against a direct (not optimized, but less error prone) implementation. 
""" a = np.array(range(20*20*20), dtype=floating).reshape(20, 20, 20) b = np.array(range(20*20*20)[::-1], dtype=floating).reshape(20, 20, 20) a /= a.max() b /= b.max() for radius in [0, 1, 3, 6]: factors = np.asarray(cc.precompute_cc_factors_3d(a, b, radius)) expected = np.asarray(cc.precompute_cc_factors_3d_test(a, b, radius)) assert_array_almost_equal(factors, expected, decimal=5) def test_compute_cc_steps_2d(): # Select arbitrary images' shape (same shape for both images) sh = (32, 32) radius = 2 # Select arbitrary centers c_f = (np.asarray(sh)/2) + 1.25 c_g = c_f + 2.5 # Compute the identity vector field I(x) = x in R^2 x_0 = np.asarray(range(sh[0])) x_1 = np.asarray(range(sh[1])) X = np.ndarray(sh + (2,), dtype=np.float64) O = np.ones(sh) X[..., 0] = x_0[:, None] * O X[..., 1] = x_1[None, :] * O # Compute the gradient fields of F and G np.random.seed(1147572) gradF = np.array(X - c_f, dtype=floating) gradG = np.array(X - c_g, dtype=floating) sz = np.size(gradF) Fnoise = np.random.ranf(sz).reshape(gradF.shape) * gradF.max() * 0.1 Fnoise = Fnoise.astype(floating) gradF += Fnoise sz = np.size(gradG) Gnoise = np.random.ranf(sz).reshape(gradG.shape) * gradG.max() * 0.1 Gnoise = Gnoise.astype(floating) gradG += Gnoise sq_norm_grad_G = np.sum(gradG**2, -1) F = np.array(0.5*np.sum(gradF**2, -1), dtype=floating) G = np.array(0.5*sq_norm_grad_G, dtype=floating) Fnoise = np.random.ranf(np.size(F)).reshape(F.shape) * F.max() * 0.1 Fnoise = Fnoise.astype(floating) F += Fnoise Gnoise = np.random.ranf(np.size(G)).reshape(G.shape) * G.max() * 0.1 Gnoise = Gnoise.astype(floating) G += Gnoise # precompute the cross correlation factors factors = cc.precompute_cc_factors_2d_test(F, G, radius) factors = np.array(factors, dtype=floating) # test the forward step against the exact expression I = factors[..., 0] J = factors[..., 1] sfm = factors[..., 2] sff = factors[..., 3] smm = factors[..., 4] expected = np.ndarray(shape=sh + (2,), dtype=floating) factor = (-2.0 * sfm / (sff * smm)) * (J - (sfm / sff) * I) expected[..., 0] = factor * gradF[..., 0] factor = (-2.0 * sfm / (sff * smm)) * (J - (sfm / sff) * I) expected[..., 1] = factor * gradF[..., 1] actual, energy = cc.compute_cc_forward_step_2d(gradF, factors, 0) assert_array_almost_equal(actual, expected) for radius in range(1, 5): expected[:radius, ...] = 0 expected[:, :radius, ...] = 0 expected[-radius::, ...] = 0 expected[:, -radius::, ...] = 0 actual, energy = cc.compute_cc_forward_step_2d(gradF, factors, radius) assert_array_almost_equal(actual, expected) # test the backward step against the exact expression factor = (-2.0 * sfm / (sff * smm)) * (I - (sfm / smm) * J) expected[..., 0] = factor * gradG[..., 0] factor = (-2.0 * sfm / (sff * smm)) * (I - (sfm / smm) * J) expected[..., 1] = factor * gradG[..., 1] actual, energy = cc.compute_cc_backward_step_2d(gradG, factors, 0) assert_array_almost_equal(actual, expected) for radius in range(1, 5): expected[:radius, ...] = 0 expected[:, :radius, ...] = 0 expected[-radius::, ...] = 0 expected[:, -radius::, ...] 
= 0 actual, energy = cc.compute_cc_backward_step_2d(gradG, factors, radius) assert_array_almost_equal(actual, expected) def test_compute_cc_steps_3d(): sh = (32, 32, 32) radius = 2 # Select arbitrary centers c_f = (np.asarray(sh)/2) + 1.25 c_g = c_f + 2.5 # Compute the identity vector field I(x) = x in R^2 x_0 = np.asarray(range(sh[0])) x_1 = np.asarray(range(sh[1])) x_2 = np.asarray(range(sh[2])) X = np.ndarray(sh + (3,), dtype=np.float64) O = np.ones(sh) X[..., 0] = x_0[:, None, None] * O X[..., 1] = x_1[None, :, None] * O X[..., 2] = x_2[None, None, :] * O # Compute the gradient fields of F and G np.random.seed(12465825) gradF = np.array(X - c_f, dtype=floating) gradG = np.array(X - c_g, dtype=floating) sz = np.size(gradF) Fnoise = np.random.ranf(sz).reshape(gradF.shape) * gradF.max() * 0.1 Fnoise = Fnoise.astype(floating) gradF += Fnoise sz = np.size(gradG) Gnoise = np.random.ranf(sz).reshape(gradG.shape) * gradG.max() * 0.1 Gnoise = Gnoise.astype(floating) gradG += Gnoise sq_norm_grad_G = np.sum(gradG**2, -1) F = np.array(0.5*np.sum(gradF**2, -1), dtype=floating) G = np.array(0.5*sq_norm_grad_G, dtype=floating) Fnoise = np.random.ranf(np.size(F)).reshape(F.shape) * F.max() * 0.1 Fnoise = Fnoise.astype(floating) F += Fnoise Gnoise = np.random.ranf(np.size(G)).reshape(G.shape) * G.max() * 0.1 Gnoise = Gnoise.astype(floating) G += Gnoise # precompute the cross correlation factors factors = cc.precompute_cc_factors_3d_test(F, G, radius) factors = np.array(factors, dtype=floating) # test the forward step against the exact expression I = factors[..., 0] J = factors[..., 1] sfm = factors[..., 2] sff = factors[..., 3] smm = factors[..., 4] expected = np.ndarray(shape=sh + (3,), dtype=floating) factor = (-2.0 * sfm / (sff * smm)) * (J - (sfm / sff) * I) expected[..., 0] = factor * gradF[..., 0] expected[..., 1] = factor * gradF[..., 1] expected[..., 2] = factor * gradF[..., 2] actual, energy = cc.compute_cc_forward_step_3d(gradF, factors, 0) assert_array_almost_equal(actual, expected) for radius in range(1, 5): expected[:radius, ...] = 0 expected[:, :radius, ...] = 0 expected[:, :, :radius, :] = 0 expected[-radius::, ...] = 0 expected[:, -radius::, ...] = 0 expected[:, :, -radius::, ...] = 0 actual, energy = cc.compute_cc_forward_step_3d(gradF, factors, radius) assert_array_almost_equal(actual, expected) # test the backward step against the exact expression factor = (-2.0 * sfm / (sff * smm)) * (I - (sfm / smm) * J) expected[..., 0] = factor * gradG[..., 0] expected[..., 1] = factor * gradG[..., 1] expected[..., 2] = factor * gradG[..., 2] actual, energy = cc.compute_cc_backward_step_3d(gradG, factors, 0) assert_array_almost_equal(actual, expected) for radius in range(1, 5): expected[:radius, ...] = 0 expected[:, :radius, ...] = 0 expected[:, :, :radius, :] = 0 expected[-radius::, ...] = 0 expected[:, -radius::, ...] = 0 expected[:, :, -radius::, ...] 
= 0 actual, energy = cc.compute_cc_backward_step_3d(gradG, factors, radius) assert_array_almost_equal(actual, expected) if __name__ == '__main__': test_cc_factors_2d() test_cc_factors_3d() test_compute_cc_steps_2d() test_compute_cc_steps_3d() dipy-0.13.0/dipy/align/tests/test_expectmax.py000066400000000000000000000433151317371701200213600ustar00rootroot00000000000000import numpy as np from numpy.testing import (assert_equal, assert_array_equal, assert_array_almost_equal, assert_raises) from dipy.align import floating from dipy.align import expectmax as em def test_compute_em_demons_step_2d(): r""" Compares the output of the demons step in 2d against an analytical step. The fixed image is given by $F(x) = \frac{1}{2}||x - c_f||^2$, the moving image is given by $G(x) = \frac{1}{2}||x - c_g||^2$, $x, c_f, c_g \in R^{2}$ References ---------- [Vercauteren09] Vercauteren, T., Pennec, X., Perchant, A., & Ayache, N. (2009). Diffeomorphic demons: efficient non-parametric image registration. NeuroImage, 45(1 Suppl), S61-72. doi:10.1016/j.neuroimage.2008.10.040 """ # Select arbitrary images' shape (same shape for both images) sh = (30, 20) # Select arbitrary centers c_f = np.asarray(sh) / 2 c_g = c_f + 0.5 # Compute the identity vector field I(x) = x in R^2 x_0 = np.asarray(range(sh[0])) x_1 = np.asarray(range(sh[1])) X = np.ndarray(sh + (2,), dtype=np.float64) O = np.ones(sh) X[..., 0] = x_0[:, None] * O X[..., 1] = x_1[None, :] * O # Compute the gradient fields of F and G grad_F = X - c_f grad_G = X - c_g # The squared norm of grad_G to be used later sq_norm_grad_G = np.sum(grad_G**2, -1) # Compute F and G F = 0.5 * np.sum(grad_F**2, -1) G = 0.5 * sq_norm_grad_G delta_field = G - F # Now select an arbitrary parameter for # $\sigma_x$ (eq 4 in [Vercauteren09]) sigma_x_sq = 1.5 # Set arbitrary values for $\sigma_i$ (eq. 4 in [Vercauteren09]) # The original Demons algorithm used simply |F(x) - G(x)| as an # estimator, so let's use it as well sigma_i_sq = (F - G)**2 # Select some pixels to have special values np.random.seed(1346491) random_labels = np.random.randint(0, 5, sh[0] * sh[1]) random_labels = random_labels.reshape(sh) # this label is used to set sigma_i_sq == 0 below random_labels[sigma_i_sq == 0] = 2 # this label is used to set gradient == 0 below random_labels[sq_norm_grad_G == 0] = 2 expected = np.zeros_like(grad_G) # Pixels with sigma_i_sq = inf sigma_i_sq[random_labels == 0] = np.inf expected[random_labels == 0, ...] = 0 # Pixels with gradient!=0 and sigma_i_sq=0 sqnrm = sq_norm_grad_G[random_labels == 1] sigma_i_sq[random_labels == 1] = 0 expected[random_labels == 1, 0] = (delta_field[random_labels == 1] * grad_G[random_labels == 1, 0] / sqnrm) expected[random_labels == 1, 1] = (delta_field[random_labels == 1] * grad_G[random_labels == 1, 1] / sqnrm) # Pixels with gradient=0 and sigma_i_sq=0 sigma_i_sq[random_labels == 2] = 0 grad_G[random_labels == 2, ...] = 0 expected[random_labels == 2, ...] = 0 # Pixels with gradient=0 and sigma_i_sq!=0 grad_G[random_labels == 3, ...] = 0 # Directly compute the demons step according to eq. 4 in [Vercauteren09] num = (sigma_x_sq * (F - G))[random_labels >= 3] den = (sigma_x_sq * sq_norm_grad_G + sigma_i_sq)[random_labels >= 3] # This is $J^{P}$ in eq. 4 [Vercauteren09] expected[random_labels >= 3] = -1 * np.array(grad_G[random_labels >= 3]) expected[random_labels >= 3, ...] 
*= (num / den)[..., None] # Now compute it using the implementation under test actual = np.empty_like(expected, dtype=floating) em.compute_em_demons_step_2d(np.array(delta_field, dtype=floating), np.array(sigma_i_sq, dtype=floating), np.array(grad_G, dtype=floating), sigma_x_sq, actual) # Test sigma_i_sq == inf try: assert_array_almost_equal(actual[random_labels == 0], expected[random_labels == 0]) except AssertionError: raise AssertionError("Failed for sigma_i_sq == inf") # Test sigma_i_sq == 0 and gradient != 0 try: assert_array_almost_equal(actual[random_labels == 1], expected[random_labels == 1]) except AssertionError: raise AssertionError("Failed for sigma_i_sq == 0 and gradient != 0") # Test sigma_i_sq == 0 and gradient == 0 try: assert_array_almost_equal(actual[random_labels == 2], expected[random_labels == 2]) except AssertionError: raise AssertionError("Failed for sigma_i_sq == 0 and gradient == 0") # Test sigma_i_sq != 0 and gradient == 0 try: assert_array_almost_equal(actual[random_labels == 3], expected[random_labels == 3]) except AssertionError: raise AssertionError("Failed for sigma_i_sq != 0 and gradient == 0 ") # Test sigma_i_sq != 0 and gradient != 0 try: assert_array_almost_equal(actual[random_labels == 4], expected[random_labels == 4]) except AssertionError: raise AssertionError("Failed for sigma_i_sq != 0 and gradient != 0") def test_compute_em_demons_step_3d(): r""" Compares the output of the demons step in 3d against an analytical step. The fixed image is given by $F(x) = \frac{1}{2}||x - c_f||^2$, the moving image is given by $G(x) = \frac{1}{2}||x - c_g||^2$, $x, c_f, c_g \in R^{3}$ References ---------- [Vercauteren09] Vercauteren, T., Pennec, X., Perchant, A., & Ayache, N. (2009). Diffeomorphic demons: efficient non-parametric image registration. NeuroImage, 45(1 Suppl), S61-72. doi:10.1016/j.neuroimage.2008.10.040 """ # Select arbitrary images' shape (same shape for both images) sh = (20, 15, 10) # Select arbitrary centers c_f = np.asarray(sh) / 2 c_g = c_f + 0.5 # Compute the identity vector field I(x) = x in R^2 x_0 = np.asarray(range(sh[0])) x_1 = np.asarray(range(sh[1])) x_2 = np.asarray(range(sh[2])) X = np.ndarray(sh + (3,), dtype=np.float64) O = np.ones(sh) X[..., 0] = x_0[:, None, None] * O X[..., 1] = x_1[None, :, None] * O X[..., 2] = x_2[None, None, :] * O # Compute the gradient fields of F and G grad_F = X - c_f grad_G = X - c_g # The squared norm of grad_G to be used later sq_norm_grad_G = np.sum(grad_G**2, -1) # Compute F and G F = 0.5 * np.sum(grad_F**2, -1) G = 0.5 * sq_norm_grad_G delta_field = G - F # Now select an arbitrary parameter for # $\sigma_x$ (eq 4 in [Vercauteren09]) sigma_x_sq = 1.5 # Set arbitrary values for $\sigma_i$ (eq. 4 in [Vercauteren09]) # The original Demons algorithm used simply |F(x) - G(x)| as an # estimator, so let's use it as well sigma_i_sq = (F - G)**2 # Select some pixels to have special values np.random.seed(1346491) random_labels = np.random.randint(0, 5, sh[0] * sh[1] * sh[2]) random_labels = random_labels.reshape(sh) # this label is used to set sigma_i_sq == 0 below random_labels[sigma_i_sq == 0] = 2 # this label is used to set gradient == 0 below random_labels[sq_norm_grad_G == 0] = 2 expected = np.zeros_like(grad_G) # Pixels with sigma_i_sq = inf sigma_i_sq[random_labels == 0] = np.inf expected[random_labels == 0, ...] 
= 0 # Pixels with gradient!=0 and sigma_i_sq=0 sqnrm = sq_norm_grad_G[random_labels == 1] sigma_i_sq[random_labels == 1] = 0 expected[random_labels == 1, 0] = (delta_field[random_labels == 1] * grad_G[random_labels == 1, 0] / sqnrm) expected[random_labels == 1, 1] = (delta_field[random_labels == 1] * grad_G[random_labels == 1, 1] / sqnrm) expected[random_labels == 1, 2] = (delta_field[random_labels == 1] * grad_G[random_labels == 1, 2] / sqnrm) # Pixels with gradient=0 and sigma_i_sq=0 sigma_i_sq[random_labels == 2] = 0 grad_G[random_labels == 2, ...] = 0 expected[random_labels == 2, ...] = 0 # Pixels with gradient=0 and sigma_i_sq!=0 grad_G[random_labels == 3, ...] = 0 # Directly compute the demons step according to eq. 4 in [Vercauteren09] num = (sigma_x_sq * (F - G))[random_labels >= 3] den = (sigma_x_sq * sq_norm_grad_G + sigma_i_sq)[random_labels >= 3] # This is $J^{P}$ in eq. 4 [Vercauteren09] expected[random_labels >= 3] = -1 * np.array(grad_G[random_labels >= 3]) expected[random_labels >= 3, ...] *= (num / den)[..., None] # Now compute it using the implementation under test actual = np.empty_like(expected, dtype=floating) em.compute_em_demons_step_3d(np.array(delta_field, dtype=floating), np.array(sigma_i_sq, dtype=floating), np.array(grad_G, dtype=floating), sigma_x_sq, actual) # Test sigma_i_sq == inf try: assert_array_almost_equal(actual[random_labels == 0], expected[random_labels == 0]) except AssertionError: raise AssertionError("Failed for sigma_i_sq == inf") # Test sigma_i_sq == 0 and gradient != 0 try: assert_array_almost_equal(actual[random_labels == 1], expected[random_labels == 1]) except AssertionError: raise AssertionError("Failed for sigma_i_sq == 0 and gradient != 0") # Test sigma_i_sq == 0 and gradient == 0 try: assert_array_almost_equal(actual[random_labels == 2], expected[random_labels == 2]) except AssertionError: raise AssertionError("Failed for sigma_i_sq == 0 and gradient == 0") # Test sigma_i_sq != 0 and gradient == 0 try: assert_array_almost_equal(actual[random_labels == 3], expected[random_labels == 3]) except AssertionError: raise AssertionError("Failed for sigma_i_sq != 0 and gradient == 0 ") # Test sigma_i_sq != 0 and gradient != 0 try: assert_array_almost_equal(actual[random_labels == 4], expected[random_labels == 4]) except AssertionError: raise AssertionError("Failed for sigma_i_sq != 0 and gradient != 0") def test_quantize_positive_2d(): np.random.seed(1246592) # an arbitrary number of quantization levels num_levels = 11 # arbitrary test image shape (must contain at least 3 elements) img_shape = (15, 20) min_positive = 0.1 max_positive = 1.0 epsilon = 1e-8 delta = (max_positive - min_positive + epsilon) / (num_levels - 1) true_levels = np.zeros((num_levels,), dtype=np.float32) # put the intensities at the centers of the bins true_levels[1:] = np.linspace(min_positive + delta * 0.5, max_positive - delta * 0.5, num_levels - 1) # generate a target quantization image true_quantization = np.empty(img_shape, dtype=np.int32) random_labels = np.random.randint(0, num_levels, np.size(true_quantization)) # make sure there is at least one element equal to 0, 1 and num_levels-1 random_labels[0] = 0 random_labels[1] = 1 random_labels[2] = num_levels - 1 true_quantization[...] 
= random_labels.reshape(img_shape) # make sure additive noise doesn't change the quantization result noise_amplitude = np.min([delta / 4.0, min_positive / 4.0]) sz = np.size(true_quantization) noise = np.random.ranf(sz).reshape(img_shape) * noise_amplitude noise = noise.astype(floating) input_image = np.ndarray(img_shape, dtype=floating) # assign intensities plus noise input_image[...] = true_levels[true_quantization] + noise # preserve original zeros input_image[true_quantization == 0] = 0 # preserve min positive value input_image[true_quantization == 1] = min_positive # preserve max positive value input_image[true_quantization == num_levels - 1] = max_positive out, levels, hist = em.quantize_positive_2d(input_image, num_levels) levels = np.asarray(levels) assert_array_equal(out, true_quantization) assert_array_almost_equal(levels, true_levels) for i in range(num_levels): current_bin = np.asarray(true_quantization == i).sum() assert_equal(hist[i], current_bin) # test num_levels<2 and input image with zeros and non-zeros everywhere assert_raises(ValueError, em.quantize_positive_2d, input_image, 0) assert_raises(ValueError, em.quantize_positive_2d, input_image, 1) out, levels, hist = em.quantize_positive_2d( np.zeros(img_shape, dtype=floating), 2) assert_equal(out, np.zeros(img_shape, dtype=np.int32)) out, levels, hist = em.quantize_positive_2d( np.ones(img_shape, dtype=floating), 2) assert_equal(out, np.ones(img_shape, dtype=np.int32)) def test_quantize_positive_3d(): np.random.seed(1246592) # an arbitrary number of quantization levels num_levels = 11 # arbitrary test image shape (must contain at least 3 elements) img_shape = (5, 10, 15) min_positive = 0.1 max_positive = 1.0 epsilon = 1e-8 delta = (max_positive - min_positive + epsilon) / (num_levels - 1) true_levels = np.zeros((num_levels,), dtype=np.float32) # put the intensities at the centers of the bins true_levels[1:] = np.linspace(min_positive + delta * 0.5, max_positive - delta * 0.5, num_levels - 1) # generate a target quantization image true_quantization = np.empty(img_shape, dtype=np.int32) random_labels = np.random.randint(0, num_levels, np.size(true_quantization)) # make sure there is at least one element equal to 0, 1 and num_levels-1 random_labels[0] = 0 random_labels[1] = 1 random_labels[2] = num_levels - 1 true_quantization[...] = random_labels.reshape(img_shape) # make sure additive noise doesn't change the quantization result noise_amplitude = np.min([delta / 4.0, min_positive / 4.0]) sz = np.size(true_quantization) noise = np.random.ranf(sz).reshape(img_shape) * noise_amplitude noise = noise.astype(floating) input_image = np.ndarray(img_shape, dtype=floating) # assign intensities plus noise input_image[...] 
= true_levels[true_quantization] + noise # preserve original zeros input_image[true_quantization == 0] = 0 # preserve min positive value input_image[true_quantization == 1] = min_positive # preserve max positive value input_image[true_quantization == num_levels - 1] = max_positive out, levels, hist = em.quantize_positive_3d(input_image, num_levels) levels = np.asarray(levels) assert_array_equal(out, true_quantization) assert_array_almost_equal(levels, true_levels) for i in range(num_levels): current_bin = np.asarray(true_quantization == i).sum() assert_equal(hist[i], current_bin) # test num_levels<2 and input image with zeros and non-zeros everywhere assert_raises(ValueError, em.quantize_positive_3d, input_image, 0) assert_raises(ValueError, em.quantize_positive_3d, input_image, 1) out, levels, hist = em.quantize_positive_3d(np.zeros(img_shape, dtype=floating), 2) assert_equal(out, np.zeros(img_shape, dtype=np.int32)) out, levels, hist = em.quantize_positive_3d(np.ones(img_shape, dtype=floating), 2) assert_equal(out, np.ones(img_shape, dtype=np.int32)) def test_compute_masked_class_stats_2d(): np.random.seed(1246592) shape = (32, 32) # Create random labels labels = np.ndarray(shape, dtype=np.int32) labels[...] = np.random.randint(2, 10, np.size(labels)).reshape(shape) # now label 0 is not present and label 1 occurs once labels[0, 0] = 1 # Create random values values = np.random.randn(shape[0], shape[1]).astype(floating) values *= labels values += labels expected_means = [0, values[0, 0]] + \ [values[labels == i].mean() for i in range(2, 10)] expected_vars = [np.inf, np.inf] + \ [values[labels == i].var() for i in range(2, 10)] mask = np.ones(shape, dtype=np.int32) means, vars = em.compute_masked_class_stats_2d(mask, values, 10, labels) assert_array_almost_equal(means, expected_means, decimal=4) assert_array_almost_equal(vars, expected_vars, decimal=4) def test_compute_masked_class_stats_3d(): np.random.seed(1246592) shape = (32, 32, 32) # Create random labels labels = np.ndarray(shape, dtype=np.int32) labels[...] 
= np.random.randint(2, 10, np.size(labels)).reshape(shape) # now label 0 is not present and label 1 occurs once labels[0, 0, 0] = 1 # Create random values values = np.random.randn(shape[0], shape[1], shape[2]).astype(floating) values *= labels values += labels expected_means = [0, values[0, 0, 0]] + \ [values[labels == i].mean() for i in range(2, 10)] expected_vars = [np.inf, np.inf] + \ [values[labels == i].var() for i in range(2, 10)] mask = np.ones(shape, dtype=np.int32) means, vars = em.compute_masked_class_stats_3d(mask, values, 10, labels) assert_array_almost_equal(means, expected_means, decimal=4) assert_array_almost_equal(vars, expected_vars, decimal=4) if __name__ == '__main__': test_compute_em_demons_step_2d() test_compute_em_demons_step_3d() test_quantize_positive_2d() test_quantize_positive_3d() test_compute_masked_class_stats_2d() test_compute_masked_class_stats_3d() dipy-0.13.0/dipy/align/tests/test_imaffine.py000066400000000000000000000615651317371701200211470ustar00rootroot00000000000000import numpy as np import scipy as sp import nibabel as nib import numpy.linalg as npl from numpy.testing import (assert_array_equal, assert_array_almost_equal, assert_almost_equal, assert_equal, assert_raises) from dipy.core import geometry as geometry from dipy.data import get_data from dipy.viz import regtools as rt from dipy.align import floating from dipy.align import vector_fields as vf from dipy.align import imaffine from dipy.align.imaffine import AffineInversionError from dipy.align.transforms import (Transform, regtransforms) from dipy.align.tests.test_parzenhist import (setup_random_transform, sample_domain_regular) # For each transform type, select a transform factor (indicating how large the # true transform between static and moving images will be), a sampling scheme # (either a positive integer less than or equal to 100, or None) indicating # the percentage (if int) of voxels to be used for estimating the joint PDFs, # or dense sampling (if None), and also specify a starting point (to avoid # starting from the identity) factors = {('TRANSLATION', 2): (2.0, 0.35, np.array([2.3, 4.5])), ('ROTATION', 2): (0.1, None, np.array([0.1])), ('RIGID', 2): (0.1, .50, np.array([0.12, 1.8, 2.7])), ('SCALING', 2): (0.01, None, np.array([1.05])), ('AFFINE', 2): (0.1, .50, np.array([0.99, -0.05, 1.3, 0.05, 0.99, 2.5])), ('TRANSLATION', 3): (2.0, None, np.array([2.3, 4.5, 1.7])), ('ROTATION', 3): (0.1, 1.0, np.array([0.1, 0.15, -0.11])), ('RIGID', 3): (0.1, None, np.array([0.1, 0.15, -0.11, 2.3, 4.5, 1.7])), ('SCALING', 3): (0.1, .35, np.array([0.95])), ('AFFINE', 3): (0.1, None, np.array([0.99, -0.05, 0.03, 1.3, 0.05, 0.99, -0.10, 2.5, -0.07, 0.10, 0.99, -1.4]))} def test_transform_centers_of_mass_3d(): np.random.seed(1246592) shape = (64, 64, 64) rm = 8 sp = vf.create_sphere(shape[0] // 2, shape[1] // 2, shape[2] // 2, rm) moving = np.zeros(shape) # The center of mass will be (16, 16, 16), in image coordinates moving[:shape[0] // 2, :shape[1] // 2, :shape[2] // 2] = sp[...] 
rs = 16 # The center of mass will be (32, 32, 32), in image coordinates static = vf.create_sphere(shape[0], shape[1], shape[2], rs) # Create arbitrary image-to-space transforms axis = np.array([.5, 2.0, 1.5]) t = 0.15 # translation factor trans = np.array([[1, 0, 0, -t * shape[0]], [0, 1, 0, -t * shape[1]], [0, 0, 1, -t * shape[2]], [0, 0, 0, 1]]) trans_inv = npl.inv(trans) for rotation_angle in [-1 * np.pi / 6.0, 0.0, np.pi / 5.0]: for scale_factor in [0.83, 1.3, 2.07]: # scale rot = np.zeros(shape=(4, 4)) rot[:3, :3] = geometry.rodrigues_axis_rotation(axis, rotation_angle) rot[3, 3] = 1.0 scale = np.array([[1 * scale_factor, 0, 0, 0], [0, 1 * scale_factor, 0, 0], [0, 0, 1 * scale_factor, 0], [0, 0, 0, 1]]) static_grid2world = trans_inv.dot(scale.dot(rot.dot(trans))) moving_grid2world = npl.inv(static_grid2world) # Expected translation c_static = static_grid2world.dot((32, 32, 32, 1))[:3] c_moving = moving_grid2world.dot((16, 16, 16, 1))[:3] expected = np.eye(4) expected[:3, 3] = c_moving - c_static # Implementation under test actual = imaffine.transform_centers_of_mass(static, static_grid2world, moving, moving_grid2world) assert_array_almost_equal(actual.affine, expected) def test_transform_geometric_centers_3d(): # Create arbitrary image-to-space transforms axis = np.array([.5, 2.0, 1.5]) t = 0.15 # translation factor for theta in [-1 * np.pi / 6.0, 0.0, np.pi / 5.0]: # rotation angle for s in [0.83, 1.3, 2.07]: # scale m_shapes = [(256, 256, 128), (255, 255, 127), (64, 127, 142)] for shape_moving in m_shapes: s_shapes = [(256, 256, 128), (255, 255, 127), (64, 127, 142)] for shape_static in s_shapes: moving = np.ndarray(shape=shape_moving) static = np.ndarray(shape=shape_static) trans = np.array([[1, 0, 0, -t * shape_static[0]], [0, 1, 0, -t * shape_static[1]], [0, 0, 1, -t * shape_static[2]], [0, 0, 0, 1]]) trans_inv = npl.inv(trans) rot = np.zeros(shape=(4, 4)) rot[:3, :3] = geometry.rodrigues_axis_rotation(axis, theta) rot[3, 3] = 1.0 scale = np.array([[1 * s, 0, 0, 0], [0, 1 * s, 0, 0], [0, 0, 1 * s, 0], [0, 0, 0, 1]]) static_grid2world = trans_inv.dot( scale.dot(rot.dot(trans))) moving_grid2world = npl.inv(static_grid2world) # Expected translation c_static = np.array(shape_static, dtype=np.float64) * 0.5 c_static = tuple(c_static) c_static = static_grid2world.dot(c_static + (1,))[:3] c_moving = np.array(shape_moving, dtype=np.float64) * 0.5 c_moving = tuple(c_moving) c_moving = moving_grid2world.dot(c_moving + (1,))[:3] expected = np.eye(4) expected[:3, 3] = c_moving - c_static # Implementation under test actual = imaffine.transform_geometric_centers( static, static_grid2world, moving, moving_grid2world) assert_array_almost_equal(actual.affine, expected) def test_transform_origins_3d(): # Create arbitrary image-to-space transforms axis = np.array([.5, 2.0, 1.5]) t = 0.15 # translation factor for theta in [-1 * np.pi / 6.0, 0.0, np.pi / 5.0]: # rotation angle for s in [0.83, 1.3, 2.07]: # scale m_shapes = [(256, 256, 128), (255, 255, 127), (64, 127, 142)] for shape_moving in m_shapes: s_shapes = [(256, 256, 128), (255, 255, 127), (64, 127, 142)] for shape_static in s_shapes: moving = np.ndarray(shape=shape_moving) static = np.ndarray(shape=shape_static) trans = np.array([[1, 0, 0, -t * shape_static[0]], [0, 1, 0, -t * shape_static[1]], [0, 0, 1, -t * shape_static[2]], [0, 0, 0, 1]]) trans_inv = npl.inv(trans) rot = np.zeros(shape=(4, 4)) rot[:3, :3] = geometry.rodrigues_axis_rotation(axis, theta) rot[3, 3] = 1.0 scale = np.array([[1 * s, 0, 0, 0], [0, 1 * s, 0, 0], [0, 0, 1 * s, 0], 
[0, 0, 0, 1]]) static_grid2world = trans_inv.dot( scale.dot(rot.dot(trans))) moving_grid2world = npl.inv(static_grid2world) # Expected translation c_static = static_grid2world[:3, 3] c_moving = moving_grid2world[:3, 3] expected = np.eye(4) expected[:3, 3] = c_moving - c_static # Implementation under test actual = imaffine.transform_origins(static, static_grid2world, moving, moving_grid2world) assert_array_almost_equal(actual.affine, expected) def test_affreg_all_transforms(): # Test affine registration using all transforms with typical settings # Make sure dictionary entries are processed in the same order regardless # of the platform. # Otherwise any random numbers drawn within the loop would make # the test non-deterministic even if we fix the seed before the loop. # Right now, this test does not draw any samples, # but we still sort the entries # to prevent future related failures. for ttype in sorted(factors): dim = ttype[1] if dim == 2: nslices = 1 else: nslices = 45 factor = factors[ttype][0] sampling_pc = factors[ttype][1] transform = regtransforms[ttype] static, moving, static_grid2world, moving_grid2world, smask, mmask, T = \ setup_random_transform(transform, factor, nslices, 1.0) # Sum of absolute differences start_sad = np.abs(static - moving).sum() metric = imaffine.MutualInformationMetric(32, sampling_pc) affreg = imaffine.AffineRegistration(metric, [1000, 100, 50], [3, 1, 0], [4, 2, 1], 'L-BFGS-B', None, options=None) x0 = transform.get_identity_parameters() affine_map = affreg.optimize(static, moving, transform, x0, static_grid2world, moving_grid2world) transformed = affine_map.transform(moving) # Sum of absolute differences end_sad = np.abs(static - transformed).sum() reduction = 1 - end_sad / start_sad print("%s>>%f" % (ttype, reduction)) assert(reduction > 0.9) # Verify that exception is raised if level_iters is empty metric = imaffine.MutualInformationMetric(32) assert_raises(ValueError, imaffine.AffineRegistration, metric, []) def test_affreg_defaults(): # Test all default arguments with an arbitrary transform # Select an arbitrary transform (all of them are already tested # in test_affreg_all_transforms) transform_name = 'TRANSLATION' dim = 2 ttype = (transform_name, dim) aff_options = ['mass', 'voxel-origin', 'centers', None, np.eye(dim + 1)] for starting_affine in aff_options: if dim == 2: nslices = 1 else: nslices = 45 factor = factors[ttype][0] sampling_pc = factors[ttype][1] transform = regtransforms[ttype] id_param = transform.get_identity_parameters() static, moving, static_grid2world, moving_grid2world, smask, mmask, T = \ setup_random_transform(transform, factor, nslices, 1.0) # Sum of absolute differences start_sad = np.abs(static - moving).sum() metric = None x0 = None sigmas = None scale_factors = None level_iters = None static_grid2world = None moving_grid2world = None for ss_sigma_factor in [1.0, None]: affreg = imaffine.AffineRegistration(metric, level_iters, sigmas, scale_factors, 'L-BFGS-B', ss_sigma_factor, options=None) affine_map = affreg.optimize(static, moving, transform, x0, static_grid2world, moving_grid2world, starting_affine) transformed = affine_map.transform(moving) # Sum of absolute differences end_sad = np.abs(static - transformed).sum() reduction = 1 - end_sad / start_sad print("%s>>%f" % (ttype, reduction)) assert(reduction > 0.9) transformed_inv = affine_map.transform_inverse(static) # Sum of absolute differences end_sad = np.abs(moving - transformed_inv).sum() reduction = 1 - end_sad / start_sad print("%s>>%f" % (ttype, reduction)) 
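            # A reduction above 0.9 means the estimated transform removed at
            # least 90% of the initial sum-of-absolute-differences mismatch,
            # here checked through the inverse transform as well.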
assert(reduction > 0.9) def test_mi_gradient(): np.random.seed(2022966) # Test the gradient of mutual information h = 1e-5 # Make sure dictionary entries are processed in the same order regardless # of the platform. Otherwise any random numbers drawn within the loop would # make the test non-deterministic even if we fix the seed before the loop: # in this case the samples are drawn with `np.random.randn` below for ttype in sorted(factors): transform = regtransforms[ttype] dim = ttype[1] if dim == 2: nslices = 1 else: nslices = 45 factor = factors[ttype][0] sampling_proportion = factors[ttype][1] theta = factors[ttype][2] # Start from a small rotation start = regtransforms[('ROTATION', dim)] nrot = start.get_number_of_parameters() starting_affine = start.param_to_matrix(0.25 * np.random.randn(nrot)) # Get data (pair of images related to each other by an known transform) static, moving, static_g2w, moving_g2w, smask, mmask, M = \ setup_random_transform(transform, factor, nslices, 2.0) # Prepare a MutualInformationMetric instance mi_metric = imaffine.MutualInformationMetric(32, sampling_proportion) mi_metric.setup( transform, static, moving, starting_affine=starting_affine) # Compute the gradient with the implementation under test actual = mi_metric.gradient(theta) # Compute the gradient using finite-diferences n = transform.get_number_of_parameters() expected = np.empty(n, dtype=np.float64) val0 = mi_metric.distance(theta) for i in range(n): dtheta = theta.copy() dtheta[i] += h val1 = mi_metric.distance(dtheta) expected[i] = (val1 - val0) / h dp = expected.dot(actual) enorm = npl.norm(expected) anorm = npl.norm(actual) nprod = dp / (enorm * anorm) assert(nprod >= 0.99) def create_affine_transforms( dim, translations, rotations, scales, rot_axis=None): r""" Creates a list of affine transforms with all combinations of params This function is intended to be used for testing only. It generates affine transforms for all combinations of the input parameters in the following order: let T be a translation, R a rotation and S a scale. The generated affine will be: A = T.dot(S).dot(R).dot(T^{-1}) Translation is handled this way because it is convenient to provide the translation parameters in terms of the center of rotation we wish to generate. 
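In particular, the translation parameter t is a fixed point of the generated affine (it is mapped to itself regardless of the rotation and scale), which is why it can be interpreted as the center of rotation.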
Parameters ---------- dim: int (either dim=2 or dim=3) dimension of the affine transforms translations: sequence of dim-tuples each dim-tuple represents a translation parameter rotations: sequence of floats each number represents a rotation angle in radians scales: sequence of floats each number represents a scale rot_axis: rotation axis (used for dim=3 only) Returns ------- transforms: sequence of (dim + 1)x(dim + 1) matrices each matrix correspond to an affine transform with a combination of the input parameters """ transforms = [] for t in translations: trans_inv = np.eye(dim + 1) trans_inv[:dim, dim] = -t[:dim] trans = npl.inv(trans_inv) for theta in rotations: # rotation angle if dim == 2: ct = np.cos(theta) st = np.sin(theta) rot = np.array([[ct, -st, 0], [st, ct, 0], [0, 0, 1]]) else: rot = np.eye(dim + 1) rot[:3, :3] = geometry.rodrigues_axis_rotation(rot_axis, theta) for s in scales: # scale scale = np.eye(dim + 1) * s scale[dim, dim] = 1 affine = trans.dot(scale.dot(rot.dot(trans_inv))) transforms.append(affine) return transforms def test_affine_map(): np.random.seed(2112927) dom_shape = np.array([64, 64, 64], dtype=np.int32) cod_shape = np.array([80, 80, 80], dtype=np.int32) nx = dom_shape[0] ny = dom_shape[1] nz = dom_shape[2] # Radius of the circle/sphere (testing image) radius = 16 # Rotation axis (used for 3D transforms only) rot_axis = np.array([.5, 2.0, 1.5]) # Arbitrary transform parameters t = 0.15 rotations = [-1 * np.pi / 10.0, 0.0, np.pi / 10.0] scales = [0.9, 1.0, 1.1] for dim in [2, 3]: # Setup current dimension if dim == 2: # Create image of a circle img = vf.create_circle(cod_shape[0], cod_shape[1], radius) oracle_linear = vf.transform_2d_affine oracle_nn = vf.transform_2d_affine_nn else: # Create image of a sphere img = vf.create_sphere(cod_shape[0], cod_shape[1], cod_shape[2], radius) oracle_linear = vf.transform_3d_affine oracle_nn = vf.transform_3d_affine_nn img = np.array(img) # Translation is the only parameter differing for 2D and 3D translations = [t * dom_shape[:dim]] # Generate affine transforms gt_affines = create_affine_transforms(dim, translations, rotations, scales, rot_axis) # Include the None case gt_affines.append(None) for affine in gt_affines: # make both domain point to the same physical region # It's ok to use the same transform, we just want to test # that this information is actually being considered domain_grid2world = affine codomain_grid2world = affine grid2grid_transform = affine # Evaluate the transform with vector_fields module (already tested) expected_linear = oracle_linear(img, dom_shape[:dim], grid2grid_transform) expected_nn = oracle_nn(img, dom_shape[:dim], grid2grid_transform) # Evaluate the transform with the implementation under test affine_map = imaffine.AffineMap(affine, dom_shape[:dim], domain_grid2world, cod_shape[:dim], codomain_grid2world) actual_linear = affine_map.transform(img, interp='linear') actual_nn = affine_map.transform(img, interp='nearest') assert_array_almost_equal(actual_linear, expected_linear) assert_array_almost_equal(actual_nn, expected_nn) # Test set_affine with valid matrix affine_map.set_affine(affine) if affine is None: assert(affine_map.affine is None) assert(affine_map.affine_inv is None) else: assert_array_equal(affine, affine_map.affine) actual = affine_map.affine.dot(affine_map.affine_inv) assert_array_almost_equal(actual, np.eye(dim + 1)) # Evaluate via the inverse transform # AffineMap will use the inverse of the input matrix when we call # `transform_inverse`. 
Since the inverse of the inverse of a matrix # is not exactly equal to the original matrix (numerical # limitations) we need to invert the matrix twice to make sure # the oracle and the implementation under test apply the same # transform aff_inv = None if affine is None else npl.inv(affine) aff_inv_inv = None if aff_inv is None else npl.inv(aff_inv) expected_linear = oracle_linear(img, dom_shape[:dim], aff_inv_inv) expected_nn = oracle_nn(img, dom_shape[:dim], aff_inv_inv) affine_map = imaffine.AffineMap(aff_inv, cod_shape[:dim], codomain_grid2world, dom_shape[:dim], domain_grid2world) actual_linear = affine_map.transform_inverse(img, interp='linear') actual_nn = affine_map.transform_inverse(img, interp='nearest') assert_array_almost_equal(actual_linear, expected_linear) assert_array_almost_equal(actual_nn, expected_nn) # Verify AffineMap cannot be created with a non-invertible matrix invalid_nan = np.zeros((dim + 1, dim + 1), dtype=np.float64) invalid_nan[1, 1] = np.nan invalid_zeros = np.zeros((dim + 1, dim + 1), dtype=np.float64) assert_raises( imaffine.AffineInversionError, imaffine.AffineMap, invalid_nan) assert_raises( imaffine.AffineInversionError, imaffine.AffineMap, invalid_zeros) # Test exception is raised when the affine transform matrix is not # valid invalid_shape = np.eye(dim) affmap_invalid_shape = imaffine.AffineMap(invalid_shape, dom_shape[:dim], None, cod_shape[:dim], None) assert_raises(ValueError, affmap_invalid_shape.transform, img) assert_raises(ValueError, affmap_invalid_shape.transform_inverse, img) # Verify exception is raised when sampling info is not provided valid = np.eye(3) affmap_invalid_shape = imaffine.AffineMap(valid) assert_raises(ValueError, affmap_invalid_shape.transform, img) assert_raises(ValueError, affmap_invalid_shape.transform_inverse, img) # Verify exception is raised when requesting an invalid interpolation assert_raises(ValueError, affine_map.transform, img, 'invalid') assert_raises(ValueError, affine_map.transform_inverse, img, 'invalid') # Verify exception is raised when attempting to warp an image of # invalid dimension for dim in [2, 3]: affine_map = imaffine.AffineMap(np.eye(dim), cod_shape[:dim], None, dom_shape[:dim], None) for sh in [(2,), (2, 2, 2, 2)]: img = np.zeros(sh) assert_raises(ValueError, affine_map.transform, img) assert_raises(ValueError, affine_map.transform_inverse, img) aff_sing = np.zeros((dim + 1, dim + 1)) aff_nan = np.zeros((dim + 1, dim + 1)) aff_nan[...] = np.nan aff_inf = np.zeros((dim + 1, dim + 1)) aff_inf[...] = np.inf assert_raises( AffineInversionError, affine_map.set_affine, aff_sing) assert_raises(AffineInversionError, affine_map.set_affine, aff_nan) assert_raises(AffineInversionError, affine_map.set_affine, aff_inf) def test_MIMetric_invalid_params(): transform = regtransforms[('AFFINE', 3)] static = np.random.rand(20, 20, 20) moving = np.random.rand(20, 20, 20) n = transform.get_number_of_parameters() sampling_proportion = 0.3 theta_sing = np.zeros(n) theta_nan = np.zeros(n) theta_nan[...] = np.nan theta_inf = np.zeros(n) theta_nan[...] 
= np.inf mi_metric = imaffine.MutualInformationMetric(32, sampling_proportion) mi_metric.setup(transform, static, moving) for theta in [theta_sing, theta_nan, theta_inf]: # Test metric value at invalid params actual_val = mi_metric.distance(theta) assert(np.isinf(actual_val)) # Test gradient at invalid params expected_grad = np.zeros(n) actual_grad = mi_metric.gradient(theta) assert_equal(actual_grad, expected_grad) # Test both actual_val, actual_grad = mi_metric.distance_and_gradient(theta) assert(np.isinf(actual_val)) assert_equal(actual_grad, expected_grad) dipy-0.13.0/dipy/align/tests/test_imwarp.py000066400000000000000000001007221317371701200206550ustar00rootroot00000000000000from __future__ import print_function import numpy as np import nibabel.eulerangles as eulerangles from numpy.testing import (assert_equal, assert_array_equal, assert_array_almost_equal, assert_raises) from dipy.data import get_data from dipy.align import floating from dipy.align import imwarp as imwarp from dipy.align import metrics as metrics from dipy.align import vector_fields as vfu from dipy.align import VerbosityLevels from dipy.align.imwarp import DiffeomorphicMap def test_mult_aff(): r""" Test matrix multiplication using None as identity """ A = np.array([[1.0, 2.0], [3.0, 4.0]]) B = np.array([[2.0, 0.0], [0.0, 2.0]]) C = imwarp.mult_aff(A, B) expected_mult = np.array([[2.0, 4.0], [6.0, 8.0]]) assert_array_almost_equal(C, expected_mult) C = imwarp.mult_aff(A, None) assert_array_almost_equal(C, A) C = imwarp.mult_aff(None, B) assert_array_almost_equal(C, B) C = imwarp.mult_aff(None, None) assert_equal(C, None) def test_diffeomorphic_map_2d(): r""" Test 2D DiffeomorphicMap Creates a random displacement field that exactly maps pixels from an input image to an output image. First a discrete random assignment between the images is generated, then each pair of mapped points are transformed to the physical space by assigning a pair of arbitrary, fixed affine matrices to input and output images, and finaly the difference between their positions is taken as the displacement vector. The resulting displacement, although operating in physical space, maps the points exactly (up to numerical precision). """ np.random.seed(2022966) domain_shape = (10, 10) codomain_shape = (10, 10) # create a simple affine transformation nr = domain_shape[0] nc = domain_shape[1] s = 1.1 t = 0.25 trans = np.array([[1, 0, -t * nr], [0, 1, -t * nc], [0, 0, 1]]) trans_inv = np.linalg.inv(trans) scale = np.array([[1 * s, 0, 0], [0, 1 * s, 0], [0, 0, 1]]) gt_affine = trans_inv.dot(scale.dot(trans)) # create the random displacement field domain_grid2world = gt_affine codomain_grid2world = gt_affine disp, assign = vfu.create_random_displacement_2d( np.array(domain_shape, dtype=np.int32), domain_grid2world, np.array(codomain_shape, dtype=np.int32), codomain_grid2world) disp = np.array(disp, dtype=floating) assign = np.array(assign) # create a random image (with decimal digits) to warp moving_image = np.ndarray(codomain_shape, dtype=floating) ns = np.size(moving_image) moving_image[...] 
= np.random.randint(0, 10, ns).reshape(codomain_shape) # set boundary values to zero so we don't test wrong interpolation due # to floating point precision moving_image[0, :] = 0 moving_image[-1, :] = 0 moving_image[:, 0] = 0 moving_image[:, -1] = 0 # warp the moving image using the (exact) assignments expected = moving_image[(assign[..., 0], assign[..., 1])] # warp using a DiffeomorphicMap instance diff_map = imwarp.DiffeomorphicMap(2, domain_shape, domain_grid2world, domain_shape, domain_grid2world, codomain_shape, codomain_grid2world, None) diff_map.forward = disp # Verify that the transform method accepts different image types (note that # the actual image contained integer values, we don't want to test # rounding) for type in [floating, np.float64, np.int64, np.int32]: moving_image = moving_image.astype(type) # warp using linear interpolation warped = diff_map.transform(moving_image, 'linear') # compare the images (the linear interpolation may introduce slight # precision errors) assert_array_almost_equal(warped, expected, decimal=5) # Now test the nearest neighbor interpolation warped = diff_map.transform(moving_image, 'nearest') # compare the images (now we dont have to worry about precision, # it is n.n.) assert_array_almost_equal(warped, expected) # verify the is_inverse flag inv = diff_map.inverse() warped = inv.transform_inverse(moving_image, 'linear') assert_array_almost_equal(warped, expected, decimal=5) warped = inv.transform_inverse(moving_image, 'nearest') assert_array_almost_equal(warped, expected) # Now test the inverse functionality diff_map = imwarp.DiffeomorphicMap(2, codomain_shape, codomain_grid2world, codomain_shape, codomain_grid2world, domain_shape, domain_grid2world, None) diff_map.backward = disp for type in [floating, np.float64, np.int64, np.int32]: moving_image = moving_image.astype(type) # warp using linear interpolation warped = diff_map.transform_inverse(moving_image, 'linear') # compare the images (the linear interpolation may introduce slight # precision errors) assert_array_almost_equal(warped, expected, decimal=5) # Now test the nearest neighbor interpolation warped = diff_map.transform_inverse(moving_image, 'nearest') # compare the images (now we don't have to worry about precision, # it is nearest neighbour) assert_array_almost_equal(warped, expected) # Verify that DiffeomorphicMap raises the appropriate exceptions when # the sampling information is undefined diff_map = imwarp.DiffeomorphicMap(2, domain_shape, domain_grid2world, domain_shape, domain_grid2world, codomain_shape, codomain_grid2world, None) diff_map.forward = disp diff_map.domain_shape = None # If we don't provide the sampling info, it should try to use the map's # info, but it's None... assert_raises(ValueError, diff_map.transform, moving_image, 'linear') # Same test for diff_map.transform_inverse diff_map = imwarp.DiffeomorphicMap(2, domain_shape, domain_grid2world, domain_shape, domain_grid2world, codomain_shape, codomain_grid2world, None) diff_map.forward = disp diff_map.codomain_shape = None # If we don't provide the sampling info, it should try to use the map's # info, but it's None... 
assert_raises(ValueError, diff_map.transform_inverse, moving_image, 'linear') # We must provide, at least, the reference grid shape assert_raises(ValueError, imwarp.DiffeomorphicMap, 2, None) # Verify that matrices are correctly interpreted from string non_array_obj = diff_map array_obj = np.ones((3, 3)) assert_raises(ValueError, diff_map.interpret_matrix, 'a different string') assert_raises(ValueError, diff_map.interpret_matrix, non_array_obj) assert(diff_map.interpret_matrix('identity') is None) assert(diff_map.interpret_matrix(None) is None) assert_array_equal(diff_map.interpret_matrix(array_obj), array_obj) def test_diffeomorphic_map_simplification_2d(): r""" Test simplification of 2D diffeomorphic maps Create an invertible deformation field, and define a DiffeomorphicMap using different voxel-to-space transforms for domain, codomain, and reference discretizations, also use a non-identity pre-aligning matrix. Warp a circle using the diffeomorphic map to obtain the expected warped circle. Now simplify the DiffeomorphicMap and warp the same circle using this simplified map. Verify that the two warped circles are equal up to numerical precision. """ # create a simple affine transformation dom_shape = (64, 64) cod_shape = (80, 80) nr = dom_shape[0] nc = dom_shape[1] s = 1.1 t = 0.25 trans = np.array([[1, 0, -t * nr], [0, 1, -t * nc], [0, 0, 1]]) trans_inv = np.linalg.inv(trans) scale = np.array([[1 * s, 0, 0], [0, 1 * s, 0], [0, 0, 1]]) gt_affine = trans_inv.dot(scale.dot(trans)) # Create the invertible displacement fields and the circle radius = 16 circle = vfu.create_circle(cod_shape[0], cod_shape[1], radius) d, dinv = vfu.create_harmonic_fields_2d(dom_shape[0], dom_shape[1], 0.3, 6) # Define different voxel-to-space transforms for domain, codomain and # reference grid, also, use a non-identity pre-align transform D = gt_affine C = imwarp.mult_aff(gt_affine, gt_affine) R = np.eye(3) P = gt_affine # Create the original diffeomorphic map diff_map = imwarp.DiffeomorphicMap(2, dom_shape, R, dom_shape, D, cod_shape, C, P) diff_map.forward = np.array(d, dtype=floating) diff_map.backward = np.array(dinv, dtype=floating) # Warp the circle to obtain the expected image expected = diff_map.transform(circle, 'linear') # Simplify simplified = diff_map.get_simplified_transform() # warp the circle warped = simplified.transform(circle, 'linear') # verify that the simplified map is equivalent to the # original one assert_array_almost_equal(warped, expected) # And of course, it must be simpler... assert_equal(simplified.domain_grid2world, None) assert_equal(simplified.codomain_grid2world, None) assert_equal(simplified.disp_grid2world, None) assert_equal(simplified.domain_world2grid, None) assert_equal(simplified.codomain_world2grid, None) assert_equal(simplified.disp_world2grid, None) def test_diffeomorphic_map_simplification_3d(): r""" Test simplification of 3D diffeomorphic maps Create an invertible deformation field, and define a DiffeomorphicMap using different voxel-to-space transforms for domain, codomain, and reference discretizations, also use a non-identity pre-aligning matrix. Warp a sphere using the diffeomorphic map to obtain the expected warped sphere. Now simplify the DiffeomorphicMap and warp the same sphere using this simplified map. Verify that the two warped spheres are equal up to numerical precision. 
""" # create a simple affine transformation domain_shape = (64, 64, 64) codomain_shape = (80, 80, 80) nr = domain_shape[0] nc = domain_shape[1] ns = domain_shape[2] s = 1.1 t = 0.25 trans = np.array([[1, 0, 0, -t * ns], [0, 1, 0, -t * nr], [0, 0, 1, -t * nc], [0, 0, 0, 1]]) trans_inv = np.linalg.inv(trans) scale = np.array([[1 * s, 0, 0, 0], [0, 1 * s, 0, 0], [0, 0, 1 * s, 0], [0, 0, 0, 1]]) gt_affine = trans_inv.dot(scale.dot(trans)) # Create the invertible displacement fields and the sphere radius = 16 sphere = vfu.create_sphere(codomain_shape[0], codomain_shape[1], codomain_shape[2], radius) d, dinv = vfu.create_harmonic_fields_3d(domain_shape[0], domain_shape[1], domain_shape[2], 0.3, 6) # Define different voxel-to-space transforms for domain, codomain and # reference grid, also, use a non-identity pre-align transform D = gt_affine C = imwarp.mult_aff(gt_affine, gt_affine) R = np.eye(4) P = gt_affine # Create the original diffeomorphic map diff_map = imwarp.DiffeomorphicMap(3, domain_shape, R, domain_shape, D, codomain_shape, C, P) diff_map.forward = np.array(d, dtype=floating) diff_map.backward = np.array(dinv, dtype=floating) # Warp the sphere to obtain the expected image expected = diff_map.transform(sphere, 'linear') # Simplify simplified = diff_map.get_simplified_transform() # warp the sphere warped = simplified.transform(sphere, 'linear') # verify that the simplified map is equivalent to the # original one assert_array_almost_equal(warped, expected) # And of course, it must be simpler... assert_equal(simplified.domain_grid2world, None) assert_equal(simplified.codomain_grid2world, None) assert_equal(simplified.disp_grid2world, None) assert_equal(simplified.domain_world2grid, None) assert_equal(simplified.codomain_world2grid, None) assert_equal(simplified.disp_world2grid, None) def test_optimizer_exceptions(): r""" Test exceptions from SyN """ # An arbitrary valid metric metric = metrics.SSDMetric(2) # The metric must not be None assert_raises(ValueError, imwarp.SymmetricDiffeomorphicRegistration, None) # The iterations list must not be empty assert_raises(ValueError, imwarp.SymmetricDiffeomorphicRegistration, metric, []) optimizer = imwarp.SymmetricDiffeomorphicRegistration(metric, None) # Verify the default iterations list assert_array_equal(optimizer.level_iters, [100, 100, 25]) # Verify exception thrown when attepting to fit the energy profile without # enough data assert_raises(ValueError, optimizer._get_energy_derivative) def test_get_direction_and_spacings(): r""" Test direction and spacings from affine transforms """ xrot = 0.5 yrot = 0.75 zrot = 1.0 direction_gt = eulerangles.euler2mat(zrot, yrot, xrot) spacings_gt = np.array([1.1, 1.2, 1.3]) scaling_gt = np.diag(spacings_gt) translation_gt = np.array([1, 2, 3]) affine = np.eye(4) affine[:3, :3] = direction_gt.dot(scaling_gt) affine[:3, 3] = translation_gt direction, spacings = imwarp.get_direction_and_spacings(affine, 3) assert_array_almost_equal(direction, direction_gt) assert_array_almost_equal(spacings, spacings_gt) def simple_callback(sdr, status): r""" Verify callback function is called from SyN """ if status == imwarp.RegistrationStages.INIT_START: sdr.INIT_START_CALLED = 1 if status == imwarp.RegistrationStages.INIT_END: sdr.INIT_END_CALLED = 1 if status == imwarp.RegistrationStages.OPT_START: sdr.OPT_START_CALLED = 1 if status == imwarp.RegistrationStages.OPT_END: sdr.OPT_END_CALLED = 1 if status == imwarp.RegistrationStages.SCALE_START: sdr.SCALE_START_CALLED = 1 if status == imwarp.RegistrationStages.SCALE_END: 
sdr.SCALE_END_CALLED = 1 if status == imwarp.RegistrationStages.ITER_START: sdr.ITER_START_CALLED = 1 if status == imwarp.RegistrationStages.ITER_END: sdr.ITER_END_CALLED = 1 def test_ssd_2d_demons(): r''' Test 2D SyN with SSD metric, demons-like optimizer Classical Circle-To-C experiment for 2D monomodal registration. We verify that the final registration is of good quality. ''' fname_moving = get_data('reg_o') fname_static = get_data('reg_c') moving = np.load(fname_moving) static = np.load(fname_static) moving = np.array(moving, dtype=floating) static = np.array(static, dtype=floating) moving = (moving - moving.min()) / (moving.max() - moving.min()) static = (static - static.min()) / (static.max() - static.min()) # Create the SSD metric smooth = 4 step_type = 'demons' similarity_metric = metrics.SSDMetric( 2, smooth=smooth, step_type=step_type) # Configure and run the Optimizer level_iters = [200, 100, 50, 25] step_length = 0.25 opt_tol = 1e-4 inv_iter = 40 inv_tol = 1e-3 ss_sigma_factor = 0.2 optimizer = imwarp.SymmetricDiffeomorphicRegistration( similarity_metric, level_iters, step_length, ss_sigma_factor, opt_tol, inv_iter, inv_tol) # test callback being called optimizer.INIT_START_CALLED = 0 optimizer.INIT_END_CALLED = 0 optimizer.OPT_START_CALLED = 0 optimizer.OPT_END_CALLED = 0 optimizer.SCALE_START_CALLED = 0 optimizer.SCALE_END_CALLED = 0 optimizer.ITER_START_CALLED = 0 optimizer.ITER_END_CALLED = 0 optimizer.callback_counter_test = 0 optimizer.callback = simple_callback optimizer.verbosity = VerbosityLevels.DEBUG mapping = optimizer.optimize(static, moving, None) m = optimizer.get_map() assert_equal(mapping, m) warped = mapping.transform(moving) starting_energy = np.sum((static - moving)**2) final_energy = np.sum((static - warped)**2) reduced = 1.0 - final_energy / starting_energy assert(reduced > 0.9) assert_equal(optimizer.OPT_START_CALLED, 1) assert_equal(optimizer.OPT_END_CALLED, 1) assert_equal(optimizer.SCALE_START_CALLED, 1) assert_equal(optimizer.SCALE_END_CALLED, 1) assert_equal(optimizer.ITER_START_CALLED, 1) assert_equal(optimizer.ITER_END_CALLED, 1) def test_ssd_2d_gauss_newton(): r''' Test 2D SyN with SSD metric, Gauss-Newton optimizer Classical Circle-To-C experiment for 2D monomodal registration. We verify that the final registration is of good quality. 
''' fname_moving = get_data('reg_o') fname_static = get_data('reg_c') moving = np.load(fname_moving) static = np.load(fname_static) moving = np.array(moving, dtype=floating) static = np.array(static, dtype=floating) moving = (moving - moving.min()) / (moving.max() - moving.min()) static = (static - static.min()) / (static.max() - static.min()) # Create the SSD metric smooth = 4 inner_iter = 5 step_type = 'gauss_newton' similarity_metric = metrics.SSDMetric(2, smooth, inner_iter, step_type) # Configure and run the Optimizer level_iters = [200, 100, 50, 25] step_length = 0.5 opt_tol = 1e-4 inv_iter = 40 inv_tol = 1e-3 ss_sigma_factor = 0.2 optimizer = imwarp.SymmetricDiffeomorphicRegistration( similarity_metric, level_iters, step_length, ss_sigma_factor, opt_tol, inv_iter, inv_tol) # test callback not being called optimizer.INIT_START_CALLED = 0 optimizer.INIT_END_CALLED = 0 optimizer.OPT_START_CALLED = 0 optimizer.OPT_END_CALLED = 0 optimizer.SCALE_START_CALLED = 0 optimizer.SCALE_END_CALLED = 0 optimizer.ITER_START_CALLED = 0 optimizer.ITER_END_CALLED = 0 optimizer.verbosity = VerbosityLevels.DEBUG id = np.eye(3) mapping = optimizer.optimize(static, moving, id, id, id) m = optimizer.get_map() assert_equal(mapping, m) warped = mapping.transform(moving) starting_energy = np.sum((static - moving)**2) final_energy = np.sum((static - warped)**2) reduced = 1.0 - final_energy / starting_energy assert(reduced > 0.9) assert_equal(optimizer.OPT_START_CALLED, 0) assert_equal(optimizer.OPT_END_CALLED, 0) assert_equal(optimizer.SCALE_START_CALLED, 0) assert_equal(optimizer.SCALE_END_CALLED, 0) assert_equal(optimizer.ITER_START_CALLED, 0) assert_equal(optimizer.ITER_END_CALLED, 0) def get_warped_stacked_image(image, nslices, b, m): r""" Creates a volume by stacking copies of a deformed image The image is deformed under an invertible field, and a 3D volume is generated as follows: the first and last `nslices`//3 slices are filled with zeros to simulate background. The remaining middle slices are filled with copies of the deformed `image` under the action of the invertible field. Parameters ---------- image : 2d array shape(r, c) the image to be deformed nslices : int the number of slices in the final volume b, m : float parameters of the harmonic field (as in [1]). Returns ------- vol : array shape(r, c) if `nslices`==1 else (r, c, `nslices`) the volumed generated using the undeformed image wvol : array shape(r, c) if `nslices`==1 else (r, c, `nslices`) the volumed generated using the warped image References ---------- [1] Chen, M., Lu, W., Chen, Q., Ruchala, K. J., & Olivera, G. H. (2008). A simple fixed-point approach to invert a deformation field. Medical Physics, 35(1), 81. 
doi:10.1118/1.2816107 """ shape = image.shape # create a synthetic invertible map and warp the circle d, dinv = vfu.create_harmonic_fields_2d(shape[0], shape[1], b, m) d = np.asarray(d, dtype=floating) dinv = np.asarray(dinv, dtype=floating) mapping = DiffeomorphicMap(2, shape) mapping.forward, mapping.backward = d, dinv wimage = mapping.transform(image) if(nslices == 1): return image, wimage # normalize and form the 3d by piling slices image = image.astype(floating) image = (image - image.min()) / (image.max() - image.min()) zero_slices = nslices // 3 vol = np.zeros(shape=image.shape + (nslices,)) vol[..., zero_slices:(2 * zero_slices)] = image[..., None] wvol = np.zeros(shape=image.shape + (nslices,)) wvol[..., zero_slices:(2 * zero_slices)] = wimage[..., None] return vol, wvol def get_synthetic_warped_circle(nslices): # get a subsampled circle fname_cicle = get_data('reg_o') circle = np.load(fname_cicle)[::4, ::4].astype(floating) # create a synthetic invertible map and warp the circle d, dinv = vfu.create_harmonic_fields_2d(64, 64, 0.1, 4) d = np.asarray(d, dtype=floating) dinv = np.asarray(dinv, dtype=floating) mapping = DiffeomorphicMap(2, (64, 64)) mapping.forward, mapping.backward = d, dinv wcircle = mapping.transform(circle) if(nslices == 1): return circle, wcircle # normalize and form the 3d by piling slices circle = (circle - circle.min()) / (circle.max() - circle.min()) circle_3d = np.ndarray(circle.shape + (nslices,), dtype=floating) circle_3d[...] = circle[..., None] circle_3d[..., 0] = 0 circle_3d[..., -1] = 0 # do the same with the warped circle wcircle = (wcircle - wcircle.min()) / (wcircle.max() - wcircle.min()) wcircle_3d = np.ndarray(wcircle.shape + (nslices,), dtype=floating) wcircle_3d[...] = wcircle[..., None] wcircle_3d[..., 0] = 0 wcircle_3d[..., -1] = 0 return circle_3d, wcircle_3d def test_ssd_3d_demons(): r''' Test 3D SyN with SSD metric, demons-like optimizer Register a stack of circles ('cylinder') before and after warping them with a synthetic diffeomorphism. We verify that the final registration is of good quality. ''' moving, static = get_synthetic_warped_circle(30) moving[..., :8] = 0 moving[..., -1:-9:-1] = 0 static[..., :8] = 0 static[..., -1:-9:-1] = 0 # Create the SSD metric smooth = 4 step_type = 'demons' similarity_metric = metrics.SSDMetric(3, smooth=smooth, step_type=step_type) # Create the optimizer level_iters = [10, 10] step_length = 0.1 opt_tol = 1e-4 inv_iter = 20 inv_tol = 1e-3 ss_sigma_factor = 0.5 optimizer = imwarp.SymmetricDiffeomorphicRegistration( similarity_metric, level_iters, step_length, ss_sigma_factor, opt_tol, inv_iter, inv_tol) optimizer.verbosity = VerbosityLevels.DEBUG mapping = optimizer.optimize(static, moving, None) m = optimizer.get_map() assert_equal(mapping, m) warped = mapping.transform(moving) starting_energy = np.sum((static - moving)**2) final_energy = np.sum((static - warped)**2) reduced = 1.0 - final_energy / starting_energy assert(reduced > 0.9) def test_ssd_3d_gauss_newton(): r''' Test 3D SyN with SSD metric, Gauss-Newton optimizer Register a stack of circles ('cylinder') before and after warping them with a synthetic diffeomorphism. We verify that the final registration is of good quality. 
''' moving, static = get_synthetic_warped_circle(35) moving[..., :10] = 0 moving[..., -1:-11:-1] = 0 static[..., :10] = 0 static[..., -1:-11:-1] = 0 # Create the SSD metric smooth = 4 inner_iter = 5 step_type = 'gauss_newton' similarity_metric = metrics.SSDMetric(3, smooth, inner_iter, step_type) # Create the optimizer level_iters = [10, 10] step_length = 0.1 opt_tol = 1e-4 inv_iter = 20 inv_tol = 1e-3 ss_sigma_factor = 0.5 optimizer = imwarp.SymmetricDiffeomorphicRegistration( similarity_metric, level_iters, step_length, ss_sigma_factor, opt_tol, inv_iter, inv_tol) optimizer.verbosity = VerbosityLevels.DEBUG mapping = optimizer.optimize(static, moving, None) m = optimizer.get_map() assert_equal(mapping, m) warped = mapping.transform(moving) starting_energy = np.sum((static - moving)**2) final_energy = np.sum((static - warped)**2) reduced = 1.0 - final_energy / starting_energy assert(reduced > 0.9) def test_cc_2d(): r''' Test 2D SyN with CC metric Register a coronal slice from a T1w brain MRI before and after warping it under a synthetic invertible map. We verify that the final registration is of good quality. ''' fname = get_data('t1_coronal_slice') nslices = 1 b = 0.1 m = 4 image = np.load(fname) moving, static = get_warped_stacked_image(image, nslices, b, m) # Configure the metric sigma_diff = 3.0 radius = 4 metric = metrics.CCMetric(2, sigma_diff, radius) # Configure and run the Optimizer level_iters = [15, 5] optimizer = imwarp.SymmetricDiffeomorphicRegistration(metric, level_iters) optimizer.verbosity = VerbosityLevels.DEBUG mapping = optimizer.optimize(static, moving, None) m = optimizer.get_map() assert_equal(mapping, m) warped = mapping.transform(moving) starting_energy = np.sum((static - moving)**2) final_energy = np.sum((static - warped)**2) reduced = 1.0 - final_energy / starting_energy assert(reduced > 0.9) def test_cc_3d(): r''' Test 3D SyN with CC metric Register a volume created by stacking copies of a coronal slice from a T1w brain MRI before and after warping it under a synthetic invertible map. We verify that the final registration is of good quality. ''' fname = get_data('t1_coronal_slice') nslices = 21 b = 0.1 m = 4 image = np.load(fname) moving, static = get_warped_stacked_image(image, nslices, b, m) # Create the CC metric sigma_diff = 2.0 radius = 2 similarity_metric = metrics.CCMetric(3, sigma_diff, radius) # Create the optimizer level_iters = [20, 5] step_length = 0.25 opt_tol = 1e-4 inv_iter = 20 inv_tol = 1e-3 ss_sigma_factor = 0.2 optimizer = imwarp.SymmetricDiffeomorphicRegistration( similarity_metric, level_iters, step_length, ss_sigma_factor, opt_tol, inv_iter, inv_tol) optimizer.verbosity = VerbosityLevels.DEBUG mapping = optimizer.optimize(static, moving, None, None, None) m = optimizer.get_map() assert_equal(mapping, m) warped = mapping.transform(moving) starting_energy = np.sum((static - moving)**2) final_energy = np.sum((static - warped)**2) reduced = 1.0 - final_energy / starting_energy assert(reduced > 0.9) def test_em_3d_gauss_newton(): r''' Test 3D SyN with EM metric, Gauss-Newton optimizer Register a volume created by stacking copies of a coronal slice from a T1w brain MRI before and after warping it under a synthetic invertible map. We verify that the final registration is of good quality. 
''' fname = get_data('t1_coronal_slice') nslices = 21 b = 0.1 m = 4 image = np.load(fname) moving, static = get_warped_stacked_image(image, nslices, b, m) # Create the EM metric smooth = 2.0 inner_iter = 20 step_length = 0.25 q_levels = 256 double_gradient = True iter_type = 'gauss_newton' similarity_metric = metrics.EMMetric( 3, smooth, inner_iter, q_levels, double_gradient, iter_type) # Create the optimizer level_iters = [20, 5] opt_tol = 1e-4 inv_iter = 20 inv_tol = 1e-3 ss_sigma_factor = 1.0 optimizer = imwarp.SymmetricDiffeomorphicRegistration( similarity_metric, level_iters, step_length, ss_sigma_factor, opt_tol, inv_iter, inv_tol) optimizer.verbosity = VerbosityLevels.DEBUG mapping = optimizer.optimize(static, moving, None) m = optimizer.get_map() assert_equal(mapping, m) warped = mapping.transform(moving) starting_energy = np.sum((static - moving)**2) final_energy = np.sum((static - warped)**2) reduced = 1.0 - final_energy / starting_energy assert(reduced > 0.9) def test_em_2d_gauss_newton(): r''' Test 2D SyN with EM metric, Gauss-Newton optimizer Register a coronal slice from a T1w brain MRI before and after warping it under a synthetic invertible map. We verify that the final registration is of good quality. ''' fname = get_data('t1_coronal_slice') nslices = 1 b = 0.1 m = 4 image = np.load(fname) moving, static = get_warped_stacked_image(image, nslices, b, m) # Configure the metric smooth = 5.0 inner_iter = 20 q_levels = 256 double_gradient = False iter_type = 'gauss_newton' metric = metrics.EMMetric( 2, smooth, inner_iter, q_levels, double_gradient, iter_type) # Configure and run the Optimizer level_iters = [40, 20, 10] optimizer = imwarp.SymmetricDiffeomorphicRegistration(metric, level_iters) optimizer.verbosity = VerbosityLevels.DEBUG mapping = optimizer.optimize(static, moving, None) m = optimizer.get_map() assert_equal(mapping, m) warped = mapping.transform(moving) starting_energy = np.sum((static - moving)**2) final_energy = np.sum((static - warped)**2) reduced = 1.0 - final_energy / starting_energy assert(reduced > 0.9) def test_em_3d_demons(): r''' Test 3D SyN with EM metric, demons-like optimizer Register a volume created by stacking copies of a coronal slice from a T1w brain MRI before and after warping it under a synthetic invertible map. We verify that the final registration is of good quality. ''' fname = get_data('t1_coronal_slice') nslices = 21 b = 0.1 m = 4 image = np.load(fname) moving, static = get_warped_stacked_image(image, nslices, b, m) # Create the EM metric smooth = 2.0 inner_iter = 20 step_length = 0.25 q_levels = 256 double_gradient = True iter_type = 'demons' similarity_metric = metrics.EMMetric( 3, smooth, inner_iter, q_levels, double_gradient, iter_type) # Create the optimizer level_iters = [20, 5] opt_tol = 1e-4 inv_iter = 20 inv_tol = 1e-3 ss_sigma_factor = 1.0 optimizer = imwarp.SymmetricDiffeomorphicRegistration( similarity_metric, level_iters, step_length, ss_sigma_factor, opt_tol, inv_iter, inv_tol) optimizer.verbosity = VerbosityLevels.DEBUG mapping = optimizer.optimize(static, moving, None) m = optimizer.get_map() assert_equal(mapping, m) warped = mapping.transform(moving) starting_energy = np.sum((static - moving)**2) final_energy = np.sum((static - warped)**2) reduced = 1.0 - final_energy / starting_energy assert(reduced > 0.9) def test_em_2d_demons(): r''' Test 2D SyN with EM metric, demons-like optimizer Register a coronal slice from a T1w brain MRI before and after warping it under a synthetic invertible map. 
We verify that the final registration is of good quality. ''' fname = get_data('t1_coronal_slice') nslices = 1 b = 0.1 m = 4 image = np.load(fname) moving, static = get_warped_stacked_image(image, nslices, b, m) # Configure the metric smooth = 2.0 inner_iter = 20 q_levels = 256 double_gradient = False iter_type = 'demons' metric = metrics.EMMetric( 2, smooth, inner_iter, q_levels, double_gradient, iter_type) # Configure and run the Optimizer level_iters = [40, 20, 10] optimizer = imwarp.SymmetricDiffeomorphicRegistration(metric, level_iters) optimizer.verbosity = VerbosityLevels.DEBUG mapping = optimizer.optimize(static, moving, None) m = optimizer.get_map() assert_equal(mapping, m) warped = mapping.transform(moving) starting_energy = np.sum((static - moving)**2) final_energy = np.sum((static - warped)**2) reduced = 1.0 - final_energy / starting_energy assert(reduced > 0.9) dipy-0.13.0/dipy/align/tests/test_metrics.py000066400000000000000000000220271317371701200210250ustar00rootroot00000000000000import numpy as np from scipy import ndimage from dipy.align import floating from dipy.align.metrics import SSDMetric, CCMetric, EMMetric from numpy.testing import (assert_array_equal, assert_array_almost_equal, assert_raises) def test_exceptions(): for invalid_dim in [-1, 0, 1, 4, 5]: assert_raises(ValueError, CCMetric, invalid_dim) assert_raises(ValueError, EMMetric, invalid_dim) assert_raises(ValueError, SSDMetric, invalid_dim) assert_raises(ValueError, SSDMetric, 3, step_type='unknown_metric_name') assert_raises(ValueError, EMMetric, 3, step_type='unknown_metric_name') def test_EMMetric_image_dynamics(): np.random.seed(7181309) metric = EMMetric(2) target_shape = (10, 10) # create a random image image = np.ndarray(target_shape, dtype=floating) image[...] = np.random.randint( 0, 10, np.size(image)).reshape(tuple(target_shape)) # compute the expected binary mask expected = (image > 0).astype(np.int32) metric.use_static_image_dynamics(image, None) assert_array_equal(expected, metric.static_image_mask) metric.use_moving_image_dynamics(image, None) assert_array_equal(expected, metric.moving_image_mask) def test_em_demons_step_2d(): r""" Compares the output of the demons step in 2d against an analytical step. The fixed image is given by $F(x) = \frac{1}{2}||x - c_f||^2$, the moving image is given by $G(x) = \frac{1}{2}||x - c_g||^2$, $x, c_f, c_g \in R^{2}$ References ---------- [Vercauteren09] Vercauteren, T., Pennec, X., Perchant, A., & Ayache, N. (2009). Diffeomorphic demons: efficient non-parametric image registration. NeuroImage, 45(1 Suppl), S61-72. doi:10.1016/j.neuroimage.2008.10.040 """ # Select arbitrary images' shape (same shape for both images) sh = (20, 10) # Select arbitrary centers c_f = np.asarray(sh) / 2 c_g = c_f + 0.5 # Compute the identity vector field I(x) = x in R^2 x_0 = np.asarray(range(sh[0])) x_1 = np.asarray(range(sh[1])) X = np.ndarray(sh + (2,), dtype=np.float64) O = np.ones(sh) X[..., 0] = x_0[:, None] * O X[..., 1] = x_1[None, :] * O # Compute the gradient fields of F and G grad_F = X - c_f grad_G = X - c_g # The squared norm of grad_G to be used later sq_norm_grad_F = np.sum(grad_F**2, -1) sq_norm_grad_G = np.sum(grad_G**2, -1) # Compute F and G F = 0.5 * sq_norm_grad_F G = 0.5 * sq_norm_grad_G # Create an instance of EMMetric metric = EMMetric(2) metric.static_spacing = np.array([1.2, 1.2]) # The $\sigma_x$ (eq. 
4 in [Vercauteren09]) parameter is computed in ANTS # based on the image's spacing sigma_x_sq = np.sum(metric.static_spacing**2) / metric.dim # Set arbitrary values for $\sigma_i$ (eq. 4 in [Vercauteren09]) # The original Demons algorithm used simply |F(x) - G(x)| as an # estimator, so let's use it as well sigma_i_sq = (F - G)**2 # Set the properties relevant to the demons methods metric.smooth = 3.0 metric.gradient_static = np.array(grad_F, dtype=floating) metric.gradient_moving = np.array(grad_G, dtype=floating) metric.static_image = np.array(F, dtype=floating) metric.moving_image = np.array(G, dtype=floating) metric.staticq_means_field = np.array(F, dtype=floating) metric.staticq_sigma_sq_field = np.array(sigma_i_sq, dtype=floating) metric.movingq_means_field = np.array(G, dtype=floating) metric.movingq_sigma_sq_field = np.array(sigma_i_sq, dtype=floating) # compute the step using the implementation under test actual_forward = metric.compute_demons_step(True) actual_backward = metric.compute_demons_step(False) # Now directly compute the demons steps according to eq 4 in # [Vercauteren09] num_fwd = sigma_x_sq * (G - F) den_fwd = sigma_x_sq * sq_norm_grad_F + sigma_i_sq # This is $J^{P}$ in eq. 4 [Vercauteren09] expected_fwd = -1 * np.array(grad_F) expected_fwd[..., 0] *= num_fwd / den_fwd expected_fwd[..., 1] *= num_fwd / den_fwd # apply Gaussian smoothing expected_fwd[..., 0] = ndimage.filters.gaussian_filter( expected_fwd[..., 0], 3.0) expected_fwd[..., 1] = ndimage.filters.gaussian_filter( expected_fwd[..., 1], 3.0) num_bwd = sigma_x_sq * (F - G) den_bwd = sigma_x_sq * sq_norm_grad_G + sigma_i_sq # This is $J^{P}$ in eq. 4 [Vercauteren09] expected_bwd = -1 * np.array(grad_G) expected_bwd[..., 0] *= num_bwd / den_bwd expected_bwd[..., 1] *= num_bwd / den_bwd # apply Gaussian smoothing expected_bwd[..., 0] = ndimage.filters.gaussian_filter( expected_bwd[..., 0], 3.0) expected_bwd[..., 1] = ndimage.filters.gaussian_filter( expected_bwd[..., 1], 3.0) assert_array_almost_equal(actual_forward, expected_fwd) assert_array_almost_equal(actual_backward, expected_bwd) def test_em_demons_step_3d(): r""" Compares the output of the demons step in 3d against an analytical step. The fixed image is given by $F(x) = \frac{1}{2}||x - c_f||^2$, the moving image is given by $G(x) = \frac{1}{2}||x - c_g||^2$, $x, c_f, c_g \in R^{3}$ References ---------- [Vercauteren09] Vercauteren, T., Pennec, X., Perchant, A., & Ayache, N. (2009). Diffeomorphic demons: efficient non-parametric image registration. NeuroImage, 45(1 Suppl), S61-72. doi:10.1016/j.neuroimage.2008.10.040 """ # Select arbitrary images' shape (same shape for both images) sh = (20, 15, 10) # Select arbitrary centers c_f = np.asarray(sh) / 2 c_g = c_f + 0.5 # Compute the identity vector field I(x) = x in R^2 x_0 = np.asarray(range(sh[0])) x_1 = np.asarray(range(sh[1])) x_2 = np.asarray(range(sh[2])) X = np.ndarray(sh + (3,), dtype=np.float64) O = np.ones(sh) X[..., 0] = x_0[:, None, None] * O X[..., 1] = x_1[None, :, None] * O X[..., 2] = x_2[None, None, :] * O # Compute the gradient fields of F and G grad_F = X - c_f grad_G = X - c_g # The squared norm of grad_G to be used later sq_norm_grad_F = np.sum(grad_F**2, -1) sq_norm_grad_G = np.sum(grad_G**2, -1) # Compute F and G F = 0.5 * sq_norm_grad_F G = 0.5 * sq_norm_grad_G # Create an instance of EMMetric metric = EMMetric(3) metric.static_spacing = np.array([1.2, 1.2, 1.2]) # The $\sigma_x$ (eq. 
4 in [Vercauteren09]) parameter is computed in ANTS # based on the image's spacing sigma_x_sq = np.sum(metric.static_spacing**2) / metric.dim # Set arbitrary values for $\sigma_i$ (eq. 4 in [Vercauteren09]) # The original Demons algorithm used simply |F(x) - G(x)| as an # estimator, so let's use it as well sigma_i_sq = (F - G)**2 # Set the properties relevant to the demons methods metric.smooth = 3.0 metric.gradient_static = np.array(grad_F, dtype=floating) metric.gradient_moving = np.array(grad_G, dtype=floating) metric.static_image = np.array(F, dtype=floating) metric.moving_image = np.array(G, dtype=floating) metric.staticq_means_field = np.array(F, dtype=floating) metric.staticq_sigma_sq_field = np.array(sigma_i_sq, dtype=floating) metric.movingq_means_field = np.array(G, dtype=floating) metric.movingq_sigma_sq_field = np.array(sigma_i_sq, dtype=floating) # compute the step using the implementation under test actual_forward = metric.compute_demons_step(True) actual_backward = metric.compute_demons_step(False) # Now directly compute the demons steps according to eq 4 in # [Vercauteren09] num_fwd = sigma_x_sq * (G - F) den_fwd = sigma_x_sq * sq_norm_grad_F + sigma_i_sq expected_fwd = -1 * np.array(grad_F) expected_fwd[..., 0] *= num_fwd / den_fwd expected_fwd[..., 1] *= num_fwd / den_fwd expected_fwd[..., 2] *= num_fwd / den_fwd # apply Gaussian smoothing expected_fwd[..., 0] = ndimage.filters.gaussian_filter( expected_fwd[..., 0], 3.0) expected_fwd[..., 1] = ndimage.filters.gaussian_filter( expected_fwd[..., 1], 3.0) expected_fwd[..., 2] = ndimage.filters.gaussian_filter( expected_fwd[..., 2], 3.0) num_bwd = sigma_x_sq * (F - G) den_bwd = sigma_x_sq * sq_norm_grad_G + sigma_i_sq expected_bwd = -1 * np.array(grad_G) expected_bwd[..., 0] *= num_bwd / den_bwd expected_bwd[..., 1] *= num_bwd / den_bwd expected_bwd[..., 2] *= num_bwd / den_bwd # apply Gaussian smoothing expected_bwd[..., 0] = ndimage.filters.gaussian_filter( expected_bwd[..., 0], 3.0) expected_bwd[..., 1] = ndimage.filters.gaussian_filter( expected_bwd[..., 1], 3.0) expected_bwd[..., 2] = ndimage.filters.gaussian_filter( expected_bwd[..., 2], 3.0) assert_array_almost_equal(actual_forward, expected_fwd) assert_array_almost_equal(actual_backward, expected_bwd) if __name__ == '__main__': test_em_demons_step_2d() test_em_demons_step_3d() test_exceptions() test_EMMetric_image_dynamics() dipy-0.13.0/dipy/align/tests/test_parzenhist.py000066400000000000000000000653201317371701200215510ustar00rootroot00000000000000import numpy as np import scipy as sp from functools import reduce from operator import mul from dipy.core.ndindex import ndindex from dipy.data import get_data from dipy.align import vector_fields as vf from dipy.align.transforms import regtransforms from dipy.align.parzenhist import (ParzenJointHistogram, cubic_spline, cubic_spline_derivative, sample_domain_regular) from numpy.testing import (assert_array_equal, assert_array_almost_equal, assert_almost_equal, assert_equal, assert_raises) factors = {('TRANSLATION', 2): 2.0, ('ROTATION', 2): 0.1, ('RIGID', 2): 0.1, ('SCALING', 2): 0.01, ('AFFINE', 2): 0.1, ('TRANSLATION', 3): 2.0, ('ROTATION', 3): 0.1, ('RIGID', 3): 0.1, ('SCALING', 3): 0.1, ('AFFINE', 3): 0.1} def create_random_image_pair(sh, nvals, seed): r""" Create a pair of images with an arbitrary, non-uniform joint PDF Parameters ---------- sh : array, shape (dim,) the shape of the images to be created nvals : int maximum number of different values in the generated 2D images. 
The voxel intensities of the returned images will be in {0, 1, ..., nvals-1} Returns ------- static : array, shape=sh first image in the image pair moving : array, shape=sh second image in the image pair """ np.random.seed(seed) sz = reduce(mul, sh, 1) sh = tuple(sh) static = np.random.randint(0, nvals, sz).reshape(sh) # This is just a simple way of making the distribution non-uniform moving = static.copy() moving += np.random.randint(0, nvals // 2, sz).reshape(sh) - nvals // 4 # This is just a simple way of making the distribution non-uniform static = moving.copy() static += np.random.randint(0, nvals // 2, sz).reshape(sh) - nvals // 4 return static.astype(np.float64), moving.astype(np.float64) def test_cubic_spline(): # Cubic spline as defined in [Mattes03] eq. (3) # # [Mattes03] Mattes, D., Haynor, D. R., Vesselle, H., Lewellen, T. K., # & Eubank, W. PET-CT image registration in the chest using # free-form deformations. IEEE Transactions on Medical Imaging, # 22(1), 120-8, 2003. in_list = [] expected = [] for epsilon in [-1e-9, 0.0, 1e-9]: for t in [-2.0, -1.0, 0.0, 1.0, 2.0]: x = t + epsilon in_list.append(x) absx = np.abs(x) sqrx = x * x if absx < 1: expected.append((4.0 - 6 * sqrx + 3.0 * (absx ** 3)) / 6.0) elif absx < 2: expected.append(((2 - absx) ** 3) / 6.0) else: expected.append(0.0) actual = cubic_spline(np.array(in_list, dtype=np.float64)) assert_array_almost_equal(actual, np.array(expected, dtype=np.float64)) def test_cubic_spline_derivative(): # Test derivative of the cubic spline, as defined in [Mattes03] eq. (3) by # comparing the analytical and numerical derivatives # # [Mattes03] Mattes, D., Haynor, D. R., Vesselle, H., Lewellen, T. K., # & Eubank, W. PET-CT image registration in the chest using # free-form deformations. IEEE Transactions on Medical Imaging, # 22(1), 120-8, 2003. 
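    # A quick extra check against the closed form of that derivative
    # (a sketch, assuming `cubic_spline_derivative` implements the exact
    # analytical derivative of eq. (3)):
    #   s'(x) = -2*x + 1.5*x*|x|           for |x| < 1
    #   s'(x) = -sign(x)*(2 - |x|)**2 / 2  for 1 <= |x| < 2
    #   s'(x) = 0                          otherwise
    xs = np.linspace(-2.5, 2.5, 101)
    closed_form = np.where(
        np.abs(xs) < 1, -2.0 * xs + 1.5 * xs * np.abs(xs),
        np.where(np.abs(xs) < 2,
                 -np.sign(xs) * (2.0 - np.abs(xs)) ** 2 / 2.0, 0.0))
    assert_array_almost_equal(cubic_spline_derivative(xs), closed_form)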
in_list = [] expected = [] for epsilon in [-1e-9, 0.0, 1e-9]: for t in [-2.0, -1.0, 0.0, 1.0, 2.0]: x = t + epsilon in_list.append(x) h = 1e-6 in_list = np.array(in_list) input_h = in_list + h s = np.array(cubic_spline(in_list)) s_h = np.array(cubic_spline(input_h)) expected = (s_h - s) / h actual = cubic_spline_derivative(in_list) assert_array_almost_equal(actual, expected) def test_parzen_joint_histogram(): # Test the simple functionality of ParzenJointHistogram, # the gradients and computation of the joint intensity distribution # will be tested independently for nbins in [15, 30, 50]: for min_int in [-10.0, 0.0, 10.0]: for intensity_range in [0.1, 1.0, 10.0]: fact = 1 max_int = min_int + intensity_range P = ParzenJointHistogram(nbins) # Make a pair of 4-pixel images, introduce +/- 1 values # that will be excluded using a mask static = np.array([min_int - 1.0, min_int, max_int, max_int + 1.0]) # Multiply by an arbitrary value (make the ranges different) moving = fact * np.array([min_int, min_int - 1.0, max_int + 1.0, max_int]) # Create a mask to exclude the invalid values (beyond min and # max computed above) static_mask = np.array([0, 1, 1, 0]) moving_mask = np.array([1, 0, 0, 1]) P.setup(static, moving, static_mask, moving_mask) # Test bin_normalize_static at the boundary normalized = P.bin_normalize_static(min_int) assert_almost_equal(normalized, P.padding) index = P.bin_index(normalized) assert_equal(index, P.padding) normalized = P.bin_normalize_static(max_int) assert_almost_equal(normalized, nbins - P.padding) index = P.bin_index(normalized) assert_equal(index, nbins - 1 - P.padding) # Test bin_normalize_moving at the boundary normalized = P.bin_normalize_moving(fact * min_int) assert_almost_equal(normalized, P.padding) index = P.bin_index(normalized) assert_equal(index, P.padding) normalized = P.bin_normalize_moving(fact * max_int) assert_almost_equal(normalized, nbins - P.padding) index = P.bin_index(normalized) assert_equal(index, nbins - 1 - P.padding) # Test bin_index not at the boundary delta_s = (max_int - min_int) / (nbins - 2 * P.padding) delta_m = fact * (max_int - min_int) / (nbins - 2 * P.padding) for i in range(nbins - 2 * P.padding): normalized = P.bin_normalize_static(min_int + (i + 0.5) * delta_s) index = P.bin_index(normalized) assert_equal(index, P.padding + i) normalized = P.bin_normalize_moving(fact * min_int + (i + 0.5) * delta_m) index = P.bin_index(normalized) assert_equal(index, P.padding + i) def test_parzen_densities(): # Test the computation of the joint intensity distribution # using a dense and a sparse set of values seed = 1246592 nbins = 32 nr = 30 nc = 35 ns = 20 nvals = 50 for dim in [2, 3]: if dim == 2: shape = (nr, nc) static, moving = create_random_image_pair(shape, nvals, seed) else: shape = (ns, nr, nc) static, moving = create_random_image_pair(shape, nvals, seed) # Initialize parzen_hist = ParzenJointHistogram(nbins) parzen_hist.setup(static, moving) # Get distributions computed by dense sampling parzen_hist.update_pdfs_dense(static, moving) actual_joint_dense = parzen_hist.joint actual_mmarginal_dense = parzen_hist.mmarginal actual_smarginal_dense = parzen_hist.smarginal # Get distributions computed by sparse sampling sval = static.reshape(-1) mval = moving.reshape(-1) parzen_hist.update_pdfs_sparse(sval, mval) actual_joint_sparse = parzen_hist.joint actual_mmarginal_sparse = parzen_hist.mmarginal actual_smarginal_sparse = parzen_hist.smarginal # Compute the expected joint distribution with dense sampling expected_joint_dense = 
np.zeros(shape=(nbins, nbins)) for index in ndindex(shape): sv = parzen_hist.bin_normalize_static(static[index]) mv = parzen_hist.bin_normalize_moving(moving[index]) sbin = parzen_hist.bin_index(sv) # The spline is centered at mv, will evaluate for all row spline_arg = np.array([i - mv for i in range(nbins)]) contribution = cubic_spline(spline_arg) expected_joint_dense[sbin, :] += contribution # Compute the expected joint distribution with sparse sampling expected_joint_sparse = np.zeros(shape=(nbins, nbins)) for index in range(sval.shape[0]): sv = parzen_hist.bin_normalize_static(sval[index]) mv = parzen_hist.bin_normalize_moving(mval[index]) sbin = parzen_hist.bin_index(sv) # The spline is centered at mv, will evaluate for all row spline_arg = np.array([i - mv for i in range(nbins)]) contribution = cubic_spline(spline_arg) expected_joint_sparse[sbin, :] += contribution # Verify joint distributions expected_joint_dense /= expected_joint_dense.sum() expected_joint_sparse /= expected_joint_sparse.sum() assert_array_almost_equal(actual_joint_dense, expected_joint_dense) assert_array_almost_equal(actual_joint_sparse, expected_joint_sparse) # Verify moving marginals expected_mmarginal_dense = expected_joint_dense.sum(0) expected_mmarginal_dense /= expected_mmarginal_dense.sum() expected_mmarginal_sparse = expected_joint_sparse.sum(0) expected_mmarginal_sparse /= expected_mmarginal_sparse.sum() assert_array_almost_equal(actual_mmarginal_dense, expected_mmarginal_dense) assert_array_almost_equal(actual_mmarginal_sparse, expected_mmarginal_sparse) # Verify static marginals expected_smarginal_dense = expected_joint_dense.sum(1) expected_smarginal_dense /= expected_smarginal_dense.sum() expected_smarginal_sparse = expected_joint_sparse.sum(1) expected_smarginal_sparse /= expected_smarginal_sparse.sum() assert_array_almost_equal(actual_smarginal_dense, expected_smarginal_dense) assert_array_almost_equal(actual_smarginal_sparse, expected_smarginal_sparse) def setup_random_transform(transform, rfactor, nslices=45, sigma=1): r""" Creates a pair of images related to each other by an affine transform We transform the static image with a random transform so that the returned ground-truth transform will produce the static image when applied to the moving image. This will simply stack some copies of a T1 coronal slice image and add some zero slices up and down to reduce boundary artefacts when interpolating. 
Parameters ---------- transform: instance of Transform defines the type of random transformation that will be created rfactor: float the factor to multiply the uniform(0,1) random noise that will be added to the identity parameters to create the random transform nslices: int number of slices to be stacked to form the volumes """ dim = 2 if nslices == 1 else 3 if transform.get_dim() != dim: raise ValueError("Transform and requested volume have different dims.") np.random.seed(3147702) zero_slices = nslices // 3 fname = get_data('t1_coronal_slice') moving_slice = np.load(fname) moving_slice = moving_slice[40:180, 50:210] if nslices == 1: dim = 2 moving = moving_slice transform_method = vf.transform_2d_affine else: dim = 3 transform_method = vf.transform_3d_affine moving = np.zeros(shape=moving_slice.shape + (nslices,)) moving[..., zero_slices:(2 * zero_slices)] = moving_slice[..., None] moving = sp.ndimage.filters.gaussian_filter(moving, sigma) moving_g2w = np.eye(dim + 1) mmask = np.ones_like(moving, dtype=np.int32) # Create a transform by slightly perturbing the identity parameters theta = transform.get_identity_parameters() n = transform.get_number_of_parameters() theta += np.random.rand(n) * rfactor M = transform.param_to_matrix(theta) shape = np.array(moving.shape, dtype=np.int32) static = np.array(transform_method(moving.astype(np.float32), shape, M)) static = static.astype(np.float64) static_g2w = np.eye(dim + 1) smask = np.ones_like(static, dtype=np.int32) return static, moving, static_g2w, moving_g2w, smask, mmask, M def test_joint_pdf_gradients_dense(): # Compare the analytical and numerical (finite differences) gradient of # the joint distribution (i.e. derivatives of each histogram cell) w.r.t. # the transform parameters. Since the histograms are discrete partitions # of the image intensities, the finite difference approximation is # normally not very close to the analytical derivatives. Other sources of # error are the interpolation used when transforming the images and the # boundary intensities introduced when interpolating outside of the image # (i.e. some "zeros" are introduced at the boundary which affect the # numerical derivatives but is not taken into account by the analytical # derivatives). Thus, we need to relax the verification. Instead of # looking for the analytical and numerical gradients to be very close to # each other, we will verify that they approximately point in the same # direction by testing if the angle they form is close to zero. h = 1e-4 # Make sure dictionary entries are processed in the same order regardless # of the platform. Otherwise any random numbers drawn within the loop # would make the test non-deterministic even if we fix the seed before # the loop. Right now, this test does not draw any samples, but we still # sort the entries to prevent future related failures. 
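    # Concretely, the check below computes, for every joint-histogram cell,
    # the cosine of the angle between the analytical and numerical gradient
    # vectors, roughly
    #     cos_angle = (expected * actual).sum(-1) / (
    #         np.linalg.norm(expected, axis=-1) *
    #         np.linalg.norm(actual, axis=-1))
    # restricted to cells where the product of the two norms is
    # non-negligible, and then requires the mean cosine to be close to 1
    # and its spread to be small.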
for ttype in sorted(factors): dim = ttype[1] if dim == 2: nslices = 1 transform_method = vf.transform_2d_affine else: nslices = 45 transform_method = vf.transform_3d_affine transform = regtransforms[ttype] factor = factors[ttype] theta = transform.get_identity_parameters() static, moving, static_g2w, moving_g2w, smask, mmask, M = \ setup_random_transform(transform, factor, nslices, 5.0) parzen_hist = ParzenJointHistogram(32) parzen_hist.setup(static, moving, smask, mmask) # Compute the gradient at theta with the implementation under test M = transform.param_to_matrix(theta) shape = np.array(static.shape, dtype=np.int32) moved = transform_method(moving.astype(np.float32), shape, M) moved = np.array(moved) parzen_hist.update_pdfs_dense(static.astype(np.float64), moved.astype(np.float64)) # Get the joint distribution evaluated at theta J0 = np.copy(parzen_hist.joint) grid_to_space = np.eye(dim + 1) spacing = np.ones(dim, dtype=np.float64) mgrad, inside = vf.gradient(moving.astype(np.float32), moving_g2w, spacing, shape, grid_to_space) id = transform.get_identity_parameters() parzen_hist.update_gradient_dense( id, transform, static.astype(np.float64), moved.astype(np.float64), grid_to_space, mgrad, smask, mmask) actual = np.copy(parzen_hist.joint_grad) # Now we have the gradient of the joint distribution w.r.t. the # transform parameters # Compute the gradient using finite-diferences n = transform.get_number_of_parameters() expected = np.empty_like(actual) for i in range(n): dtheta = theta.copy() dtheta[i] += h # Update the joint distribution with the transformed moving image M = transform.param_to_matrix(dtheta) shape = np.array(static.shape, dtype=np.int32) moved = transform_method(moving.astype(np.float32), shape, M) moved = np.array(moved) parzen_hist.update_pdfs_dense(static.astype(np.float64), moved.astype(np.float64)) J1 = np.copy(parzen_hist.joint) expected[..., i] = (J1 - J0) / h # Dot product and norms of gradients of each joint histogram cell # i.e. the derivatives of each cell w.r.t. all parameters P = (expected * actual).sum(2) enorms = np.sqrt((expected ** 2).sum(2)) anorms = np.sqrt((actual ** 2).sum(2)) prodnorms = enorms * anorms # Cosine of angle between the expected and actual gradients. # Exclude very small gradients P[prodnorms > 1e-6] /= (prodnorms[prodnorms > 1e-6]) P[prodnorms <= 1e-6] = 0 # Verify that a large proportion of the gradients point almost in # the same direction. Disregard very small gradients mean_cosine = P[P != 0].mean() std_cosine = P[P != 0].std() assert(mean_cosine > 0.9) assert(std_cosine < 0.25) def test_joint_pdf_gradients_sparse(): h = 1e-4 # Make sure dictionary entries are processed in the same order regardless # of the platform. Otherwise any random numbers drawn within the loop # would make the test non-deterministic even if we fix the seed before # the loop.Right now, this test does not draw any samples, but we still # sort the entries to prevent future related failures. 
for ttype in sorted(factors): dim = ttype[1] if dim == 2: nslices = 1 interp_method = vf.interpolate_scalar_2d else: nslices = 45 interp_method = vf.interpolate_scalar_3d transform = regtransforms[ttype] factor = factors[ttype] theta = transform.get_identity_parameters() static, moving, static_g2w, moving_g2w, smask, mmask, M = \ setup_random_transform(transform, factor, nslices, 5.0) parzen_hist = ParzenJointHistogram(32) parzen_hist.setup(static, moving, smask, mmask) # Sample the fixed-image domain k = 3 sigma = 0.25 seed = 1234 shape = np.array(static.shape, dtype=np.int32) samples = sample_domain_regular(k, shape, static_g2w, sigma, seed) samples = np.array(samples) samples = np.hstack((samples, np.ones(samples.shape[0])[:, None])) sp_to_static = np.linalg.inv(static_g2w) samples_static_grid = (sp_to_static.dot(samples.T).T)[..., :dim] intensities_static, inside = interp_method(static.astype(np.float32), samples_static_grid) # The routines in vector_fields operate, mostly, with float32 because # they were thought to be used for non-linear registration. We may need # to write some float64 counterparts for affine registration, where # memory is not so big issue intensities_static = np.array(intensities_static, dtype=np.float64) # Compute the gradient at theta with the implementation under test M = transform.param_to_matrix(theta) sp_to_moving = np.linalg.inv(moving_g2w).dot(M) samples_moving_grid = (sp_to_moving.dot(samples.T).T)[..., :dim] intensities_moving, inside = interp_method(moving.astype(np.float32), samples_moving_grid) intensities_moving = np.array(intensities_moving, dtype=np.float64) parzen_hist.update_pdfs_sparse(intensities_static, intensities_moving) # Get the joint distribution evaluated at theta J0 = np.copy(parzen_hist.joint) spacing = np.ones(dim + 1, dtype=np.float64) mgrad, inside = vf.sparse_gradient(moving.astype(np.float32), sp_to_moving, spacing, samples) parzen_hist.update_gradient_sparse( theta, transform, intensities_static, intensities_moving, samples[..., :dim], mgrad) # Get the gradient of the joint distribution w.r.t. the transform # parameters actual = np.copy(parzen_hist.joint_grad) # Compute the gradient using finite-diferences n = transform.get_number_of_parameters() expected = np.empty_like(actual) for i in range(n): dtheta = theta.copy() dtheta[i] += h # Update the joint distribution with the transformed moving image M = transform.param_to_matrix(dtheta) sp_to_moving = np.linalg.inv(moving_g2w).dot(M) samples_moving_grid = sp_to_moving.dot(samples.T).T intensities_moving, inside = \ interp_method(moving.astype(np.float32), samples_moving_grid) intensities_moving = np.array(intensities_moving, dtype=np.float64) parzen_hist.update_pdfs_sparse( intensities_static, intensities_moving) J1 = np.copy(parzen_hist.joint) expected[..., i] = (J1 - J0) / h # Dot product and norms of gradients of each joint histogram cell # i.e. the derivatives of each cell w.r.t. all parameters P = (expected * actual).sum(2) enorms = np.sqrt((expected ** 2).sum(2)) anorms = np.sqrt((actual ** 2).sum(2)) prodnorms = enorms * anorms # Cosine of angle between the expected and actual gradients. # Exclude very small gradients P[prodnorms > 1e-6] /= (prodnorms[prodnorms > 1e-6]) P[prodnorms <= 1e-6] = 0 # Verify that a large proportion of the gradients point almost in # the same direction. 
Disregard very small gradients mean_cosine = P[P != 0].mean() std_cosine = P[P != 0].std() assert(mean_cosine > 0.99) assert(std_cosine < 0.15) def test_sample_domain_regular(): # Test 2D sampling shape = np.array((10, 10), dtype=np.int32) affine = np.eye(3) invalid_affine = np.eye(2) sigma = 0 dim = len(shape) n = shape[0] * shape[1] k = 2 # Verify exception is raised with invalid affine assert_raises(ValueError, sample_domain_regular, k, shape, invalid_affine, sigma) samples = sample_domain_regular(k, shape, affine, sigma) isamples = np.array(samples, dtype=np.int32) indices = (isamples[:, 0] * shape[1] + isamples[:, 1]) # Verify correct number of points sampled assert_array_equal(samples.shape, [n // k, dim]) # Verify all sampled points are different assert_equal(len(set(indices)), len(indices)) # Verify the sampling was regular at rate k assert_equal((indices % k).sum(), 0) # Test 3D sampling shape = np.array((5, 10, 10), dtype=np.int32) affine = np.eye(4) invalid_affine = np.eye(3) sigma = 0 dim = len(shape) n = shape[0] * shape[1] * shape[2] k = 10 # Verify exception is raised with invalid affine assert_raises(ValueError, sample_domain_regular, k, shape, invalid_affine, sigma) samples = sample_domain_regular(k, shape, affine, sigma) isamples = np.array(samples, dtype=np.int32) indices = (isamples[:, 0] * shape[1] * shape[2] + isamples[:, 1] * shape[2] + isamples[:, 2]) # Verify correct number of points sampled assert_array_equal(samples.shape, [n // k, dim]) # Verify all sampled points are different assert_equal(len(set(indices)), len(indices)) # Verify the sampling was regular at rate k assert_equal((indices % k).sum(), 0) def test_exceptions(): H = ParzenJointHistogram(32) valid = np.empty((2, 2, 2), dtype=np.float64) invalid = np.empty((2, 2, 2, 2), dtype=np.float64) # Test exception from `ParzenJointHistogram.update_pdfs_dense` assert_raises(ValueError, H.update_pdfs_dense, valid, invalid) assert_raises(ValueError, H.update_pdfs_dense, invalid, valid) assert_raises(ValueError, H.update_pdfs_dense, invalid, invalid) # Test exception from `ParzenJointHistogram.update_gradient_dense` for shape in [(5, 5), (5, 5, 5)]: dim = len(shape) grid2world = np.eye(dim + 1) transform = regtransforms[('ROTATION', dim)] theta = transform.get_identity_parameters() valid_img = np.empty(shape, dtype=np.float64) valid_grad = np.empty(shape + (dim,), dtype=np.float64) invalid_img = np.empty((2, 2, 2, 2), dtype=np.float64) invalid_grad_type = valid_grad.astype(np.int32) invalid_grad_dim = np.empty(shape + (dim + 1,), dtype=np.float64) for s, m, g in [(valid_img, valid_img, invalid_grad_type), (valid_img, valid_img, invalid_grad_dim), (invalid_img, valid_img, valid_grad), (invalid_img, invalid_img, invalid_grad_type), (invalid_img, invalid_img, invalid_grad_dim)]: assert_raises(ValueError, H.update_gradient_dense, theta, transform, s, m, grid2world, g) # Test exception from `ParzenJointHistogram.update_gradient_dense` nsamples = 2 for dim in [2, 3]: transform = regtransforms[('ROTATION', dim)] theta = transform.get_identity_parameters() valid_vals = np.empty((nsamples,), dtype=np.float64) valid_grad = np.empty((nsamples, dim), dtype=np.float64) valid_points = np.empty((nsamples, dim), dtype=np.float64) invalid_grad_type = np.empty((nsamples, dim), dtype=np.int32) invalid_grad_dim = np.empty((nsamples, dim + 2), dtype=np.float64) invalid_grad_len = np.empty((nsamples + 1, dim), dtype=np.float64) invalid_vals = np.empty((nsamples + 1), dtype=np.float64) invalid_points_dim = np.empty((nsamples, dim + 2), 
dtype=np.float64) invalid_points_len = np.empty((nsamples + 1, dim), dtype=np.float64) C = [(invalid_vals, valid_vals, valid_points, valid_grad), (valid_vals, invalid_vals, valid_points, valid_grad), (valid_vals, valid_vals, invalid_points_dim, valid_grad), (valid_vals, valid_vals, invalid_points_dim, invalid_grad_dim), (valid_vals, valid_vals, invalid_points_len, valid_grad), (valid_vals, valid_vals, valid_points, invalid_grad_type), (valid_vals, valid_vals, valid_points, invalid_grad_dim), (valid_vals, valid_vals, valid_points, invalid_grad_len)] for s, m, p, g in C: assert_raises(ValueError, H.update_gradient_sparse, theta, transform, s, m, p, g) dipy-0.13.0/dipy/align/tests/test_reslice.py000066400000000000000000000050421317371701200210030ustar00rootroot00000000000000import numpy as np import nibabel as nib from numpy.testing import (run_module_suite, assert_, assert_equal, assert_almost_equal) from dipy.data import get_data from dipy.align.reslice import reslice from dipy.denoise.noise_estimate import estimate_sigma def test_resample(): fimg, _, _ = get_data("small_25") img = nib.load(fimg) data = img.get_data() affine = img.affine zooms = img.header.get_zooms()[:3] # test that new zooms are correctly from the affine (check with 3D volume) new_zooms = (1, 1.2, 2.1) data2, affine2 = reslice(data[..., 0], affine, zooms, new_zooms, order=1, mode='constant') img2 = nib.Nifti1Image(data2, affine2) new_zooms_confirmed = img2.header.get_zooms()[:3] assert_almost_equal(new_zooms, new_zooms_confirmed) # test that shape changes correctly for the first 3 dimensions (check 4D) new_zooms = (1, 1, 1.) data2, affine2 = reslice(data, affine, zooms, new_zooms, order=0, mode='reflect') assert_equal(2 * np.array(data.shape[:3]), data2.shape[:3]) assert_equal(data2.shape[-1], data.shape[-1]) # same with different interpolation order new_zooms = (1, 1, 1.) 
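# --- Illustrative sketch (not part of the dipy test suite) -----------------
# reslice() resamples a volume so that it keeps covering the same physical
# field of view, i.e. approximately
#     new_shape = round(old_shape * old_zooms / new_zooms).
# The shape-doubling assertions in this test therefore rely on the small_25
# data having (roughly) 2 mm voxels, so resampling to 1 mm doubles every
# spatial dimension.  A standalone check of that relation, with hypothetical
# shapes and zooms:
import numpy as np

old_shape = np.array([10, 10, 10])
old_zooms = np.array([2.0, 2.0, 2.0])
new_zooms = np.array([1.0, 1.0, 1.0])
expected_shape = np.round(old_shape * old_zooms / new_zooms).astype(int)
assert np.all(expected_shape == 2 * old_shape)
# ----------------------------------------------------------------------------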
data3, affine2 = reslice(data, affine, zooms, new_zooms, order=5, mode='reflect') assert_equal(2 * np.array(data.shape[:3]), data3.shape[:3]) assert_equal(data3.shape[-1], data.shape[-1]) # test that the sigma will be reduced with interpolation sigmas = estimate_sigma(data) sigmas2 = estimate_sigma(data2) sigmas3 = estimate_sigma(data3) assert_(np.all(sigmas > sigmas2)) assert_(np.all(sigmas2 > sigmas3)) # check that 4D resampling matches 3D resampling data2, affine2 = reslice(data, affine, zooms, new_zooms) for i in range(data.shape[-1]): _data, _affine = reslice(data[..., i], affine, zooms, new_zooms) assert_almost_equal(data2[..., i], _data) assert_almost_equal(affine2, _affine) # check use of multiprocessing pool of specified size data3, affine3 = reslice(data, affine, zooms, new_zooms, num_processes=4) assert_almost_equal(data2, data3) assert_almost_equal(affine2, affine3) # check use of multiprocessing pool of autoconfigured size data3, affine3 = reslice(data, affine, zooms, new_zooms, num_processes=0) assert_almost_equal(data2, data3) assert_almost_equal(affine2, affine3) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/align/tests/test_scalespace.py000066400000000000000000000067431317371701200214710ustar00rootroot00000000000000import numpy as np import scipy as sp from numpy.testing import (assert_array_equal, assert_array_almost_equal, assert_almost_equal, assert_equal, assert_raises) from dipy.align import floating from dipy.align.imwarp import get_direction_and_spacings from dipy.align.scalespace import (ScaleSpace, IsotropicScaleSpace) from dipy.align.tests.test_imwarp import get_synthetic_warped_circle def test_scale_space(): num_levels = 3 for test_class in [ScaleSpace, IsotropicScaleSpace]: for dim in [2, 3]: print(dim, test_class) if dim == 2: moving, static = get_synthetic_warped_circle(1) else: moving, static = get_synthetic_warped_circle(30) input_spacing = np.array([1.1, 1.2, 1.5])[:dim] grid2world = np.diag(tuple(input_spacing) + (1.0,)) original = moving if test_class is ScaleSpace: ss = test_class( original, num_levels, grid2world, input_spacing) elif test_class is IsotropicScaleSpace: factors = [4, 2, 1] sigmas = [3.0, 1.0, 0.0] ss = test_class( original, factors, sigmas, grid2world, input_spacing) for level in range(num_levels): # Verify sigmas and images are consistent sigmas = ss.get_sigmas(level) expected = sp.ndimage.filters.gaussian_filter(original, sigmas) expected = ((expected - expected.min()) / (expected.max() - expected.min())) actual = ss.get_image(level) assert_array_almost_equal(actual, expected) # Verify scalings and spacings are consistent spacings = ss.get_spacing(level) scalings = ss.get_scaling(level) expected = ss.get_spacing(0) * scalings actual = ss.get_spacing(level) assert_array_almost_equal(actual, expected) # Verify affine and affine_inv are consistent affine = ss.get_affine(level) affine_inv = ss.get_affine_inv(level) expected = np.eye(1 + dim) actual = affine.dot(affine_inv) assert_array_almost_equal(actual, expected) # Verify affine consistent with spacings exp_dir, expected_sp = get_direction_and_spacings(affine, dim) actual_sp = spacings assert_array_almost_equal(actual_sp, expected_sp) def test_scale_space_exceptions(): np.random.seed(2022966) target_shape = (32, 32) # create a random image image = np.ndarray(target_shape, dtype=floating) ns = np.size(image) image[...] 
= np.random.randint(0, 10, ns).reshape(tuple(target_shape)) zeros = (image == 0).astype(np.int32) ss = ScaleSpace(image, 3) for invalid_level in [-1, 3, 4]: assert_raises(ValueError, ss.get_image, invalid_level) # Verify that the mask is correctly applied, when requested ss = ScaleSpace(image, 3, mask0=True) for level in range(3): img = ss.get_image(level) z = (img == 0).astype(np.int32) assert_array_equal(zeros, z) dipy-0.13.0/dipy/align/tests/test_streamlinear.py000066400000000000000000000357241317371701200220550ustar00rootroot00000000000000import numpy as np from numpy.testing import (run_module_suite, assert_, assert_equal, assert_almost_equal, assert_array_equal, assert_array_almost_equal, assert_raises) from dipy.align.streamlinear import (compose_matrix44, decompose_matrix44, BundleSumDistanceMatrixMetric, BundleMinDistanceMatrixMetric, BundleMinDistanceMetric, StreamlineLinearRegistration, StreamlineDistanceMetric) from dipy.tracking.streamline import (center_streamlines, unlist_streamlines, relist_streamlines, transform_streamlines, set_number_of_points) from dipy.core.geometry import compose_matrix from dipy.data import get_data, two_cingulum_bundles from nibabel import trackvis as tv from dipy.align.bundlemin import (_bundle_minimum_distance_matrix, _bundle_minimum_distance, distance_matrix_mdf) def simulated_bundle(no_streamlines=10, waves=False, no_pts=12): t = np.linspace(-10, 10, 200) # parallel waves or parallel lines bundle = [] for i in np.linspace(-5, 5, no_streamlines): if waves: pts = np.vstack((np.cos(t), t, i * np.ones(t.shape))).T else: pts = np.vstack((np.zeros(t.shape), t, i * np.ones(t.shape))).T pts = set_number_of_points(pts, no_pts) bundle.append(pts) return bundle def fornix_streamlines(no_pts=12): fname = get_data('fornix') streams, hdr = tv.read(fname) streamlines = [set_number_of_points(i[0], no_pts) for i in streams] return streamlines def evaluate_convergence(bundle, new_bundle2): pts_static = np.concatenate(bundle, axis=0) pts_moved = np.concatenate(new_bundle2, axis=0) assert_array_almost_equal(pts_static, pts_moved, 3) def test_rigid_parallel_lines(): bundle_initial = simulated_bundle() bundle, shift = center_streamlines(bundle_initial) mat = compose_matrix44([20, 0, 10, 0, 40, 0]) bundle2 = transform_streamlines(bundle, mat) bundle_sum_distance = BundleSumDistanceMatrixMetric() options = {'maxcor': 100, 'ftol': 1e-9, 'gtol': 1e-16, 'eps': 1e-3} srr = StreamlineLinearRegistration(metric=bundle_sum_distance, x0=np.zeros(6), method='L-BFGS-B', bounds=None, options=options) new_bundle2 = srr.optimize(bundle, bundle2).transform(bundle2) evaluate_convergence(bundle, new_bundle2) def test_rigid_real_bundles(): bundle_initial = fornix_streamlines()[:20] bundle, shift = center_streamlines(bundle_initial) mat = compose_matrix44([0, 0, 20, 45., 0, 0]) bundle2 = transform_streamlines(bundle, mat) bundle_sum_distance = BundleSumDistanceMatrixMetric() srr = StreamlineLinearRegistration(bundle_sum_distance, x0=np.zeros(6), method='Powell') new_bundle2 = srr.optimize(bundle, bundle2).transform(bundle2) evaluate_convergence(bundle, new_bundle2) bundle_min_distance = BundleMinDistanceMatrixMetric() srr = StreamlineLinearRegistration(bundle_min_distance, x0=np.zeros(6), method='Powell') new_bundle2 = srr.optimize(bundle, bundle2).transform(bundle2) evaluate_convergence(bundle, new_bundle2) assert_raises(ValueError, StreamlineLinearRegistration, method='Whatever') def test_rigid_partial_real_bundles(): static = fornix_streamlines()[:20] moving = 
fornix_streamlines()[20:40] static_center, shift = center_streamlines(static) moving_center, shift2 = center_streamlines(moving) print(shift2) mat = compose_matrix(translate=np.array([0, 0, 0.]), angles=np.deg2rad([40, 0, 0.])) moved = transform_streamlines(moving_center, mat) srr = StreamlineLinearRegistration() srm = srr.optimize(static_center, moved) print(srm.fopt) print(srm.iterations) print(srm.funcs) moving_back = srm.transform(moved) print(srm.matrix) static_center = set_number_of_points(static_center, 100) moving_center = set_number_of_points(moving_back, 100) vol = np.zeros((100, 100, 100)) spts = np.concatenate(static_center, axis=0) spts = np.round(spts).astype(np.int) + np.array([50, 50, 50]) mpts = np.concatenate(moving_center, axis=0) mpts = np.round(mpts).astype(np.int) + np.array([50, 50, 50]) for index in spts: i, j, k = index vol[i, j, k] = 1 vol2 = np.zeros((100, 100, 100)) for index in mpts: i, j, k = index vol2[i, j, k] = 1 overlap = np.sum(np.logical_and(vol, vol2)) / float(np.sum(vol2)) assert_equal(overlap * 100 > 40, True) def test_stream_rigid(): static = fornix_streamlines()[:20] moving = fornix_streamlines()[20:40] static_center, shift = center_streamlines(static) mat = compose_matrix44([0, 0, 0, 0, 40, 0]) moving = transform_streamlines(moving, mat) srr = StreamlineLinearRegistration() sr_params = srr.optimize(static, moving) moved = transform_streamlines(moving, sr_params.matrix) srr = StreamlineLinearRegistration(verbose=True) srm = srr.optimize(static, moving) moved2 = transform_streamlines(moving, srm.matrix) moved3 = srm.transform(moving) assert_array_almost_equal(moved[0], moved2[0], decimal=3) assert_array_almost_equal(moved2[0], moved3[0], decimal=3) def test_min_vs_min_fast_precision(): static = fornix_streamlines()[:20] moving = fornix_streamlines()[:20] static = [s.astype('f8') for s in static] moving = [m.astype('f8') for m in moving] bmd = BundleMinDistanceMatrixMetric() bmd.setup(static, moving) bmdf = BundleMinDistanceMetric() bmdf.setup(static, moving) x_test = [0.01, 0, 0, 0, 0, 0] print(bmd.distance(x_test)) print(bmdf.distance(x_test)) assert_equal(bmd.distance(x_test), bmdf.distance(x_test)) def test_same_number_of_points(): A = [np.random.rand(10, 3), np.random.rand(20, 3)] B = [np.random.rand(21, 3), np.random.rand(30, 3)] C = [np.random.rand(10, 3), np.random.rand(10, 3)] D = [np.random.rand(20, 3), np.random.rand(20, 3)] slr = StreamlineLinearRegistration() assert_raises(ValueError, slr.optimize, A, B) assert_raises(ValueError, slr.optimize, C, D) assert_raises(ValueError, slr.optimize, C, B) def test_efficient_bmd(): a = np.array([[1, 1, 1], [2, 2, 2], [3, 3, 3]]) streamlines = [a, a + 2, a + 4] points, offsets = unlist_streamlines(streamlines) points = points.astype(np.double) points2 = points.copy() D = np.zeros((len(offsets), len(offsets)), dtype='f8') _bundle_minimum_distance_matrix(points, points2, len(offsets), len(offsets), a.shape[0], D) assert_equal(np.sum(np.diag(D)), 0) points2 += 2 _bundle_minimum_distance_matrix(points, points2, len(offsets), len(offsets), a.shape[0], D) streamlines2 = relist_streamlines(points2, offsets) D2 = distance_matrix_mdf(streamlines, streamlines2) assert_array_almost_equal(D, D2) cols = D2.shape[1] rows = D2.shape[0] dist = 0.25 * (np.sum(np.min(D2, axis=0)) / float(cols) + np.sum(np.min(D2, axis=1)) / float(rows)) ** 2 dist2 = _bundle_minimum_distance(points, points2, len(offsets), len(offsets), a.shape[0]) assert_almost_equal(dist, dist2) def test_openmp_locks(): static = [] moving = [] pts = 20 
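# --- Illustrative sketch (not part of the dipy test suite) -----------------
# test_efficient_bmd above and test_openmp_locks below both reduce an MDF
# distance matrix D between two bundles to the scalar Bundle Minimum
# Distance:
#     BMD = 0.25 * (mean of column-wise minima + mean of row-wise minima) ** 2
# A pure-NumPy version of that reduction (a sketch with a hypothetical
# helper name, not the dipy implementation):
import numpy as np

def bundle_minimum_distance_from_matrix(D):
    # Reduce an (n, m) streamline distance matrix to the scalar BMD value.
    D = np.asarray(D, dtype=np.float64)
    rows, cols = D.shape
    return 0.25 * (np.min(D, axis=0).sum() / cols +
                   np.min(D, axis=1).sum() / rows) ** 2

# When every streamline's nearest counterpart lies at MDF distance 1, both
# means are 1 and BMD = 0.25 * (1 + 1) ** 2 = 1.
D = np.array([[1.0, 3.0],
              [3.0, 1.0]])
assert bundle_minimum_distance_from_matrix(D) == 1.0
# ----------------------------------------------------------------------------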
for i in range(1000): s = np.random.rand(pts, 3) static.append(s) moving.append(s + 2) moving = moving[2:] points, offsets = unlist_streamlines(static) points2, offsets2 = unlist_streamlines(moving) D = np.zeros((len(offsets), len(offsets2)), dtype='f8') _bundle_minimum_distance_matrix(points, points2, len(offsets), len(offsets2), pts, D) dist1 = 0.25 * (np.sum(np.min(D, axis=0)) / float(D.shape[1]) + np.sum(np.min(D, axis=1)) / float(D.shape[0])) ** 2 dist2 = _bundle_minimum_distance(points, points2, len(offsets), len(offsets2), pts) assert_almost_equal(dist1, dist2, 6) def test_from_to_rigid(): t = np.array([10, 2, 3, 0.1, 20., 30.]) mat = compose_matrix44(t) vec = decompose_matrix44(mat, 6) assert_array_almost_equal(t, vec) t = np.array([0, 0, 0, 180, 0., 0.]) mat = np.eye(4) mat[0, 0] = -1 vec = decompose_matrix44(mat, 6) assert_array_almost_equal(-t, vec) def test_matrix44(): assert_raises(ValueError, compose_matrix44, np.ones(5)) assert_raises(ValueError, compose_matrix44, np.ones(9)) assert_raises(ValueError, compose_matrix44, np.ones(16)) def test_abstract_metric_class(): class DummyStreamlineMetric(StreamlineDistanceMetric): def test(): pass assert_raises(TypeError, DummyStreamlineMetric) def test_evolution_of_previous_iterations(): static = fornix_streamlines()[:20] moving = fornix_streamlines()[:20] moving = [m + np.array([10., 0., 0.]) for m in moving] slr = StreamlineLinearRegistration(evolution=True) from dipy.core.optimize import SCIPY_LESS_0_12 if not SCIPY_LESS_0_12: slm = slr.optimize(static, moving) assert_equal(len(slm.matrix_history), slm.iterations) def test_similarity_real_bundles(): bundle_initial = fornix_streamlines() bundle_initial, shift = center_streamlines(bundle_initial) bundle = bundle_initial[:20] xgold = [0, 0, 10, 0, 0, 0, 1.5] mat = compose_matrix44(xgold) bundle2 = transform_streamlines(bundle_initial[:20], mat) metric = BundleMinDistanceMatrixMetric() x0 = np.array([0, 0, 0, 0, 0, 0, 1], 'f8') slr = StreamlineLinearRegistration(metric=metric, x0=x0, method='Powell', bounds=None, verbose=False) slm = slr.optimize(bundle, bundle2) new_bundle2 = slm.transform(bundle2) evaluate_convergence(bundle, new_bundle2) def test_affine_real_bundles(): bundle_initial = fornix_streamlines() bundle_initial, shift = center_streamlines(bundle_initial) bundle = bundle_initial[:20] xgold = [0, 4, 2, 0, 10, 10, 1.2, 1.1, 1., 0., 0.2, 0.] 
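# --- Illustrative sketch (not part of the dipy test suite) -----------------
# The parameter vectors used throughout this file follow a fixed layout:
# 6 values (translation + rotation in degrees) for rigid, 7 (+ isotropic
# scaling) for similarity, and 12 (+ anisotropic scaling and shears) for
# affine.  A compact round-trip check in the spirit of
# test_compose_decompose_matrix44 (the specific numbers are arbitrary):
import numpy as np
from dipy.align.streamlinear import compose_matrix44, decompose_matrix44

rigid = np.array([10.0, 2.0, 3.0, 5.0, 10.0, 15.0])
similarity = np.append(rigid, 1.2)
affine = np.concatenate([rigid, [1.2, 1.1, 1.0, 0.0, 0.2, 0.0]])
for params in (rigid, similarity, affine):
    mat = compose_matrix44(params)                  # 4x4 homogeneous matrix
    recovered = decompose_matrix44(mat, size=params.size)
    np.testing.assert_array_almost_equal(params, recovered)
# ----------------------------------------------------------------------------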
mat = compose_matrix44(xgold) bundle2 = transform_streamlines(bundle_initial[:20], mat) x0 = np.array([0, 0, 0, 0, 0, 0, 1., 1., 1., 0, 0, 0]) x = 25 bounds = [(-x, x), (-x, x), (-x, x), (-x, x), (-x, x), (-x, x), (0.1, 1.5), (0.1, 1.5), (0.1, 1.5), (-1, 1), (-1, 1), (-1, 1)] options = {'maxcor': 10, 'ftol': 1e-7, 'gtol': 1e-5, 'eps': 1e-8} metric = BundleMinDistanceMatrixMetric() slr = StreamlineLinearRegistration(metric=metric, x0=x0, method='L-BFGS-B', bounds=bounds, verbose=True, options=options) slm = slr.optimize(bundle, bundle2) new_bundle2 = slm.transform(bundle2) slr2 = StreamlineLinearRegistration(metric=metric, x0=x0, method='Powell', bounds=None, verbose=True, options=None) slm2 = slr2.optimize(bundle, new_bundle2) new_bundle2 = slm2.transform(new_bundle2) evaluate_convergence(bundle, new_bundle2) def test_vectorize_streamlines(): cingulum_bundles = two_cingulum_bundles() cb_subj1 = cingulum_bundles[0] cb_subj1 = set_number_of_points(cb_subj1, 10) cb_subj1_pts_no = np.array([s.shape[0] for s in cb_subj1]) assert_equal(np.all(cb_subj1_pts_no == 10), True) def test_x0_input(): for x0 in [6, 7, 12, "Rigid", 'rigid', "similarity", "Affine"]: StreamlineLinearRegistration(x0=x0) for x0 in [np.random.rand(6), np.random.rand(7), np.random.rand(12)]: StreamlineLinearRegistration(x0=x0) for x0 in [8, 20, "Whatever", np.random.rand(20), np.random.rand(20, 3)]: assert_raises(ValueError, StreamlineLinearRegistration, x0=x0) x0 = np.random.rand(4, 3) assert_raises(ValueError, StreamlineLinearRegistration, x0=x0) x0_6 = np.zeros(6) x0_7 = np.array([0, 0, 0, 0, 0, 0, 1.]) x0_12 = np.array([0, 0, 0, 0, 0, 0, 1., 1., 1., 0, 0, 0]) x0_s = [x0_6, x0_7, x0_12, x0_6, x0_7, x0_12] for i, x0 in enumerate([6, 7, 12, "Rigid", "similarity", "Affine"]): slr = StreamlineLinearRegistration(x0=x0) assert_equal(slr.x0, x0_s[i]) def test_compose_decompose_matrix44(): for i in range(20): x0 = np.random.rand(12) mat = compose_matrix44(x0[:6]) assert_array_almost_equal(x0[:6], decompose_matrix44(mat, size=6)) mat = compose_matrix44(x0[:7]) assert_array_almost_equal(x0[:7], decompose_matrix44(mat, size=7)) mat = compose_matrix44(x0[:12]) assert_array_almost_equal(x0[:12], decompose_matrix44(mat, size=12)) assert_raises(ValueError, decompose_matrix44, mat, 20) def test_cascade_of_optimizations_and_threading(): cingulum_bundles = two_cingulum_bundles() cb1 = cingulum_bundles[0] cb1 = set_number_of_points(cb1, 20) test_x0 = np.array([10, 4, 3, 0, 20, 10, 1.5, 1.5, 1.5, 0., 0.2, 0]) cb2 = transform_streamlines(cingulum_bundles[0], compose_matrix44(test_x0)) cb2 = set_number_of_points(cb2, 20) print('first rigid') slr = StreamlineLinearRegistration(x0=6, num_threads=1) slm = slr.optimize(cb1, cb2) print('then similarity') slr2 = StreamlineLinearRegistration(x0=7, num_threads=2) slm2 = slr2.optimize(cb1, cb2, slm.matrix) print('then affine') slr3 = StreamlineLinearRegistration(x0=12, options={'maxiter': 50}, num_threads=None) slm3 = slr3.optimize(cb1, cb2, slm2.matrix) assert_(slm2.fopt < slm.fopt) assert_(slm3.fopt < slm2.fopt) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/align/tests/test_sumsqdiff.py000066400000000000000000000631021317371701200213570ustar00rootroot00000000000000import numpy as np from dipy.align import floating from dipy.align import sumsqdiff as ssd from numpy.testing import (assert_equal, assert_almost_equal, assert_array_almost_equal, assert_allclose) def iterate_residual_field_ssd_2d(delta_field, sigmasq_field, grad, target, lambda_param, dfield): r""" This implementation is for 
testing purposes only. The problem with Gauss-Seidel iterations is that it depends on the order in which we iterate over the variables, so it is necessary to replicate the implementation under test. """ nrows, ncols = delta_field.shape if target is None: b = np.zeros_like(grad) b[..., 0] = delta_field * grad[..., 0] b[..., 1] = delta_field * grad[..., 1] else: b = target y = np.zeros(2) A = np.ndarray((2, 2)) for r in range(nrows): for c in range(ncols): delta = delta_field[r, c] sigmasq = sigmasq_field[r, c] if sigmasq_field is not None else 1 # This has to be done inside the neste loops because # some d[...] may have been previously modified nn = 0 y[:] = 0 for (dRow, dCol) in [(-1, 0), (0, 1), (1, 0), (0, -1)]: dr = r + dRow if((dr < 0) or (dr >= nrows)): continue dc = c + dCol if((dc < 0) or (dc >= ncols)): continue nn += 1 y += dfield[dr, dc] if np.isinf(sigmasq): dfield[r, c] = y / nn else: tau = sigmasq * lambda_param * nn A = np.outer(grad[r, c], grad[r, c]) + tau * np.eye(2) det = np.linalg.det(A) if(det < 1e-9): nrm2 = np.sum(grad[r, c]**2) if(nrm2 < 1e-9): dfield[r, c, :] = 0 else: dfield[r, c] = b[r, c] / nrm2 else: y = b[r, c] + sigmasq * lambda_param * y dfield[r, c] = np.linalg.solve(A, y) def iterate_residual_field_ssd_3d(delta_field, sigmasq_field, grad, target, lambda_param, dfield): r""" This implementation is for testing purposes only. The problem with Gauss-Seidel iterations is that it depends on the order in which we iterate over the variables, so it is necessary to replicate the implementation under test. """ nslices, nrows, ncols = delta_field.shape if target is None: b = np.zeros_like(grad) for i in range(3): b[..., i] = delta_field * grad[..., i] else: b = target y = np.ndarray((3,)) for s in range(nslices): for r in range(nrows): for c in range(ncols): g = grad[s, r, c] delta = delta_field[s, r, c] sigmasq = sigmasq_field[ s, r, c] if sigmasq_field is not None else 1 nn = 0 y[:] = 0 for dSlice, dRow, dCol in [(-1, 0, 0), (0, -1, 0), (0, 0, 1), (0, 1, 0), (0, 0, -1), (1, 0, 0)]: ds = s + dSlice if((ds < 0) or (ds >= nslices)): continue dr = r + dRow if((dr < 0) or (dr >= nrows)): continue dc = c + dCol if((dc < 0) or (dc >= ncols)): continue nn += 1 y += dfield[ds, dr, dc] if(np.isinf(sigmasq)): dfield[s, r, c] = y / nn elif(sigmasq < 1e-9): nrm2 = np.sum(g**2) if(nrm2 < 1e-9): dfield[s, r, c, :] = 0 else: dfield[s, r, c, :] = b[s, r, c] / nrm2 else: tau = sigmasq * lambda_param * nn y = b[s, r, c] + sigmasq * lambda_param * y G = np.outer(g, g) + tau * np.eye(3) try: dfield[s, r, c] = np.linalg.solve(G, y) except np.linalg.linalg.LinAlgError as err: nrm2 = np.sum(g**2) if(nrm2 < 1e-9): dfield[s, r, c, :] = 0 else: dfield[s, r, c] = b[s, r, c] / nrm2 def test_compute_residual_displacement_field_ssd_2d(): # Select arbitrary images' shape (same shape for both images) sh = (20, 10) # Select arbitrary centers c_f = np.asarray(sh) / 2 c_g = c_f + 0.5 # Compute the identity vector field I(x) = x in R^2 x_0 = np.asarray(range(sh[0])) x_1 = np.asarray(range(sh[1])) X = np.ndarray(sh + (2,), dtype=np.float64) O = np.ones(sh) X[..., 0] = x_0[:, None] * O X[..., 1] = x_1[None, :] * O # Compute the gradient fields of F and G np.random.seed(5512751) grad_F = X - c_f grad_G = X - c_g Fnoise = np.random.ranf( np.size(grad_F)).reshape( grad_F.shape) * grad_F.max() * 0.1 Fnoise = Fnoise.astype(floating) grad_F += Fnoise Gnoise = np.random.ranf( np.size(grad_G)).reshape( grad_G.shape) * grad_G.max() * 0.1 Gnoise = Gnoise.astype(floating) grad_G += Gnoise # The squared norm of 
grad_G sq_norm_grad_G = np.sum(grad_G**2, -1) # Compute F and G F = 0.5 * np.sum(grad_F**2, -1) G = 0.5 * sq_norm_grad_G Fnoise = np.random.ranf(np.size(F)).reshape(F.shape) * F.max() * 0.1 Fnoise = Fnoise.astype(floating) F += Fnoise Gnoise = np.random.ranf(np.size(G)).reshape(G.shape) * G.max() * 0.1 Gnoise = Gnoise.astype(floating) G += Gnoise delta_field = np.array(F - G, dtype=floating) sigma_field = np.random.randn(delta_field.size).reshape(delta_field.shape) sigma_field = sigma_field.astype(floating) # Select some pixels to force sigma_field = infinite inf_sigma = np.random.randint(0, 2, sh[0] * sh[1]) inf_sigma = inf_sigma.reshape(sh) sigma_field[inf_sigma == 1] = np.inf # Select an initial displacement field d = np.random.randn(grad_G.size).reshape(grad_G.shape).astype(floating) # d = np.zeros_like(grad_G, dtype=floating) lambda_param = 1.5 # Implementation under test iut = ssd.compute_residual_displacement_field_ssd_2d # In the first iteration we test the case target=None # In the second iteration, target is not None target = None rtol = 1e-9 atol = 1e-4 for it in range(2): # Sum of differences with the neighbors s = np.zeros_like(d, dtype=np.float64) s[:, :-1] += d[:, :-1] - d[:, 1:] # right s[:, 1:] += d[:, 1:] - d[:, :-1] # left s[:-1, :] += d[:-1, :] - d[1:, :] # down s[1:, :] += d[1:, :] - d[:-1, :] # up s *= lambda_param # Dot product of displacement and gradient dp = d[..., 0] * grad_G[..., 0] + \ d[..., 1] * grad_G[..., 1] dp = dp.astype(np.float64) # Compute expected residual expected = None if target is None: expected = np.zeros_like(grad_G) expected[..., 0] = delta_field * grad_G[..., 0] expected[..., 1] = delta_field * grad_G[..., 1] else: expected = target.copy().astype(np.float64) # Expected residuals when sigma != infinte expected[inf_sigma == 0, 0] -= grad_G[inf_sigma == 0, 0] * \ dp[inf_sigma == 0] + sigma_field[inf_sigma == 0] * s[inf_sigma == 0, 0] expected[inf_sigma == 0, 1] -= grad_G[inf_sigma == 0, 1] * \ dp[inf_sigma == 0] + sigma_field[inf_sigma == 0] * s[inf_sigma == 0, 1] # Expected residuals when sigma == infinte expected[inf_sigma == 1] = -1.0 * s[inf_sigma == 1] # Test residual field computation starting with residual = None actual = iut(delta_field, sigma_field, grad_G.astype(floating), target, lambda_param, d, None) assert_allclose(actual, expected, rtol=rtol, atol=atol) # destroy previous result actual = np.ndarray(actual.shape, dtype=floating) # Test residual field computation starting with residual is not None iut(delta_field, sigma_field, grad_G.astype(floating), target, lambda_param, d, actual) assert_allclose(actual, expected, rtol=rtol, atol=atol) # Set target for next iteration target = actual # Test Gauss-Seidel step with residual=None and residual=target for residual in [None, target]: expected = d.copy() iterate_residual_field_ssd_2d( delta_field, sigma_field, grad_G.astype(floating), residual, lambda_param, expected) actual = d.copy() ssd.iterate_residual_displacement_field_ssd_2d( delta_field, sigma_field, grad_G.astype(floating), residual, lambda_param, actual) assert_allclose(actual, expected, rtol=rtol, atol=atol) def test_compute_residual_displacement_field_ssd_3d(): # Select arbitrary images' shape (same shape for both images) sh = (20, 15, 10) # Select arbitrary centers c_f = np.asarray(sh) / 2 c_g = c_f + 0.5 # Compute the identity vector field I(x) = x in R^2 x_0 = np.asarray(range(sh[0])) x_1 = np.asarray(range(sh[1])) x_2 = np.asarray(range(sh[2])) X = np.ndarray(sh + (3,), dtype=np.float64) O = np.ones(sh) X[..., 0] = x_0[:, 
None, None] * O X[..., 1] = x_1[None, :, None] * O X[..., 2] = x_2[None, None, :] * O # Compute the gradient fields of F and G np.random.seed(9223102) grad_F = X - c_f grad_G = X - c_g Fnoise = np.random.ranf( np.size(grad_F)).reshape( grad_F.shape) * grad_F.max() * 0.1 Fnoise = Fnoise.astype(floating) grad_F += Fnoise Gnoise = np.random.ranf( np.size(grad_G)).reshape( grad_G.shape) * grad_G.max() * 0.1 Gnoise = Gnoise.astype(floating) grad_G += Gnoise # The squared norm of grad_G sq_norm_grad_G = np.sum(grad_G**2, -1) # Compute F and G F = 0.5 * np.sum(grad_F**2, -1) G = 0.5 * sq_norm_grad_G Fnoise = np.random.ranf(np.size(F)).reshape(F.shape) * F.max() * 0.1 Fnoise = Fnoise.astype(floating) F += Fnoise Gnoise = np.random.ranf(np.size(G)).reshape(G.shape) * G.max() * 0.1 Gnoise = Gnoise.astype(floating) G += Gnoise delta_field = np.array(F - G, dtype=floating) sigma_field = np.random.randn(delta_field.size).reshape(delta_field.shape) sigma_field = sigma_field.astype(floating) # Select some pixels to force sigma_field = infinite inf_sigma = np.random.randint(0, 2, sh[0] * sh[1] * sh[2]) inf_sigma = inf_sigma.reshape(sh) sigma_field[inf_sigma == 1] = np.inf # Select an initial displacement field d = np.random.randn(grad_G.size).reshape(grad_G.shape).astype(floating) # d = np.zeros_like(grad_G, dtype=floating) lambda_param = 1.5 # Implementation under test iut = ssd.compute_residual_displacement_field_ssd_3d # In the first iteration we test the case target=None # In the second iteration, target is not None target = None rtol = 1e-9 atol = 1e-4 for it in range(2): # Sum of differences with the neighbors s = np.zeros_like(d, dtype=np.float64) s[:, :, :-1] += d[:, :, :-1] - d[:, :, 1:] # right s[:, :, 1:] += d[:, :, 1:] - d[:, :, :-1] # left s[:, :-1, :] += d[:, :-1, :] - d[:, 1:, :] # down s[:, 1:, :] += d[:, 1:, :] - d[:, :-1, :] # up s[:-1, :, :] += d[:-1, :, :] - d[1:, :, :] # below s[1:, :, :] += d[1:, :, :] - d[:-1, :, :] # above s *= lambda_param # Dot product of displacement and gradient dp = d[..., 0] * grad_G[..., 0] + \ d[..., 1] * grad_G[..., 1] + \ d[..., 2] * grad_G[..., 2] # Compute expected residual expected = None if target is None: expected = np.zeros_like(grad_G) for i in range(3): expected[..., i] = delta_field * grad_G[..., i] else: expected = target.copy().astype(np.float64) # Expected residuals when sigma != infinte for i in range(3): expected[inf_sigma == 0, i] -= grad_G[inf_sigma == 0, i] * \ dp[inf_sigma == 0] + sigma_field[inf_sigma == 0] * s[inf_sigma == 0, i] # Expected residuals when sigma == infinte expected[inf_sigma == 1] = -1.0 * s[inf_sigma == 1] # Test residual field computation starting with residual = None actual = iut(delta_field, sigma_field, grad_G.astype(floating), target, lambda_param, d, None) assert_allclose(actual, expected, rtol=rtol, atol=atol) # destroy previous result actual = np.ndarray(actual.shape, dtype=floating) # Test residual field computation starting with residual is not None iut(delta_field, sigma_field, grad_G.astype(floating), target, lambda_param, d, actual) assert_allclose(actual, expected, rtol=rtol, atol=atol) # Set target for next iteration target = actual # Test Gauss-Seidel step with residual=None and residual=target for residual in [None, target]: expected = d.copy() iterate_residual_field_ssd_3d( delta_field, sigma_field, grad_G.astype(floating), residual, lambda_param, expected) actual = d.copy() ssd.iterate_residual_displacement_field_ssd_3d( delta_field, sigma_field, grad_G.astype(floating), residual, lambda_param, 
actual) # the numpy linear solver may differ from our custom implementation # we need to increase the tolerance a bit assert_allclose(actual, expected, rtol=rtol, atol=atol * 5) def test_solve_2d_symmetric_positive_definite(): # Select some arbitrary right-hand sides bs = [np.array([1.1, 2.2]), np.array([1e-2, 3e-3]), np.array([1e2, 1e3]), np.array([1e-5, 1e5])] # Select arbitrary symmetric positive-definite matrices As = [] # Identity As.append(np.array([1.0, 0.0, 1.0])) # Small determinant As.append(np.array([1e-3, 1e-4, 1e-3])) # Large determinant As.append(np.array([1e6, 1e4, 1e6])) for A in As: AA = np.array([[A[0], A[1]], [A[1], A[2]]]) det = np.linalg.det(AA) for b in bs: expected = np.linalg.solve(AA, b) actual = ssd.solve_2d_symmetric_positive_definite(A, b, det) assert_allclose(expected, actual, rtol=1e-9, atol=1e-9) def test_solve_3d_symmetric_positive_definite(): # Select some arbitrary right-hand sides bs = [np.array([1.1, 2.2, 3.3]), np.array([1e-2, 3e-3, 2e-2]), np.array([1e2, 1e3, 5e-2]), np.array([1e-5, 1e5, 1.0])] # Select arbitrary taus taus = [0.0, 1.0, 1e-4, 1e5] # Select arbitrary matrices gs = [] # diagonal gs.append(np.array([0.0, 0.0, 0.0])) # canonical basis gs.append(np.array([1.0, 0.0, 0.0])) gs.append(np.array([0.0, 1.0, 0.0])) gs.append(np.array([0.0, 0.0, 1.0])) # other gs.append(np.array([1.0, 0.5, 0.0])) gs.append(np.array([0.0, 0.2, 0.1])) gs.append(np.array([0.3, 0.0, 0.9])) for g in gs: A = g[:, None] * g[None, :] for tau in taus: AA = A + tau * np.eye(3) for b in bs: actual, is_singular = ssd.solve_3d_symmetric_positive_definite( g, b, tau) if tau == 0.0: assert_equal(is_singular, 1) else: expected = np.linalg.solve(AA, b) assert_allclose(expected, actual, rtol=1e-9, atol=1e-9) def test_compute_energy_ssd_2d(): sh = (32, 32) # Select arbitrary centers c_f = np.asarray(sh) / 2 c_g = c_f + 0.5 # Compute the identity vector field I(x) = x in R^2 x_0 = np.asarray(range(sh[0])) x_1 = np.asarray(range(sh[1])) X = np.ndarray(sh + (2,), dtype=np.float64) O = np.ones(sh) X[..., 0] = x_0[:, None] * O X[..., 1] = x_1[None, :] * O # Compute the gradient fields of F and G grad_F = X - c_f grad_G = X - c_g # Compute F and G F = 0.5 * np.sum(grad_F**2, -1) G = 0.5 * np.sum(grad_G**2, -1) # Note: this should include the energy corresponding to the # regularization term, but it is discarded in ANTS (they just # consider the data term, which is not the objective function # being optimized). This test case should be updated after # further investigation expected = ((F - G)**2).sum() actual = ssd.compute_energy_ssd_2d(np.array(F - G, dtype=floating)) assert_almost_equal(expected, actual) def test_compute_energy_ssd_3d(): sh = (32, 32, 32) # Select arbitrary centers c_f = np.asarray(sh) / 2 c_g = c_f + 0.5 # Compute the identity vector field I(x) = x in R^2 x_0 = np.asarray(range(sh[0])) x_1 = np.asarray(range(sh[1])) x_2 = np.asarray(range(sh[2])) X = np.ndarray(sh + (3,), dtype=np.float64) O = np.ones(sh) X[..., 0] = x_0[:, None, None] * O X[..., 1] = x_1[None, :, None] * O X[..., 2] = x_2[None, None, :] * O # Compute the gradient fields of F and G grad_F = X - c_f grad_G = X - c_g # Compute F and G F = 0.5 * np.sum(grad_F**2, -1) G = 0.5 * np.sum(grad_G**2, -1) # Note: this should include the energy corresponding to the # regularization term, but it is discarded in ANTS (they just # consider the data term, which is not the objective function # being optimized). 
This test case should be updated after # further investigating expected = ((F - G)**2).sum() actual = ssd.compute_energy_ssd_3d(np.array(F - G, dtype=floating)) assert_almost_equal(expected, actual) def test_compute_ssd_demons_step_2d(): r""" Compares the output of the demons step in 2d against an analytical step. The fixed image is given by $F(x) = \frac{1}{2}||x - c_f||^2$, the moving image is given by $G(x) = \frac{1}{2}||x - c_g||^2$, $x, c_f, c_g \in R^{2}$ References ---------- [Vercauteren09] Vercauteren, T., Pennec, X., Perchant, A., & Ayache, N. (2009). Diffeomorphic demons: efficient non-parametric image registration. NeuroImage, 45(1 Suppl), S61-72. doi:10.1016/j.neuroimage.2008.10.040 """ # Select arbitrary images' shape (same shape for both images) sh = (20, 10) # Select arbitrary centers c_f = np.asarray(sh) / 2 c_g = c_f + 0.5 # Compute the identity vector field I(x) = x in R^2 x_0 = np.asarray(range(sh[0])) x_1 = np.asarray(range(sh[1])) X = np.ndarray(sh + (2,), dtype=np.float64) O = np.ones(sh) X[..., 0] = x_0[:, None] * O X[..., 1] = x_1[None, :] * O # Compute the gradient fields of F and G np.random.seed(1137271) grad_F = X - c_f grad_G = X - c_g Fnoise = np.random.ranf( np.size(grad_F)).reshape( grad_F.shape) * grad_F.max() * 0.1 Fnoise = Fnoise.astype(floating) grad_F += Fnoise Gnoise = np.random.ranf( np.size(grad_G)).reshape( grad_G.shape) * grad_G.max() * 0.1 Gnoise = Gnoise.astype(floating) grad_G += Gnoise # The squared norm of grad_G to be used later sq_norm_grad_G = np.sum(grad_G**2, -1) # Compute F and G F = 0.5 * np.sum(grad_F**2, -1) G = 0.5 * sq_norm_grad_G Fnoise = np.random.ranf(np.size(F)).reshape(F.shape) * F.max() * 0.1 Fnoise = Fnoise.astype(floating) F += Fnoise Gnoise = np.random.ranf(np.size(G)).reshape(G.shape) * G.max() * 0.1 Gnoise = Gnoise.astype(floating) G += Gnoise delta_field = np.array(G - F, dtype=floating) # Select some pixels to force gradient = 0 and F=G random_labels = np.random.randint(0, 2, sh[0] * sh[1]) random_labels = random_labels.reshape(sh) F[random_labels == 0] = G[random_labels == 0] delta_field[random_labels == 0] = 0 grad_G[random_labels == 0, ...] = 0 sq_norm_grad_G[random_labels == 0, ...] = 0 # Set arbitrary values for $\sigma_i$ (eq. 4 in [Vercauteren09]) # The original Demons algorithm used simply |F(x) - G(x)| as an # estimator, so let's use it as well sigma_i_sq = (F - G)**2 # Now select arbitrary parameters for $\sigma_x$ (eq 4 in [Vercauteren09]) for sigma_x_sq in [0.01, 1.5, 4.2]: # Directly compute the demons step according to eq. 4 in # [Vercauteren09] num = (sigma_x_sq * (F - G))[random_labels == 1] den = (sigma_x_sq * sq_norm_grad_G + sigma_i_sq)[random_labels == 1] # This is $J^{P}$ in eq. 4 [Vercauteren09] expected = (-1 * np.array(grad_G)) expected[random_labels == 1, 0] *= num / den expected[random_labels == 1, 1] *= num / den expected[random_labels == 0, ...] = 0 # Now compute it using the implementation under test actual = np.empty_like(expected, dtype=floating) ssd.compute_ssd_demons_step_2d(delta_field, np.array(grad_G, dtype=floating), sigma_x_sq, actual) assert_array_almost_equal(actual, expected) def test_compute_ssd_demons_step_3d(): r""" Compares the output of the demons step in 3d against an analytical step. The fixed image is given by $F(x) = \frac{1}{2}||x - c_f||^2$, the moving image is given by $G(x) = \frac{1}{2}||x - c_g||^2$, $x, c_f, c_g \in R^{3}$ References ---------- [Vercauteren09] Vercauteren, T., Pennec, X., Perchant, A., & Ayache, N. (2009). 
Diffeomorphic demons: efficient non-parametric image registration. NeuroImage, 45(1 Suppl), S61-72. doi:10.1016/j.neuroimage.2008.10.040 """ # Select arbitrary images' shape (same shape for both images) sh = (20, 15, 10) # Select arbitrary centers c_f = np.asarray(sh) / 2 c_g = c_f + 0.5 # Compute the identity vector field I(x) = x in R^2 x_0 = np.asarray(range(sh[0])) x_1 = np.asarray(range(sh[1])) x_2 = np.asarray(range(sh[2])) X = np.ndarray(sh + (3,), dtype=np.float64) O = np.ones(sh) X[..., 0] = x_0[:, None, None] * O X[..., 1] = x_1[None, :, None] * O X[..., 2] = x_2[None, None, :] * O # Compute the gradient fields of F and G np.random.seed(1137271) grad_F = X - c_f grad_G = X - c_g Fnoise = np.random.ranf( np.size(grad_F)).reshape( grad_F.shape) * grad_F.max() * 0.1 Fnoise = Fnoise.astype(floating) grad_F += Fnoise Gnoise = np.random.ranf( np.size(grad_G)).reshape( grad_G.shape) * grad_G.max() * 0.1 Gnoise = Gnoise.astype(floating) grad_G += Gnoise # The squared norm of grad_G to be used later sq_norm_grad_G = np.sum(grad_G**2, -1) # Compute F and G F = 0.5 * np.sum(grad_F**2, -1) G = 0.5 * sq_norm_grad_G Fnoise = np.random.ranf(np.size(F)).reshape(F.shape) * F.max() * 0.1 Fnoise = Fnoise.astype(floating) F += Fnoise Gnoise = np.random.ranf(np.size(G)).reshape(G.shape) * G.max() * 0.1 Gnoise = Gnoise.astype(floating) G += Gnoise delta_field = np.array(G - F, dtype=floating) # Select some pixels to force gradient = 0 and F=G random_labels = np.random.randint(0, 2, sh[0] * sh[1] * sh[2]) random_labels = random_labels.reshape(sh) F[random_labels == 0] = G[random_labels == 0] delta_field[random_labels == 0] = 0 grad_G[random_labels == 0, ...] = 0 sq_norm_grad_G[random_labels == 0, ...] = 0 # Set arbitrary values for $\sigma_i$ (eq. 4 in [Vercauteren09]) # The original Demons algorithm used simply |F(x) - G(x)| as an # estimator, so let's use it as well sigma_i_sq = (F - G)**2 # Now select arbitrary parameters for $\sigma_x$ (eq 4 in [Vercauteren09]) for sigma_x_sq in [0.01, 1.5, 4.2]: # Directly compute the demons step according to eq. 4 in # [Vercauteren09] num = (sigma_x_sq * (F - G))[random_labels == 1] den = (sigma_x_sq * sq_norm_grad_G + sigma_i_sq)[random_labels == 1] # This is $J^{P}$ in eq. 4 [Vercauteren09] expected = (-1 * np.array(grad_G)) expected[random_labels == 1, 0] *= num / den expected[random_labels == 1, 1] *= num / den expected[random_labels == 1, 2] *= num / den expected[random_labels == 0, ...] 
= 0 # Now compute it using the implementation under test actual = np.empty_like(expected, dtype=floating) ssd.compute_ssd_demons_step_3d(delta_field, np.array(grad_G, dtype=floating), sigma_x_sq, actual) assert_array_almost_equal(actual, expected) if __name__ == '__main__': test_compute_residual_displacement_field_ssd_2d() test_compute_residual_displacement_field_ssd_3d() test_compute_energy_ssd_2d() test_compute_energy_ssd_3d() test_compute_ssd_demons_step_2d() test_compute_ssd_demons_step_3d() dipy-0.13.0/dipy/align/tests/test_transforms.py000066400000000000000000000212661317371701200215610ustar00rootroot00000000000000from dipy.align.transforms import regtransforms, Transform import numpy as np from numpy.testing import (assert_array_equal, assert_array_almost_equal, assert_almost_equal, assert_equal, assert_raises) def test_number_of_parameters(): expected_params = {('TRANSLATION', 2): 2, ('TRANSLATION', 3): 3, ('ROTATION', 2): 1, ('ROTATION', 3): 3, ('RIGID', 2): 3, ('RIGID', 3): 6, ('SCALING', 2): 1, ('SCALING', 3): 1, ('AFFINE', 2): 6, ('AFFINE', 3): 12} for ttype, transform in regtransforms.items(): assert_equal( transform.get_number_of_parameters(), expected_params[ttype]) def test_param_to_matrix_2d(): rng = np.random.RandomState() # Test translation matrix 2D transform = regtransforms[('TRANSLATION', 2)] dx, dy = rng.uniform(size=(2,)) theta = np.array([dx, dy]) expected = np.array([[1, 0, dx], [0, 1, dy], [0, 0, 1]]) actual = transform.param_to_matrix(theta) assert_array_equal(actual, expected) # Test rotation matrix 2D transform = regtransforms[('ROTATION', 2)] angle = rng.uniform() theta = np.array([angle]) ct = np.cos(angle) st = np.sin(angle) expected = np.array([[ct, -st, 0], [st, ct, 0], [0, 0, 1]]) actual = transform.param_to_matrix(theta) assert_array_almost_equal(actual, expected) # Test rigid matrix 2D transform = regtransforms[('RIGID', 2)] angle, dx, dy = rng.uniform(size=(3,)) theta = np.array([angle, dx, dy]) ct = np.cos(angle) st = np.sin(angle) expected = np.array([[ct, -st, dx], [st, ct, dy], [0, 0, 1]]) actual = transform.param_to_matrix(theta) assert_array_almost_equal(actual, expected) # Test rigid matrix 2D transform = regtransforms[('SCALING', 2)] factor = rng.uniform() theta = np.array([factor]) expected = np.array([[factor, 0, 0], [0, factor, 0], [0, 0, 1]]) actual = transform.param_to_matrix(theta) assert_array_almost_equal(actual, expected) # Test affine 2D transform = regtransforms[('AFFINE', 2)] theta = rng.uniform(size=(6,)) expected = np.eye(3) expected[0, :] = theta[:3] expected[1, :] = theta[3:6] actual = transform.param_to_matrix(theta) assert_array_almost_equal(actual, expected) # Verify that ValueError is raised if incorrect number of parameters for transform in regtransforms.values(): n = transform.get_number_of_parameters() # Set incorrect number of parameters theta = np.zeros(n + 1, dtype=np.float64) assert_raises(ValueError, transform.param_to_matrix, theta) def test_param_to_matrix_3d(): rng = np.random.RandomState() # Test translation matrix 3D transform = regtransforms[('TRANSLATION', 3)] dx, dy, dz = rng.uniform(size=(3,)) theta = np.array([dx, dy, dz]) expected = np.array([[1, 0, 0, dx], [0, 1, 0, dy], [0, 0, 1, dz], [0, 0, 0, 1]]) actual = transform.param_to_matrix(theta) assert_array_equal(actual, expected) # Test rotation matrix 3D transform = regtransforms[('ROTATION', 3)] theta = rng.uniform(size=(3,)) ca = np.cos(theta[0]) sa = np.sin(theta[0]) cb = np.cos(theta[1]) sb = np.sin(theta[1]) cc = np.cos(theta[2]) sc = 
np.sin(theta[2]) X = np.array([[1, 0, 0], [0, ca, -sa], [0, sa, ca]]) Y = np.array([[cb, 0, sb], [0, 1, 0], [-sb, 0, cb]]) Z = np.array([[cc, -sc, 0], [sc, cc, 0], [0, 0, 1]]) R = Z.dot(X.dot(Y)) # Apply in order: Y, X, Z (Y goes to the right) expected = np.eye(4) expected[:3, :3] = R[:3, :3] actual = transform.param_to_matrix(theta) assert_array_almost_equal(actual, expected) # Test rigid matrix 3D transform = regtransforms[('RIGID', 3)] theta = rng.uniform(size=(6,)) ca = np.cos(theta[0]) sa = np.sin(theta[0]) cb = np.cos(theta[1]) sb = np.sin(theta[1]) cc = np.cos(theta[2]) sc = np.sin(theta[2]) X = np.array([[1, 0, 0], [0, ca, -sa], [0, sa, ca]]) Y = np.array([[cb, 0, sb], [0, 1, 0], [-sb, 0, cb]]) Z = np.array([[cc, -sc, 0], [sc, cc, 0], [0, 0, 1]]) R = Z.dot(X.dot(Y)) # Apply in order: Y, X, Z (Y goes to the right) expected = np.eye(4) expected[:3, :3] = R[:3, :3] expected[:3, 3] = theta[3:6] actual = transform.param_to_matrix(theta) assert_array_almost_equal(actual, expected) # Test scaling matrix 3D transform = regtransforms[('SCALING', 3)] factor = rng.uniform() theta = np.array([factor]) expected = np.array([[factor, 0, 0, 0], [0, factor, 0, 0], [0, 0, factor, 0], [0, 0, 0, 1]]) actual = transform.param_to_matrix(theta) assert_array_almost_equal(actual, expected) # Test affine 3D transform = regtransforms[('AFFINE', 3)] theta = rng.uniform(size=(12,)) expected = np.eye(4) expected[0, :] = theta[:4] expected[1, :] = theta[4:8] expected[2, :] = theta[8:12] actual = transform.param_to_matrix(theta) assert_array_almost_equal(actual, expected) # Verify that ValueError is raised if incorrect number of parameters for transform in regtransforms.values(): n = transform.get_number_of_parameters() # Set incorrect number of parameters theta = np.zeros(n + 1, dtype=np.float64) assert_raises(ValueError, transform.param_to_matrix, theta) def test_identity_parameters(): for transform in regtransforms.values(): n = transform.get_number_of_parameters() dim = transform.get_dim() theta = transform.get_identity_parameters() expected = np.eye(dim + 1) actual = transform.param_to_matrix(theta) assert_array_almost_equal(actual, expected) def test_jacobian_functions(): rng = np.random.RandomState() # Compare the analytical Jacobians with their numerical approximations h = 1e-8 nsamples = 50 for transform in regtransforms.values(): n = transform.get_number_of_parameters() dim = transform.get_dim() expected = np.empty((dim, n)) theta = rng.uniform(size=(n,)) T = transform.param_to_matrix(theta) for j in range(nsamples): x = 255 * (rng.uniform(size=(dim,)) - 0.5) actual = transform.jacobian(theta, x) # Approximate with finite differences x_hom = np.ones(dim + 1) x_hom[:dim] = x[:] for i in range(n): dtheta = theta.copy() dtheta[i] += h dT = np.array(transform.param_to_matrix(dtheta)) g = (dT - T).dot(x_hom) / h expected[:, i] = g[:dim] assert_array_almost_equal(actual, expected, decimal=5) # Test ValueError is raised when theta parameter doesn't have the right # length for transform in regtransforms.values(): n = transform.get_number_of_parameters() # Wrong number of parameters theta = np.zeros(n + 1) x = np.zeros(dim) assert_raises(ValueError, transform.jacobian, theta, x) def test_invalid_transform(): # Note: users should not attempt to use the base class Transform: # they should get an instance of one of its derived classes from the # regtransforms dictionary (the base class is not contained there) # If for some reason the user instanciates it and attempts to use it, # however, it will raise exceptions 
when attempting to retrieve its # jacobian, identity parameters or its matrix representation. It will # return -1 if queried about its dimension or number of parameters transform = Transform() theta = np.ndarray(3) x = np.ndarray(3) assert_raises(ValueError, transform.jacobian, theta, x) assert_raises(ValueError, transform.get_identity_parameters) assert_raises(ValueError, transform.param_to_matrix, theta) expected = -1 actual = transform.get_number_of_parameters() assert_equal(actual, expected) actual = transform.get_dim() assert_equal(actual, expected) if __name__ == '__main__': test_number_of_parameters() test_jacobian_functions() test_param_to_matrix_2d() test_param_to_matrix_3d() test_identity_parameters() test_invalid_transform() dipy-0.13.0/dipy/align/tests/test_vector_fields.py000066400000000000000000002050361317371701200222120ustar00rootroot00000000000000import numpy as np from numpy.testing import (assert_array_equal, assert_array_almost_equal, assert_almost_equal, assert_equal, assert_raises) from scipy.ndimage.interpolation import map_coordinates from nibabel.affines import apply_affine, from_matvec from dipy.core import geometry from dipy.align import floating from dipy.align import imwarp from dipy.align import vector_fields as vfu from dipy.align.transforms import regtransforms from dipy.align.parzenhist import sample_domain_regular def test_random_displacement_field_2d(): np.random.seed(3921116) from_shape = (25, 32) to_shape = (33, 29) # Create grid coordinates x_0 = np.asarray(range(from_shape[0])) x_1 = np.asarray(range(from_shape[1])) X = np.empty((3,) + from_shape, dtype=np.float64) O = np.ones(from_shape) X[0, ...] = x_0[:, None] * O X[1, ...] = x_1[None, :] * O X[2, ...] = 1 # Create an arbitrary image-to-space transform t = 0.15 # translation factor trans = np.array([[1, 0, -t * from_shape[0]], [0, 1, -t * from_shape[1]], [0, 0, 1]]) trans_inv = np.linalg.inv(trans) for theta in [-1 * np.pi / 6.0, 0.0, np.pi / 5.0]: # rotation angle for s in [0.83, 1.3, 2.07]: # scale ct = np.cos(theta) st = np.sin(theta) rot = np.array([[ct, -st, 0], [st, ct, 0], [0, 0, 1]]) scale = np.array([[1 * s, 0, 0], [0, 1 * s, 0], [0, 0, 1]]) from_grid2world = trans_inv.dot(scale.dot(rot.dot(trans))) to_grid2world = from_grid2world.dot(scale) to_world2grid = np.linalg.inv(to_grid2world) field, assignment = vfu.create_random_displacement_2d( np.array(from_shape, dtype=np.int32), from_grid2world, np.array(to_shape, dtype=np.int32), to_grid2world) field = np.array(field, dtype=floating) assignment = np.array(assignment) # Verify the assignments are inside the requested region assert_equal(0, (assignment < 0).sum()) for i in range(2): assert_equal(0, (assignment[..., i] >= to_shape[i]).sum()) # Compute the warping coordinates (see warp_2d documentation) Y = np.apply_along_axis(from_grid2world.dot, 0, X)[0:2, ...] Z = np.zeros_like(X) Z[0, ...] = Y[0, ...] + field[..., 0] Z[1, ...] = Y[1, ...] + field[..., 1] Z[2, ...] = 1 W = np.apply_along_axis(to_world2grid.dot, 0, Z)[0:2, ...] 
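# --- Illustrative sketch (not part of the dipy test suite) -----------------
# The expected assignments in this test come from composing three maps:
# the source grid-to-world affine, addition of the displacement (expressed
# in world coordinates), and the target world-to-grid affine.  For a single
# 2D point that composition is simply the following (the affine and the
# numbers are hypothetical; apply_affine is already imported at the top of
# this file):
import numpy as np
from nibabel.affines import apply_affine

from_grid2world = np.array([[2.0, 0.0, 1.0],
                            [0.0, 2.0, -3.0],
                            [0.0, 0.0, 1.0]])
to_world2grid = np.linalg.inv(from_grid2world)
x = np.array([4.0, 7.0])                    # grid coordinates in the source
d = np.array([0.5, -1.25])                  # displacement, in world units
y = apply_affine(from_grid2world, x)        # world position of x
w = apply_affine(to_world2grid, y + d)      # grid coordinates in the target
# ----------------------------------------------------------------------------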
# Verify the claimed assignments are correct assert_array_almost_equal(W[0, ...], assignment[..., 0], 5) assert_array_almost_equal(W[1, ...], assignment[..., 1], 5) # Test exception is raised when the affine transform matrix is not valid valid = np.zeros((2, 3), dtype=np.float64) invalid = np.zeros((2, 2), dtype=np.float64) shape = np.array(from_shape, dtype=np.int32) assert_raises(ValueError, vfu.create_random_displacement_2d, shape, invalid, shape, valid) assert_raises(ValueError, vfu.create_random_displacement_2d, shape, valid, shape, invalid) def test_random_displacement_field_3d(): np.random.seed(7127562) from_shape = (25, 32, 31) to_shape = (33, 29, 35) # Create grid coordinates x_0 = np.asarray(range(from_shape[0])) x_1 = np.asarray(range(from_shape[1])) x_2 = np.asarray(range(from_shape[2])) X = np.empty((4,) + from_shape, dtype=np.float64) O = np.ones(from_shape) X[0, ...] = x_0[:, None, None] * O X[1, ...] = x_1[None, :, None] * O X[2, ...] = x_2[None, None, :] * O X[3, ...] = 1 # Select an arbitrary rotation axis axis = np.array([.5, 2.0, 1.5]) # Create an arbitrary image-to-space transform t = 0.15 # translation factor trans = np.array([[1, 0, 0, -t * from_shape[0]], [0, 1, 0, -t * from_shape[1]], [0, 0, 1, -t * from_shape[2]], [0, 0, 0, 1]]) trans_inv = np.linalg.inv(trans) for theta in [-1 * np.pi / 6.0, 0.0, np.pi / 5.0]: # rotation angle for s in [0.83, 1.3, 2.07]: # scale rot = np.zeros(shape=(4, 4)) rot[:3, :3] = geometry.rodrigues_axis_rotation(axis, theta) rot[3, 3] = 1.0 scale = np.array([[1 * s, 0, 0, 0], [0, 1 * s, 0, 0], [0, 0, 1 * s, 0], [0, 0, 0, 1]]) from_grid2world = trans_inv.dot(scale.dot(rot.dot(trans))) to_grid2world = from_grid2world.dot(scale) to_world2grid = np.linalg.inv(to_grid2world) field, assignment = vfu.create_random_displacement_3d( np.array(from_shape, dtype=np.int32), from_grid2world, np.array(to_shape, dtype=np.int32), to_grid2world) field = np.array(field, dtype=floating) assignment = np.array(assignment) # Verify the assignments are inside the requested region assert_equal(0, (assignment < 0).sum()) for i in range(3): assert_equal(0, (assignment[..., i] >= to_shape[i]).sum()) # Compute the warping coordinates (see warp_2d documentation) Y = np.apply_along_axis(from_grid2world.dot, 0, X)[0:3, ...] Z = np.zeros_like(X) Z[0, ...] = Y[0, ...] + field[..., 0] Z[1, ...] = Y[1, ...] + field[..., 1] Z[2, ...] = Y[2, ...] + field[..., 2] Z[3, ...] = 1 W = np.apply_along_axis(to_world2grid.dot, 0, Z)[0:3, ...] 
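# --- Illustrative sketch (not part of the dipy test suite) -----------------
# The 3D variant above builds its rotation block with
# geometry.rodrigues_axis_rotation.  Two quick sanity properties of such a
# matrix, shown here only as an aside (the axis and angle are arbitrary, and
# the angle convention does not affect either property):
import numpy as np
from dipy.core import geometry

axis = np.array([0.5, 2.0, 1.5])
axis /= np.linalg.norm(axis)                 # use a unit axis to be explicit
R = geometry.rodrigues_axis_rotation(axis, 30.0)
np.testing.assert_array_almost_equal(R.dot(axis), axis)      # axis preserved
np.testing.assert_array_almost_equal(R.dot(R.T), np.eye(3))  # R is orthogonal
# ----------------------------------------------------------------------------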
# Verify the claimed assignments are correct assert_array_almost_equal(W[0, ...], assignment[..., 0], 5) assert_array_almost_equal(W[1, ...], assignment[..., 1], 5) assert_array_almost_equal(W[2, ...], assignment[..., 2], 5) # Test exception is raised when the affine transform matrix is not valid valid = np.zeros((3, 4), dtype=np.float64) invalid = np.zeros((3, 3), dtype=np.float64) shape = np.array(from_shape, dtype=np.int32) assert_raises(ValueError, vfu.create_random_displacement_2d, shape, invalid, shape, valid) assert_raises(ValueError, vfu.create_random_displacement_2d, shape, valid, shape, invalid) def test_harmonic_fields_2d(): nrows = 64 ncols = 67 mid_row = nrows // 2 mid_col = ncols // 2 expected_d = np.empty(shape=(nrows, ncols, 2)) expected_d_inv = np.empty(shape=(nrows, ncols, 2)) for b in [0.1, 0.3, 0.7]: for m in [2, 4, 7]: for i in range(nrows): for j in range(ncols): ii = i - mid_row jj = j - mid_col theta = np.arctan2(ii, jj) expected_d[i, j, 0] =\ ii * (1.0 / (1 + b * np.cos(m * theta)) - 1.0) expected_d[i, j, 1] =\ jj * (1.0 / (1 + b * np.cos(m * theta)) - 1.0) expected_d_inv[i, j, 0] = b * np.cos(m * theta) * ii expected_d_inv[i, j, 1] = b * np.cos(m * theta) * jj actual_d, actual_d_inv =\ vfu.create_harmonic_fields_2d(nrows, ncols, b, m) assert_array_almost_equal(expected_d, actual_d) assert_array_almost_equal(expected_d_inv, expected_d_inv) def test_harmonic_fields_3d(): nslices = 25 nrows = 34 ncols = 37 mid_slice = nslices // 2 mid_row = nrows // 2 mid_col = ncols // 2 expected_d = np.empty(shape=(nslices, nrows, ncols, 3)) expected_d_inv = np.empty(shape=(nslices, nrows, ncols, 3)) for b in [0.3, 0.7]: for m in [2, 5]: for k in range(nslices): for i in range(nrows): for j in range(ncols): kk = k - mid_slice ii = i - mid_row jj = j - mid_col theta = np.arctan2(ii, jj) expected_d[k, i, j, 0] =\ kk * (1.0 / (1 + b * np.cos(m * theta)) - 1.0) expected_d[k, i, j, 1] =\ ii * (1.0 / (1 + b * np.cos(m * theta)) - 1.0) expected_d[k, i, j, 2] =\ jj * (1.0 / (1 + b * np.cos(m * theta)) - 1.0) expected_d_inv[k, i, j, 0] = b * np.cos(m * theta) * kk expected_d_inv[k, i, j, 1] = b * np.cos(m * theta) * ii expected_d_inv[k, i, j, 2] = b * np.cos(m * theta) * jj actual_d, actual_d_inv =\ vfu.create_harmonic_fields_3d(nslices, nrows, ncols, b, m) assert_array_almost_equal(expected_d, actual_d) assert_array_almost_equal(expected_d_inv, expected_d_inv) def test_circle(): sh = (64, 61) cr = sh[0] // 2 cc = sh[1] // 2 x_0 = np.asarray(range(sh[0])) x_1 = np.asarray(range(sh[1])) X = np.empty((2,) + sh, dtype=np.float64) O = np.ones(sh) X[0, ...] = x_0[:, None] * O - cr X[1, ...] = x_1[None, :] * O - cc nrm = np.sqrt(np.sum(X ** 2, axis=0)) for radius in [0, 7, 17, 32]: expected = nrm <= radius actual = vfu.create_circle(sh[0], sh[1], radius) assert_array_almost_equal(actual, expected) def test_sphere(): sh = (64, 61, 57) cs = sh[0] // 2 cr = sh[1] // 2 cc = sh[2] // 2 x_0 = np.asarray(range(sh[0])) x_1 = np.asarray(range(sh[1])) x_2 = np.asarray(range(sh[2])) X = np.empty((3,) + sh, dtype=np.float64) O = np.ones(sh) X[0, ...] = x_0[:, None, None] * O - cs X[1, ...] = x_1[None, :, None] * O - cr X[2, ...] = x_2[None, None, :] * O - cc nrm = np.sqrt(np.sum(X ** 2, axis=0)) for radius in [0, 7, 17, 32]: expected = nrm <= radius actual = vfu.create_sphere(sh[0], sh[1], sh[2], radius) assert_array_almost_equal(actual, expected) def test_interpolate_scalar_2d(): np.random.seed(5324989) sz = 64 target_shape = (sz, sz) image = np.empty(target_shape, dtype=floating) image[...] 
= np.random.randint(0, 10, np.size(image)).reshape(target_shape) extended_image = np.zeros((sz + 2, sz + 2), dtype=floating) extended_image[1:sz + 1, 1:sz + 1] = image[...] # Select some coordinates inside the image to interpolate at nsamples = 200 locations =\ np.random.ranf(2 * nsamples).reshape((nsamples, 2)) * (sz + 2) - 1.0 extended_locations = locations + 1.0 # shift coordinates one voxel # Call the implementation under test interp, inside = vfu.interpolate_scalar_2d(image, locations) # Call the reference implementation expected = map_coordinates(extended_image, extended_locations.transpose(), order=1) assert_array_almost_equal(expected, interp) # Test interpolation stability along the boundary epsilon = 5e-8 for k in range(2): for offset in [0, sz - 1]: delta = ((np.random.ranf(nsamples) * 2) - 1) * epsilon locations[:, k] = delta + offset locations[:, (k + 1) % 2] = np.random.ranf(nsamples) * (sz - 1) interp, inside = vfu.interpolate_scalar_2d(image, locations) locations[:, k] = offset expected = map_coordinates(image, locations.transpose(), order=1) assert_array_almost_equal(expected, interp) if offset == 0: expected_flag = np.array(delta >= 0, dtype=np.int32) else: expected_flag = np.array(delta <= 0, dtype=np.int32) assert_array_almost_equal(expected_flag, inside) def test_interpolate_scalar_nn_2d(): np.random.seed(1924781) sz = 64 target_shape = (sz, sz) image = np.empty(target_shape, dtype=floating) image[...] = np.random.randint(0, 10, np.size(image)).reshape(target_shape) # Select some coordinates to interpolate at nsamples = 200 locations =\ np.random.ranf(2 * nsamples).reshape((nsamples, 2)) * (sz + 2) - 1.0 # Call the implementation under test interp, inside = vfu.interpolate_scalar_nn_2d(image, locations) # Call the reference implementation expected = map_coordinates(image, locations.transpose(), order=0) assert_array_almost_equal(expected, interp) # Test the 'inside' flag for i in range(nsamples): if (locations[i, 0] < 0 or locations[i, 0] > (sz - 1)) or\ (locations[i, 1] < 0 or locations[i, 1] > (sz - 1)): assert_equal(inside[i], 0) else: assert_equal(inside[i], 1) def test_interpolate_scalar_nn_3d(): np.random.seed(3121121) sz = 64 target_shape = (sz, sz, sz) image = np.empty(target_shape, dtype=floating) image[...] = np.random.randint(0, 10, np.size(image)).reshape(target_shape) # Select some coordinates to interpolate at nsamples = 200 locations =\ np.random.ranf(3 * nsamples).reshape((nsamples, 3)) * (sz + 2) - 1.0 # Call the implementation under test interp, inside = vfu.interpolate_scalar_nn_3d(image, locations) # Call the reference implementation expected = map_coordinates(image, locations.transpose(), order=0) assert_array_almost_equal(expected, interp) # Test the 'inside' flag for i in range(nsamples): expected_inside = 1 for axis in range(3): if (locations[i, axis] < 0 or locations[i, axis] > (sz - 1)): expected_inside = 0 break assert_equal(inside[i], expected_inside) def test_interpolate_scalar_3d(): np.random.seed(9216326) sz = 64 target_shape = (sz, sz, sz) image = np.empty(target_shape, dtype=floating) image[...] = np.random.randint(0, 10, np.size(image)).reshape(target_shape) extended_image = np.zeros((sz + 2, sz + 2, sz + 2), dtype=floating) extended_image[1:sz + 1, 1:sz + 1, 1:sz + 1] = image[...] 
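    # The zero-padded copy above is the usual trick for building the scipy
    # reference: this test expects the dipy interpolator to fade to zero just
    # outside the volume, so padding with one voxel of zeros and shifting the
    # sample points by +1 makes map_coordinates reproduce that behaviour.
    # A generic version of the trick (illustrative only, not used below):
    def _interp_zero_outside(volume, points):
        padded = np.zeros(np.array(volume.shape) + 2, dtype=volume.dtype)
        padded[tuple(slice(1, s + 1) for s in volume.shape)] = volume
        return map_coordinates(padded, (np.asarray(points) + 1.0).T, order=1)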
# Select some coordinates inside the image to interpolate at nsamples = 800 locations =\ np.random.ranf(3 * nsamples).reshape((nsamples, 3)) * (sz + 2) - 1.0 extended_locations = locations + 1.0 # shift coordinates one voxel # Call the implementation under test interp, inside = vfu.interpolate_scalar_3d(image, locations) # Call the reference implementation expected = map_coordinates(extended_image, extended_locations.transpose(), order=1) assert_array_almost_equal(expected, interp) # Test interpolation stability along the boundary epsilon = 5e-8 for k in range(3): for offset in [0, sz - 1]: delta = ((np.random.ranf(nsamples) * 2) - 1) * epsilon locations[:, k] = delta + offset locations[:, (k + 1) % 3] = np.random.ranf(nsamples) * (sz - 1) locations[:, (k + 2) % 3] = np.random.ranf(nsamples) * (sz - 1) interp, inside = vfu.interpolate_scalar_3d(image, locations) locations[:, k] = offset expected = map_coordinates(image, locations.transpose(), order=1) assert_array_almost_equal(expected, interp) if offset == 0: expected_flag = np.array(delta >= 0, dtype=np.int32) else: expected_flag = np.array(delta <= 0, dtype=np.int32) assert_array_almost_equal(expected_flag, inside) def test_interpolate_vector_3d(): np.random.seed(7711219) sz = 64 target_shape = (sz, sz, sz) field = np.empty(target_shape + (3,), dtype=floating) field[...] =\ np.random.randint(0, 10, np.size(field)).reshape(target_shape + (3,)) extended_field = np.zeros((sz + 2, sz + 2, sz + 2, 3), dtype=floating) extended_field[1:sz + 1, 1:sz + 1, 1:sz + 1] = field # Select some coordinates to interpolate at nsamples = 800 locations =\ np.random.ranf(3 * nsamples).reshape((nsamples, 3)) * (sz + 2) - 1.0 extended_locations = locations + 1 # Call the implementation under test interp, inside = vfu.interpolate_vector_3d(field, locations) # Call the reference implementation expected = np.zeros_like(interp) for i in range(3): expected[..., i] = map_coordinates(extended_field[..., i], extended_locations.transpose(), order=1) assert_array_almost_equal(expected, interp) # Test interpolation stability along the boundary epsilon = 5e-8 for k in range(3): for offset in [0, sz - 1]: delta = ((np.random.ranf(nsamples) * 2) - 1) * epsilon locations[:, k] = delta + offset locations[:, (k + 1) % 3] = np.random.ranf(nsamples) * (sz - 1) locations[:, (k + 2) % 3] = np.random.ranf(nsamples) * (sz - 1) interp, inside = vfu.interpolate_vector_3d(field, locations) locations[:, k] = offset for i in range(3): expected[..., i] = map_coordinates(field[..., i], locations.transpose(), order=1) assert_array_almost_equal(expected, interp) if offset == 0: expected_flag = np.array(delta >= 0, dtype=np.int32) else: expected_flag = np.array(delta <= 0, dtype=np.int32) assert_array_almost_equal(expected_flag, inside) def test_interpolate_vector_2d(): np.random.seed(1271244) sz = 64 target_shape = (sz, sz) field = np.empty(target_shape + (2,), dtype=floating) field[...] 
=\ np.random.randint(0, 10, np.size(field)).reshape(target_shape + (2,)) extended_field = np.zeros((sz + 2, sz + 2, 2), dtype=floating) extended_field[1:sz + 1, 1:sz + 1] = field # Select some coordinates to interpolate at nsamples = 200 locations =\ np.random.ranf(2 * nsamples).reshape((nsamples, 2)) * (sz + 2) - 1.0 extended_locations = locations + 1 # Call the implementation under test interp, inside = vfu.interpolate_vector_2d(field, locations) # Call the reference implementation expected = np.zeros_like(interp) for i in range(2): expected[..., i] = map_coordinates(extended_field[..., i], extended_locations.transpose(), order=1) assert_array_almost_equal(expected, interp) # Test interpolation stability along the boundary epsilon = 5e-8 for k in range(2): for offset in [0, sz - 1]: delta = ((np.random.ranf(nsamples) * 2) - 1) * epsilon locations[:, k] = delta + offset locations[:, (k + 1) % 2] = np.random.ranf(nsamples) * (sz - 1) interp, inside = vfu.interpolate_vector_2d(field, locations) locations[:, k] = offset for i in range(2): expected[..., i] = map_coordinates(field[..., i], locations.transpose(), order=1) assert_array_almost_equal(expected, interp) if offset == 0: expected_flag = np.array(delta >= 0, dtype=np.int32) else: expected_flag = np.array(delta <= 0, dtype=np.int32) assert_array_almost_equal(expected_flag, inside) def test_warping_2d(): r""" Tests the cython implementation of the 2d warpings against scipy """ sh = (64, 64) nr = sh[0] nc = sh[1] # Create an image of a circle radius = 24 circle = vfu.create_circle(nr, nc, radius) circle = np.array(circle, dtype=floating) # Create a displacement field for warping d, dinv = vfu.create_harmonic_fields_2d(nr, nc, 0.2, 8) d = np.asarray(d).astype(floating) dinv = np.asarray(dinv).astype(floating) # Create grid coordinates x_0 = np.asarray(range(sh[0])) x_1 = np.asarray(range(sh[1])) X = np.empty((3,) + sh, dtype=np.float64) O = np.ones(sh) X[0, ...] = x_0[:, None] * O X[1, ...] = x_1[None, :] * O X[2, ...] = 1 # Select an arbitrary translation matrix t = 0.1 trans = np.array([[1, 0, -t * nr], [0, 1, -t * nc], [0, 0, 1]]) trans_inv = np.linalg.inv(trans) # Select arbitrary rotation and scaling matrices for theta in [-1 * np.pi / 6.0, 0.0, np.pi / 6.0]: # rotation angle for s in [0.42, 1.3, 2.15]: # scale ct = np.cos(theta) st = np.sin(theta) rot = np.array([[ct, -st, 0], [st, ct, 0], [0, 0, 1]]) scale = np.array([[1 * s, 0, 0], [0, 1 * s, 0], [0, 0, 1]]) aff = trans_inv.dot(scale.dot(rot.dot(trans))) # Select arbitrary (but different) grid-to-space transforms sampling_grid2world = scale field_grid2world = aff field_world2grid = np.linalg.inv(field_grid2world) image_grid2world = aff.dot(scale) image_world2grid = np.linalg.inv(image_grid2world) A = field_world2grid.dot(sampling_grid2world) B = image_world2grid.dot(sampling_grid2world) C = image_world2grid # Reorient the displacement field according to its grid-to-space # transform dcopy = np.copy(d) vfu.reorient_vector_field_2d(dcopy, field_grid2world) extended_dcopy = np.zeros((nr + 2, nc + 2, 2), dtype=floating) extended_dcopy[1:nr + 1, 1:nc + 1, :] = dcopy # Compute the warping coordinates (see warp_2d documentation) Y = np.apply_along_axis(A.dot, 0, X)[0:2, ...] Z = np.zeros_like(X) Z[0, ...] = map_coordinates(extended_dcopy[..., 0], Y + 1, order=1) Z[1, ...] = map_coordinates(extended_dcopy[..., 1], Y + 1, order=1) Z[2, ...] = 0 Z = np.apply_along_axis(C.dot, 0, Z)[0:2, ...] T = np.apply_along_axis(B.dot, 0, X)[0:2, ...] 
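            # What the A, B, C matrices mean here: for a sampling-grid voxel x
            # (homogeneous coordinates), the warp evaluates
            #     warped[x] = image[B.dot(x) + C_lin.dot(d[A.dot(x)])]
            # where A sends the sampling grid into the field grid, B sends it
            # into the image grid, and C_lin (the linear part of C) converts
            # the physical displacement into image-grid units.  Single-voxel
            # sketch (illustrative only, not called by the test; `interp2`
            # stands for any bilinear interpolator, e.g. a map_coordinates
            # call):
            def _warp_one_voxel(x_hom, image, disp, A, B, C, interp2):
                f = interp2(disp, A.dot(x_hom)[:2])        # displacement there
                pos = B.dot(x_hom)[:2] + C[:2, :2].dot(f)  # image position
                return interp2(image, pos)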
W = T + Z # Test bilinear interpolation expected = map_coordinates(circle, W, order=1) warped = vfu.warp_2d(circle, dcopy, A, B, C, np.array(sh, dtype=np.int32)) assert_array_almost_equal(warped, expected) # Test nearest neighbor interpolation expected = map_coordinates(circle, W, order=0) warped = vfu.warp_2d_nn(circle, dcopy, A, B, C, np.array(sh, dtype=np.int32)) assert_array_almost_equal(warped, expected) # Test exception is raised when the affine transform matrix is not valid val = np.zeros((2, 3), dtype=np.float64) inval = np.zeros((2, 2), dtype=np.float64) sh = np.array(sh, dtype=np.int32) # Exceptions from warp_2d assert_raises(ValueError, vfu.warp_2d, circle, d, inval, val, val, sh) assert_raises(ValueError, vfu.warp_2d, circle, d, val, inval, val, sh) assert_raises(ValueError, vfu.warp_2d, circle, d, val, val, inval, sh) # Exceptions from warp_2d_nn assert_raises(ValueError, vfu.warp_2d_nn, circle, d, inval, val, val, sh) assert_raises(ValueError, vfu.warp_2d_nn, circle, d, val, inval, val, sh) assert_raises(ValueError, vfu.warp_2d_nn, circle, d, val, val, inval, sh) def test_warping_3d(): r""" Tests the cython implementation of the 2d warpings against scipy """ sh = (64, 64, 64) ns = sh[0] nr = sh[1] nc = sh[2] # Create an image of a sphere radius = 24 sphere = vfu.create_sphere(ns, nr, nc, radius) sphere = np.array(sphere, dtype=floating) # Create a displacement field for warping d, dinv = vfu.create_harmonic_fields_3d(ns, nr, nc, 0.2, 8) d = np.asarray(d).astype(floating) dinv = np.asarray(dinv).astype(floating) # Create grid coordinates x_0 = np.asarray(range(sh[0])) x_1 = np.asarray(range(sh[1])) x_2 = np.asarray(range(sh[2])) X = np.empty((4,) + sh, dtype=np.float64) O = np.ones(sh) X[0, ...] = x_0[:, None, None] * O X[1, ...] = x_1[None, :, None] * O X[2, ...] = x_2[None, None, :] * O X[3, ...] = 1 # Select an arbitrary rotation axis axis = np.array([.5, 2.0, 1.5]) # Select an arbitrary translation matrix t = 0.1 trans = np.array([[1, 0, 0, -t * ns], [0, 1, 0, -t * nr], [0, 0, 1, -t * nc], [0, 0, 0, 1]]) trans_inv = np.linalg.inv(trans) # Select arbitrary rotation and scaling matrices for theta in [-1 * np.pi / 5.0, 0.0, np.pi / 5.0]: # rotation angle for s in [0.45, 1.1, 2.0]: # scale rot = np.zeros(shape=(4, 4)) rot[:3, :3] = geometry.rodrigues_axis_rotation(axis, theta) rot[3, 3] = 1.0 scale = np.array([[1 * s, 0, 0, 0], [0, 1 * s, 0, 0], [0, 0, 1 * s, 0], [0, 0, 0, 1]]) aff = trans_inv.dot(scale.dot(rot.dot(trans))) # Select arbitrary (but different) grid-to-space transforms sampling_grid2world = scale field_grid2world = aff field_world2grid = np.linalg.inv(field_grid2world) image_grid2world = aff.dot(scale) image_world2grid = np.linalg.inv(image_grid2world) A = field_world2grid.dot(sampling_grid2world) B = image_world2grid.dot(sampling_grid2world) C = image_world2grid # Reorient the displacement field according to its grid-to-space # transform dcopy = np.copy(d) vfu.reorient_vector_field_3d(dcopy, field_grid2world) extended_dcopy = np.zeros( (ns + 2, nr + 2, nc + 2, 3), dtype=floating) extended_dcopy[1:ns + 1, 1:nr + 1, 1:nc + 1, :] = dcopy # Compute the warping coordinates (see warp_2d documentation) Y = np.apply_along_axis(A.dot, 0, X)[0:3, ...] Z = np.zeros_like(X) Z[0, ...] = map_coordinates(extended_dcopy[..., 0], Y + 1, order=1) Z[1, ...] = map_coordinates(extended_dcopy[..., 1], Y + 1, order=1) Z[2, ...] = map_coordinates(extended_dcopy[..., 2], Y + 1, order=1) Z[3, ...] = 0 Z = np.apply_along_axis(C.dot, 0, Z)[0:3, ...] 
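            # vfu.reorient_vector_field_3d (used above to build dcopy) applies
            # only the linear part of the grid-to-space matrix to every
            # displacement vector; translations do not act on vectors (see
            # test_reorient_random_vector_fields below).  An out-of-place
            # numpy equivalent, illustrative only (placeholder names):
            def _reorient(field_vectors, affine):
                lin = np.asarray(affine)[:3, :3]
                return np.einsum('ij,...j->...i', lin, field_vectors)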
T = np.apply_along_axis(B.dot, 0, X)[0:3, ...] W = T + Z # Test bilinear interpolation expected = map_coordinates(sphere, W, order=1) warped = vfu.warp_3d(sphere, dcopy, A, B, C, np.array(sh, dtype=np.int32)) assert_array_almost_equal(warped, expected, decimal=5) # Test nearest neighbor interpolation expected = map_coordinates(sphere, W, order=0) warped = vfu.warp_3d_nn(sphere, dcopy, A, B, C, np.array(sh, dtype=np.int32)) assert_array_almost_equal(warped, expected, decimal=5) # Test exception is raised when the affine transform matrix is not valid val = np.zeros((3, 4), dtype=np.float64) inval = np.zeros((3, 3), dtype=np.float64) sh = np.array(sh, dtype=np.int32) # Exceptions from warp_3d assert_raises(ValueError, vfu.warp_3d, sphere, d, inval, val, val, sh) assert_raises(ValueError, vfu.warp_3d, sphere, d, val, inval, val, sh) assert_raises(ValueError, vfu.warp_3d, sphere, d, val, val, inval, sh) # Exceptions from warp_3d_nn assert_raises(ValueError, vfu.warp_3d_nn, sphere, d, inval, val, val, sh) assert_raises(ValueError, vfu.warp_3d_nn, sphere, d, val, inval, val, sh) assert_raises(ValueError, vfu.warp_3d_nn, sphere, d, val, val, inval, sh) def test_affine_transforms_2d(): r""" Tests 2D affine transform functions against scipy implementation """ # Create a simple invertible affine transform d_shape = (64, 64) codomain_shape = (80, 80) nr = d_shape[0] nc = d_shape[1] # Create an image of a circle radius = 16 circle = vfu.create_circle(codomain_shape[0], codomain_shape[1], radius) circle = np.array(circle, dtype=floating) # Create grid coordinates x_0 = np.asarray(range(d_shape[0])) x_1 = np.asarray(range(d_shape[1])) X = np.empty((3,) + d_shape, dtype=np.float64) O = np.ones(d_shape) X[0, ...] = x_0[:, None] * O X[1, ...] = x_1[None, :] * O X[2, ...] = 1 # Generate affine transforms t = 0.3 trans = np.array([[1, 0, -t * nr], [0, 1, -t * nc], [0, 0, 1]]) trans_inv = np.linalg.inv(trans) for theta in [-1 * np.pi / 5.0, 0.0, np.pi / 5.0]: # rotation angle for s in [0.5, 1.0, 2.0]: # scale ct = np.cos(theta) st = np.sin(theta) rot = np.array([[ct, -st, 0], [st, ct, 0], [0, 0, 1]]) scale = np.array([[1 * s, 0, 0], [0, 1 * s, 0], [0, 0, 1]]) gt_affine = trans_inv.dot(scale.dot(rot.dot(trans))) # Apply the affine transform to the grid coordinates Y = np.apply_along_axis(gt_affine.dot, 0, X)[0:2, ...] 
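            # The expected image computed below samples `circle` at the
            # affinely mapped grid points; up to boundary handling this is
            # what scipy.ndimage.affine_transform does when the homogeneous
            # matrix is split into its linear part and offset.  Sketch
            # (illustrative only, not used by the test):
            def _scipy_affine_warp(image, hom_affine, out_shape):
                from scipy.ndimage import affine_transform
                return affine_transform(image, hom_affine[:2, :2],
                                        offset=hom_affine[:2, 2],
                                        output_shape=tuple(out_shape),
                                        order=1)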
expected = map_coordinates(circle, Y, order=1) warped = vfu.transform_2d_affine( circle, np.array( d_shape, dtype=np.int32), gt_affine) assert_array_almost_equal(warped, expected) # Test affine warping with nearest-neighbor interpolation expected = map_coordinates(circle, Y, order=0) warped = vfu.transform_2d_affine_nn( circle, np.array(d_shape, dtype=np.int32), gt_affine) assert_array_almost_equal(warped, expected) # Test the affine = None case warped = vfu.transform_2d_affine( circle, np.array( codomain_shape, dtype=np.int32), None) assert_array_equal(warped, circle) warped = vfu.transform_2d_affine_nn( circle, np.array( codomain_shape, dtype=np.int32), None) assert_array_equal(warped, circle) # Test exception is raised when the affine transform matrix is not valid invalid = np.zeros((2, 2), dtype=np.float64) invalid_nan = np.zeros((3, 3), dtype=np.float64) invalid_nan[1, 1] = np.nan shape = np.array(codomain_shape, dtype=np.int32) # Exceptions from transform_2d assert_raises(ValueError, vfu.transform_2d_affine, circle, shape, invalid) assert_raises( ValueError, vfu.transform_2d_affine, circle, shape, invalid_nan) # Exceptions from transform_2d_nn assert_raises( ValueError, vfu.transform_2d_affine_nn, circle, shape, invalid) assert_raises( ValueError, vfu.transform_2d_affine_nn, circle, shape, invalid_nan) def test_affine_transforms_3d(): r""" Tests 3D affine transform functions against scipy implementation """ # Create a simple invertible affine transform d_shape = (64, 64, 64) codomain_shape = (80, 80, 80) ns = d_shape[0] nr = d_shape[1] nc = d_shape[2] # Create an image of a sphere radius = 16 sphere = vfu.create_sphere(codomain_shape[0], codomain_shape[1], codomain_shape[2], radius) sphere = np.array(sphere, dtype=floating) # Create grid coordinates x_0 = np.asarray(range(d_shape[0])) x_1 = np.asarray(range(d_shape[1])) x_2 = np.asarray(range(d_shape[2])) X = np.empty((4,) + d_shape, dtype=np.float64) O = np.ones(d_shape) X[0, ...] = x_0[:, None, None] * O X[1, ...] = x_1[None, :, None] * O X[2, ...] = x_2[None, None, :] * O X[3, ...] = 1 # Generate affine transforms # Select an arbitrary rotation axis axis = np.array([.5, 2.0, 1.5]) t = 0.3 trans = np.array([[1, 0, 0, -t * ns], [0, 1, 0, -t * nr], [0, 0, 1, -t * nc], [0, 0, 0, 1]]) trans_inv = np.linalg.inv(trans) for theta in [-1 * np.pi / 5.0, 0.0, np.pi / 5.0]: # rotation angle for s in [0.45, 1.1, 2.3]: # scale rot = np.zeros(shape=(4, 4)) rot[:3, :3] = geometry.rodrigues_axis_rotation(axis, theta) rot[3, 3] = 1.0 scale = np.array([[1 * s, 0, 0, 0], [0, 1 * s, 0, 0], [0, 0, 1 * s, 0], [0, 0, 0, 1]]) gt_affine = trans_inv.dot(scale.dot(rot.dot(trans))) # Apply the affine transform to the grid coordinates Y = np.apply_along_axis(gt_affine.dot, 0, X)[0:3, ...] 
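            # geometry.rodrigues_axis_rotation (used above) produces a
            # rotation about an arbitrary axis.  For reference, the textbook
            # Rodrigues construction is sketched below; it is illustrative
            # only, and the dipy helper's exact angle convention (degrees vs.
            # radians) should be checked against its docstring.
            def _rodrigues_rotation(axis_vec, angle):
                u = np.asarray(axis_vec, dtype=np.float64)
                u = u / np.linalg.norm(u)
                K = np.array([[0.0, -u[2], u[1]],
                              [u[2], 0.0, -u[0]],
                              [-u[1], u[0], 0.0]])
                return (np.eye(3) + np.sin(angle) * K +
                        (1 - np.cos(angle)) * K.dot(K))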
expected = map_coordinates(sphere, Y, order=1) transformed = vfu.transform_3d_affine( sphere, np.array(d_shape, dtype=np.int32), gt_affine) assert_array_almost_equal(transformed, expected) # Test affine transform with nearest-neighbor interpolation expected = map_coordinates(sphere, Y, order=0) transformed = vfu.transform_3d_affine_nn( sphere, np.array(d_shape, dtype=np.int32), gt_affine) assert_array_almost_equal(transformed, expected) # Test the affine = None case transformed = vfu.transform_3d_affine( sphere, np.array(codomain_shape, dtype=np.int32), None) assert_array_equal(transformed, sphere) transformed = vfu.transform_3d_affine_nn( sphere, np.array(codomain_shape, dtype=np.int32), None) assert_array_equal(transformed, sphere) # Test exception is raised when the affine transform matrix is not valid invalid = np.zeros((3, 3), dtype=np.float64) invalid_nan = np.zeros((4, 4), dtype=np.float64) invalid_nan[1, 1] = np.nan shape = np.array(codomain_shape, dtype=np.int32) # Exceptions from transform_3d_affine assert_raises(ValueError, vfu.transform_3d_affine, sphere, shape, invalid) assert_raises( ValueError, vfu.transform_3d_affine, sphere, shape, invalid_nan) # Exceptions from transform_3d_affine_nn assert_raises( ValueError, vfu.transform_3d_affine_nn, sphere, shape, invalid) assert_raises( ValueError, vfu.transform_3d_affine_nn, sphere, shape, invalid_nan) def test_compose_vector_fields_2d(): r""" Creates two random displacement field that exactly map pixels from an input image to an output image. The resulting displacements and their composition, although operating in physical space, map the points exactly (up to numerical precision). """ np.random.seed(8315759) input_shape = (10, 10) tgt_sh = (10, 10) # create a simple affine transformation nr = input_shape[0] nc = input_shape[1] s = 1.5 t = 2.5 trans = np.array([[1, 0, -t * nr], [0, 1, -t * nc], [0, 0, 1]]) trans_inv = np.linalg.inv(trans) scale = np.array([[1 * s, 0, 0], [0, 1 * s, 0], [0, 0, 1]]) gt_affine = trans_inv.dot(scale.dot(trans)) # create two random displacement fields input_grid2world = gt_affine target_grid2world = gt_affine disp1, assign1 = vfu.create_random_displacement_2d( np.array(input_shape, dtype=np.int32), input_grid2world, np.array(tgt_sh, dtype=np.int32), target_grid2world) disp1 = np.array(disp1, dtype=floating) assign1 = np.array(assign1) disp2, assign2 = vfu.create_random_displacement_2d( np.array(input_shape, dtype=np.int32), input_grid2world, np.array(tgt_sh, dtype=np.int32), target_grid2world) disp2 = np.array(disp2, dtype=floating) assign2 = np.array(assign2) # create a random image (with decimal digits) to warp moving_image = np.empty(tgt_sh, dtype=floating) moving_image[...] 
=\ np.random.randint(0, 10, np.size(moving_image)).reshape(tuple(tgt_sh)) # set boundary values to zero so we don't test wrong interpolation due to # floating point precision moving_image[0, :] = 0 moving_image[-1, :] = 0 moving_image[:, 0] = 0 moving_image[:, -1] = 0 # evaluate the composed warping using the exact assignments # (first 1 then 2) warp1 = moving_image[(assign2[..., 0], assign2[..., 1])] expected = warp1[(assign1[..., 0], assign1[..., 1])] # compose the displacement fields target_world2grid = np.linalg.inv(target_grid2world) target_world2grid = np.linalg.inv(target_grid2world) premult_index = target_world2grid.dot(input_grid2world) premult_disp = target_world2grid for time_scaling in [0.25, 0.5, 1.0, 2.0, 4.0]: composition, stats = vfu.compose_vector_fields_2d(disp1, disp2 / time_scaling, premult_index, premult_disp, time_scaling, None) # apply the implementation under test warped = np.array(vfu.warp_2d(moving_image, composition, None, premult_index, premult_disp)) assert_array_almost_equal(warped, expected) # test also using nearest neighbor interpolation warped = np.array(vfu.warp_2d_nn(moving_image, composition, None, premult_index, premult_disp)) assert_array_almost_equal(warped, expected) # test updating the displacement field instead of creating a new one composition = disp1.copy() vfu.compose_vector_fields_2d(composition, disp2 / time_scaling, premult_index, premult_disp, time_scaling, composition) # apply the implementation under test warped = np.array(vfu.warp_2d(moving_image, composition, None, premult_index, premult_disp)) assert_array_almost_equal(warped, expected) # test also using nearest neighbor interpolation warped = np.array(vfu.warp_2d_nn(moving_image, composition, None, premult_index, premult_disp)) assert_array_almost_equal(warped, expected) # Test non-overlapping case x_0 = np.asarray(range(input_shape[0])) x_1 = np.asarray(range(input_shape[1])) X = np.empty(input_shape + (2,), dtype=np.float64) O = np.ones(input_shape) X[..., 0] = x_0[:, None] * O X[..., 1] = x_1[None, :] * O random_labels = np.random.randint( 0, 2, input_shape[0] * input_shape[1] * 2) random_labels = random_labels.reshape(input_shape + (2,)) values = np.array([-1, tgt_sh[0]]) disp1 = (values[random_labels] - X).astype(floating) composition, stats = vfu.compose_vector_fields_2d(disp1, disp2, None, None, 1.0, None) assert_array_almost_equal(composition, np.zeros_like(composition)) # test updating the displacement field instead of creating a new one composition = disp1.copy() vfu.compose_vector_fields_2d(composition, disp2, None, None, 1.0, composition) assert_array_almost_equal(composition, np.zeros_like(composition)) # Test exception is raised when the affine transform matrix is not valid valid = np.zeros((2, 3), dtype=np.float64) invalid = np.zeros((2, 2), dtype=np.float64) assert_raises(ValueError, vfu.compose_vector_fields_2d, disp1, disp2, invalid, valid, 1.0, None) assert_raises(ValueError, vfu.compose_vector_fields_2d, disp1, disp2, valid, invalid, 1.0, None) def test_compose_vector_fields_3d(): r""" Creates two random displacement field that exactly map pixels from an input image to an output image. The resulting displacements and their composition, although operating in physical space, map the points exactly (up to numerical precision). 
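    Up to the grid/world bookkeeping handled by the premultiplication
    matrices, the composition being verified here applies disp1 first and
    then disp2:

        comp(x) = disp1(x) + disp2(x + disp1(x))

    so warping an image with comp should agree with warping it by disp1 and
    then warping the result by disp2.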
""" np.random.seed(8315759) input_shape = (10, 10, 10) tgt_sh = (10, 10, 10) # create a simple affine transformation ns = input_shape[0] nr = input_shape[1] nc = input_shape[2] s = 1.5 t = 2.5 trans = np.array([[1, 0, 0, -t * ns], [0, 1, 0, -t * nr], [0, 0, 1, -t * nc], [0, 0, 0, 1]]) trans_inv = np.linalg.inv(trans) scale = np.array([[1 * s, 0, 0, 0], [0, 1 * s, 0, 0], [0, 0, 1 * s, 0], [0, 0, 0, 1]]) gt_affine = trans_inv.dot(scale.dot(trans)) # create two random displacement fields input_grid2world = gt_affine target_grid2world = gt_affine disp1, assign1 = vfu.create_random_displacement_3d( np.array(input_shape, dtype=np.int32), input_grid2world, np.array(tgt_sh, dtype=np.int32), target_grid2world) disp1 = np.array(disp1, dtype=floating) assign1 = np.array(assign1) disp2, assign2 = vfu.create_random_displacement_3d( np.array( input_shape, dtype=np.int32), input_grid2world, np.array( tgt_sh, dtype=np.int32), target_grid2world) disp2 = np.array(disp2, dtype=floating) assign2 = np.array(assign2) # create a random image (with decimal digits) to warp moving_image = np.empty(tgt_sh, dtype=floating) moving_image[...] =\ np.random.randint(0, 10, np.size(moving_image)).reshape(tuple(tgt_sh)) # set boundary values to zero so we don't test wrong interpolation due to # floating point precision moving_image[0, :, :] = 0 moving_image[-1, :, :] = 0 moving_image[:, 0, :] = 0 moving_image[:, -1, :] = 0 moving_image[:, :, 0] = 0 moving_image[:, :, -1] = 0 # evaluate the composed warping using the exact assignments # (first 1 then 2) warp1 = moving_image[(assign2[..., 0], assign2[..., 1], assign2[..., 2])] expected = warp1[(assign1[..., 0], assign1[..., 1], assign1[..., 2])] # compose the displacement fields target_world2grid = np.linalg.inv(target_grid2world) target_world2grid = np.linalg.inv(target_grid2world) premult_index = target_world2grid.dot(input_grid2world) premult_disp = target_world2grid for time_scaling in [0.25, 0.5, 1.0, 2.0, 4.0]: composition, stats = vfu.compose_vector_fields_3d(disp1, disp2 / time_scaling, premult_index, premult_disp, time_scaling, None) # apply the implementation under test warped = np.array(vfu.warp_3d(moving_image, composition, None, premult_index, premult_disp)) assert_array_almost_equal(warped, expected) # test also using nearest neighbor interpolation warped = np.array(vfu.warp_3d_nn(moving_image, composition, None, premult_index, premult_disp)) assert_array_almost_equal(warped, expected) # test updating the displacement field instead of creating a new one composition = disp1.copy() vfu.compose_vector_fields_3d(composition, disp2 / time_scaling, premult_index, premult_disp, time_scaling, composition) # apply the implementation under test warped = np.array(vfu.warp_3d(moving_image, composition, None, premult_index, premult_disp)) assert_array_almost_equal(warped, expected) # test also using nearest neighbor interpolation warped = np.array(vfu.warp_3d_nn(moving_image, composition, None, premult_index, premult_disp)) assert_array_almost_equal(warped, expected) # Test non-overlapping case x_0 = np.asarray(range(input_shape[0])) x_1 = np.asarray(range(input_shape[1])) x_2 = np.asarray(range(input_shape[2])) X = np.empty(input_shape + (3,), dtype=np.float64) O = np.ones(input_shape) X[..., 0] = x_0[:, None, None] * O X[..., 1] = x_1[None, :, None] * O X[..., 2] = x_2[None, None, :] * O sz = input_shape[0] * input_shape[1] * input_shape[2] * 3 random_labels = np.random.randint(0, 2, sz) random_labels = random_labels.reshape(input_shape + (3,)) values = np.array([-1, 
tgt_sh[0]]) disp1 = (values[random_labels] - X).astype(floating) composition, stats = vfu.compose_vector_fields_3d(disp1, disp2, None, None, 1.0, None) assert_array_almost_equal(composition, np.zeros_like(composition)) # test updating the displacement field instead of creating a new one composition = disp1.copy() vfu.compose_vector_fields_3d(composition, disp2, None, None, 1.0, composition) assert_array_almost_equal(composition, np.zeros_like(composition)) # Test exception is raised when the affine transform matrix is not valid valid = np.zeros((3, 4), dtype=np.float64) invalid = np.zeros((3, 3), dtype=np.float64) assert_raises(ValueError, vfu.compose_vector_fields_3d, disp1, disp2, invalid, valid, 1.0, None) assert_raises(ValueError, vfu.compose_vector_fields_3d, disp1, disp2, valid, invalid, 1.0, None) def test_invert_vector_field_2d(): r""" Inverts a synthetic, analytically invertible, displacement field """ shape = (64, 64) nr = shape[0] nc = shape[1] # Create an arbitrary image-to-space transform t = 2.5 # translation factor trans = np.array([[1, 0, -t * nr], [0, 1, -t * nc], [0, 0, 1]]) trans_inv = np.linalg.inv(trans) d, dinv = vfu.create_harmonic_fields_2d(nr, nc, 0.2, 8) d = np.asarray(d).astype(floating) dinv = np.asarray(dinv).astype(floating) for theta in [-1 * np.pi / 5.0, 0.0, np.pi / 5.0]: # rotation angle for s in [0.5, 1.0, 2.0]: # scale ct = np.cos(theta) st = np.sin(theta) rot = np.array([[ct, -st, 0], [st, ct, 0], [0, 0, 1]]) scale = np.array([[1 * s, 0, 0], [0, 1 * s, 0], [0, 0, 1]]) gt_affine = trans_inv.dot(scale.dot(rot.dot(trans))) gt_affine_inv = np.linalg.inv(gt_affine) dcopy = np.copy(d) # make sure the field remains invertible after the re-mapping vfu.reorient_vector_field_2d(dcopy, gt_affine) inv_approx =\ vfu.invert_vector_field_fixed_point_2d(dcopy, gt_affine_inv, np.array([s, s]), 40, 1e-7) mapping = imwarp.DiffeomorphicMap(2, (nr, nc), gt_affine) mapping.forward = dcopy mapping.backward = inv_approx residual, stats = mapping.compute_inversion_error() assert_almost_equal(stats[1], 0, decimal=4) assert_almost_equal(stats[2], 0, decimal=4) # Test exception is raised when the affine transform matrix is not valid invalid = np.zeros((2, 2), dtype=np.float64) spacing = np.array([1.0, 1.0]) assert_raises(ValueError, vfu.invert_vector_field_fixed_point_2d, d, invalid, spacing, 40, 1e-7, None) def test_invert_vector_field_3d(): r""" Inverts a synthetic, analytically invertible, displacement field """ shape = (64, 64, 64) ns = shape[0] nr = shape[1] nc = shape[2] # Create an arbitrary image-to-space transform # Select an arbitrary rotation axis axis = np.array([2.0, 0.5, 1.0]) t = 2.5 # translation factor trans = np.array([[1, 0, 0, -t * ns], [0, 1, 0, -t * nr], [0, 0, 1, -t * nc], [0, 0, 0, 1]]) trans_inv = np.linalg.inv(trans) d, dinv = vfu.create_harmonic_fields_3d(ns, nr, nc, 0.2, 8) d = np.asarray(d).astype(floating) dinv = np.asarray(dinv).astype(floating) for theta in [-1 * np.pi / 5.0, 0.0, np.pi / 5.0]: # rotation angle for s in [0.5, 1.0, 2.0]: # scale rot = np.zeros(shape=(4, 4)) rot[:3, :3] = geometry.rodrigues_axis_rotation(axis, theta) rot[3, 3] = 1.0 scale = np.array([[1 * s, 0, 0, 0], [0, 1 * s, 0, 0], [0, 0, 1 * s, 0], [0, 0, 0, 1]]) gt_affine = trans_inv.dot(scale.dot(rot.dot(trans))) gt_affine_inv = np.linalg.inv(gt_affine) dcopy = np.copy(d) # make sure the field remains invertible after the re-mapping vfu.reorient_vector_field_3d(dcopy, gt_affine) # Note: the spacings are used just to check convergence, so they # don't need to be very accurate. 
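            # The fixed-point inversion solves g(x) = -d(x + g(x)) for the
            # inverse displacement g by straightforward iteration.  A
            # bare-bones 1-D analogue of the scheme (illustrative only; the
            # real routine also handles the world-to-grid matrix, the voxel
            # spacings and a tolerance/iteration cap):
            def _invert_1d(grid_x, disp_1d, n_iter=40):
                g = np.zeros_like(disp_1d)
                for _ in range(n_iter):
                    g = -np.interp(grid_x + g, grid_x, disp_1d)
                return g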
Here we are passing (0.5 * s) to # force the algorithm to make more iterations: in ANTS, there is a # hard-coded bound on the maximum residual, that's why we cannot # force more iteration by changing the parameters. # We will investigate this issue with more detail in the future. inv_approx = vfu.invert_vector_field_fixed_point_3d( dcopy, gt_affine_inv, np.array([s, s, s]) * 0.5, 40, 1e-7) mapping = imwarp.DiffeomorphicMap(3, (nr, nc), gt_affine) mapping.forward = dcopy mapping.backward = inv_approx residual, stats = mapping.compute_inversion_error() assert_almost_equal(stats[1], 0, decimal=3) assert_almost_equal(stats[2], 0, decimal=3) # Test exception is raised when the affine transform matrix is not valid invalid = np.zeros((3, 3), dtype=np.float64) spacing = np.array([1.0, 1.0, 1.0]) assert_raises(ValueError, vfu.invert_vector_field_fixed_point_3d, d, invalid, spacing, 40, 1e-7, None) def test_resample_vector_field_2d(): r""" Expand a vector field by 2, then subsample by 2, the resulting field should be the original one """ domain_shape = np.array((64, 64), dtype=np.int32) reduced_shape = np.array((32, 32), dtype=np.int32) factors = np.array([0.5, 0.5]) d, dinv = vfu.create_harmonic_fields_2d(reduced_shape[0], reduced_shape[1], 0.3, 6) d = np.array(d, dtype=floating) expanded = vfu.resample_displacement_field_2d(d, factors, domain_shape) subsampled = expanded[::2, ::2, :] assert_array_almost_equal(d, subsampled) def test_resample_vector_field_3d(): r""" Expand a vector field by 2, then subsample by 2, the resulting field should be the original one """ domain_shape = np.array((64, 64, 64), dtype=np.int32) reduced_shape = np.array((32, 32, 32), dtype=np.int32) factors = np.array([0.5, 0.5, 0.5]) d, dinv = vfu.create_harmonic_fields_3d(reduced_shape[0], reduced_shape[1], reduced_shape[2], 0.3, 6) d = np.array(d, dtype=floating) expanded = vfu.resample_displacement_field_3d(d, factors, domain_shape) subsampled = expanded[::2, ::2, ::2, :] assert_array_almost_equal(d, subsampled) def test_downsample_scalar_field_2d(): np.random.seed(8315759) size = 32 sh = (size, size) for reduce_r in [True, False]: nr = size - 1 if reduce_r else size for reduce_c in [True, False]: nc = size - 1 if reduce_c else size image = np.empty((size, size), dtype=floating) image[...] = np.random.randint(0, 10, np.size(image)).reshape(sh) if reduce_r: image[-1, :] = 0 if reduce_c: image[:, -1] = 0 a = image[::2, ::2] b = image[1::2, ::2] c = image[::2, 1::2] d = image[1::2, 1::2] expected = 0.25 * (a + b + c + d) if reduce_r: expected[-1, :] *= 2 if reduce_c: expected[:, -1] *= 2 actual = np.array(vfu.downsample_scalar_field_2d(image[:nr, :nc])) assert_array_almost_equal(expected, actual) def test_downsample_displacement_field_2d(): np.random.seed(2115556) size = 32 sh = (size, size, 2) for reduce_r in [True, False]: nr = size - 1 if reduce_r else size for reduce_c in [True, False]: nc = size - 1 if reduce_c else size field = np.empty((size, size, 2), dtype=floating) field[...] 
= np.random.randint(0, 10, np.size(field)).reshape(sh) if reduce_r: field[-1, :, :] = 0 if reduce_c: field[:, -1, :] = 0 a = field[::2, ::2, :] b = field[1::2, ::2, :] c = field[::2, 1::2, :] d = field[1::2, 1::2, :] expected = 0.25 * (a + b + c + d) if reduce_r: expected[-1, :, :] *= 2 if reduce_c: expected[:, -1, :] *= 2 actual = vfu.downsample_displacement_field_2d(field[:nr, :nc, :]) assert_array_almost_equal(expected, actual) def test_downsample_scalar_field_3d(): np.random.seed(8315759) size = 32 sh = (size, size, size) for reduce_s in [True, False]: ns = size - 1 if reduce_s else size for reduce_r in [True, False]: nr = size - 1 if reduce_r else size for reduce_c in [True, False]: nc = size - 1 if reduce_c else size image = np.empty((size, size, size), dtype=floating) image[...] =\ np.random.randint(0, 10, np.size(image)).reshape(sh) if reduce_s: image[-1, :, :] = 0 if reduce_r: image[:, -1, :] = 0 if reduce_c: image[:, :, -1] = 0 a = image[::2, ::2, ::2] b = image[1::2, ::2, ::2] c = image[::2, 1::2, ::2] d = image[1::2, 1::2, ::2] aa = image[::2, ::2, 1::2] bb = image[1::2, ::2, 1::2] cc = image[::2, 1::2, 1::2] dd = image[1::2, 1::2, 1::2] expected = 0.125 * (a + b + c + d + aa + bb + cc + dd) if reduce_s: expected[-1, :, :] *= 2 if reduce_r: expected[:, -1, :] *= 2 if reduce_c: expected[:, :, -1] *= 2 actual = vfu.downsample_scalar_field_3d(image[:ns, :nr, :nc]) assert_array_almost_equal(expected, actual) def test_downsample_displacement_field_3d(): np.random.seed(8315759) size = 32 sh = (size, size, size, 3) for reduce_s in [True, False]: ns = size - 1 if reduce_s else size for reduce_r in [True, False]: nr = size - 1 if reduce_r else size for reduce_c in [True, False]: nc = size - 1 if reduce_c else size field = np.empty((size, size, size, 3), dtype=floating) field[...] 
=\ np.random.randint(0, 10, np.size(field)).reshape(sh) if reduce_s: field[-1, :, :] = 0 if reduce_r: field[:, -1, :] = 0 if reduce_c: field[:, :, -1] = 0 a = field[::2, ::2, ::2, :] b = field[1::2, ::2, ::2, :] c = field[::2, 1::2, ::2, :] d = field[1::2, 1::2, ::2, :] aa = field[::2, ::2, 1::2, :] bb = field[1::2, ::2, 1::2, :] cc = field[::2, 1::2, 1::2, :] dd = field[1::2, 1::2, 1::2, :] expected = 0.125 * (a + b + c + d + aa + bb + cc + dd) if reduce_s: expected[-1, :, :, :] *= 2 if reduce_r: expected[:, -1, :, :] *= 2 if reduce_c: expected[:, :, -1, :] *= 2 actual =\ vfu.downsample_displacement_field_3d(field[:ns, :nr, :nc]) assert_array_almost_equal(expected, actual) def test_reorient_vector_field_2d(): shape = (16, 16) d, dinv = vfu.create_harmonic_fields_2d(shape[0], shape[1], 0.2, 4) d = np.array(d, dtype=floating) # the vector field rotated 90 degrees expected = np.empty(shape=shape + (2,), dtype=floating) expected[..., 0] = -1 * d[..., 1] expected[..., 1] = d[..., 0] # rotate 45 degrees twice c = np.sqrt(0.5) affine = np.array([[c, -c, 0.0], [c, c, 0.0]]) vfu.reorient_vector_field_2d(d, affine) vfu.reorient_vector_field_2d(d, affine) # verify almost equal assert_array_almost_equal(d, expected) # Test exception is raised when the affine transform matrix is not valid invalid = np.zeros((2, 2), dtype=np.float64) assert_raises(ValueError, vfu.reorient_vector_field_2d, d, invalid) def test_reorient_vector_field_3d(): sh = (16, 16, 16) d, dinv = vfu.create_harmonic_fields_3d(sh[0], sh[1], sh[2], 0.2, 4) d = np.array(d, dtype=floating) dinv = np.array(dinv, dtype=floating) # the vector field rotated 90 degrees around the last axis expected = np.empty(shape=sh + (3,), dtype=floating) expected[..., 0] = -1 * d[..., 1] expected[..., 1] = d[..., 0] expected[..., 2] = d[..., 2] # rotate 45 degrees twice around the last axis c = np.sqrt(0.5) affine = np.array([[c, -c, 0, 0], [c, c, 0, 0], [0, 0, 1, 0]]) vfu.reorient_vector_field_3d(d, affine) vfu.reorient_vector_field_3d(d, affine) # verify almost equal assert_array_almost_equal(d, expected) # the vector field rotated 90 degrees around the first axis expected[..., 0] = dinv[..., 0] expected[..., 1] = -1 * dinv[..., 2] expected[..., 2] = dinv[..., 1] # rotate 45 degrees twice around the first axis affine = np.array([[1, 0, 0, 0], [0, c, -c, 0], [0, c, c, 0]]) vfu.reorient_vector_field_3d(dinv, affine) vfu.reorient_vector_field_3d(dinv, affine) # verify almost equal assert_array_almost_equal(dinv, expected) # Test exception is raised when the affine transform matrix is not valid invalid = np.zeros((3, 3), dtype=np.float64) assert_raises(ValueError, vfu.reorient_vector_field_3d, d, invalid) def test_reorient_random_vector_fields(): np.random.seed(1134781) # Test reorienting vector field for n_dims, func in ((2, vfu.reorient_vector_field_2d), (3, vfu.reorient_vector_field_3d)): size = [20, 30, 40][:n_dims] + [n_dims] arr = np.random.normal(size=size) arr_32 = arr.astype(floating) affine = from_matvec(np.random.normal(size=(n_dims, n_dims)), np.zeros(n_dims)) func(arr_32, affine) assert_almost_equal(arr_32, apply_affine(affine, arr), 6) # Reorient reorients without translation trans = np.arange(n_dims) + 2 affine[:-1, -1] = trans arr_32 = arr.astype(floating) func(arr_32, affine) assert_almost_equal(arr_32, apply_affine(affine, arr) - trans, 6) # Test exception is raised when the affine transform is not valid invalid = np.eye(n_dims) assert_raises(ValueError, func, arr_32, invalid) def test_gradient_2d(): np.random.seed(3921116) sh = (25, 32) # 
Create grid coordinates x_0 = np.asarray(range(sh[0])) x_1 = np.asarray(range(sh[1])) X = np.empty(sh + (3,), dtype=np.float64) O = np.ones(sh) X[..., 0] = x_0[:, None] * O X[..., 1] = x_1[None, :] * O X[..., 2] = 1 transform = regtransforms[('RIGID', 2)] theta = np.array([0.1, 5.0, 2.5]) T = transform.param_to_matrix(theta) TX = X.dot(T.T) # Eval an arbitrary (known) function at TX # f(x, y) = ax^2 + bxy + cy^{2} # df/dx = 2ax + by # df/dy = 2cy + bx a = 2e-3 b = 5e-3 c = 7e-3 img = a * TX[..., 0] ** 2 +\ b * TX[..., 0] * TX[..., 1] +\ c * TX[..., 1] ** 2 img = img.astype(floating) # img is an image sampled at X with grid-to-space transform T # Test sparse gradient: choose some sample points (in space) sample = sample_domain_regular(20, np.array(sh, dtype=np.int32), T) sample = np.array(sample) # Compute the analytical gradient at all points expected = np.empty((sample.shape[0], 2), dtype=floating) expected[..., 0] = 2 * a * sample[:, 0] + b * sample[:, 1] expected[..., 1] = 2 * c * sample[:, 1] + b * sample[:, 0] # Get the numerical gradient with the implementation under test sp_to_grid = np.linalg.inv(T) img_spacing = np.ones(2) actual, inside = vfu.sparse_gradient(img, sp_to_grid, img_spacing, sample) diff = np.abs(expected - actual).mean(1) * inside # The finite differences are really not accurate, especially with float32 assert_equal(diff.max() < 1e-3, True) # Verify exception is raised when passing invalid affine or spacings invalid_affine = np.eye(2) invalid_spacings = np.ones(1) assert_raises(ValueError, vfu.sparse_gradient, img, invalid_affine, img_spacing, sample) assert_raises(ValueError, vfu.sparse_gradient, img, sp_to_grid, invalid_spacings, sample) # Test dense gradient # Compute the analytical gradient at all points expected = np.empty(sh + (2,), dtype=floating) expected[..., 0] = 2 * a * TX[..., 0] + b * TX[..., 1] expected[..., 1] = 2 * c * TX[..., 1] + b * TX[..., 0] # Get the numerical gradient with the implementation under test sp_to_grid = np.linalg.inv(T) img_spacing = np.ones(2) actual, inside = vfu.gradient(img, sp_to_grid, img_spacing, sh, T) diff = np.abs(expected - actual).mean(2) * inside # In the dense case, we are evaluating at the exact points (sample points # are not slightly moved like in the sparse case) so we have more precision assert_equal(diff.max() < 1e-5, True) # Verify exception is raised when passing invalid affine or spacings assert_raises(ValueError, vfu.gradient, img, invalid_affine, img_spacing, sh, T) assert_raises(ValueError, vfu.gradient, img, sp_to_grid, img_spacing, sh, invalid_affine) assert_raises(ValueError, vfu.gradient, img, sp_to_grid, invalid_spacings, sh, T) def test_gradient_3d(): np.random.seed(3921116) shape = (25, 32, 15) # Create grid coordinates x_0 = np.asarray(range(shape[0])) x_1 = np.asarray(range(shape[1])) x_2 = np.asarray(range(shape[2])) X = np.zeros(shape + (4,), dtype=np.float64) O = np.ones(shape) X[..., 0] = x_0[:, None, None] * O X[..., 1] = x_1[None, :, None] * O X[..., 2] = x_2[None, None, :] * O X[..., 3] = 1 transform = regtransforms[('RIGID', 3)] theta = np.array([0.1, 0.05, 0.12, -12.0, -15.5, -7.2]) T = transform.param_to_matrix(theta) TX = X.dot(T.T) # Eval an arbitrary (known) function at TX # f(x, y, z) = ax^2 + by^2 + cz^2 + dxy + exz + fyz # df/dx = 2ax + dy + ez # df/dy = 2by + dx + fz # df/dz = 2cz + ex + fy a, b, c = 2e-3, 3e-3, 1e-3 d, e, f = 1e-3, 2e-3, 3e-3 img = a * TX[..., 0] ** 2 + b * TX[..., 1] ** 2 +\ c * TX[..., 2] ** 2 + d * TX[..., 0] * TX[..., 1] +\ e * TX[..., 0] * TX[..., 2] + f * 
TX[..., 1] * TX[..., 2] img = img.astype(floating) # Test sparse gradient: choose some sample points (in space) sample =\ sample_domain_regular(100, np.array(shape, dtype=np.int32), T) sample = np.array(sample) # Compute the analytical gradient at all points expected = np.empty((sample.shape[0], 3), dtype=floating) expected[..., 0] =\ 2 * a * sample[:, 0] + d * sample[:, 1] + e * sample[:, 2] expected[..., 1] =\ 2 * b * sample[:, 1] + d * sample[:, 0] + f * sample[:, 2] expected[..., 2] =\ 2 * c * sample[:, 2] + e * sample[:, 0] + f * sample[:, 1] # Get the numerical gradient with the implementation under test sp_to_grid = np.linalg.inv(T) img_spacing = np.ones(3) actual, inside = vfu.sparse_gradient(img, sp_to_grid, img_spacing, sample) # Discard points outside the image domain diff = np.abs(expected - actual).mean(1) * inside # The finite differences are really not accurate, especially with float32 assert_equal(diff.max() < 1e-3, True) # Verify exception is raised when passing invalid affine or spacings invalid_affine = np.eye(3) invalid_spacings = np.ones(2) assert_raises(ValueError, vfu.sparse_gradient, img, invalid_affine, img_spacing, sample) assert_raises(ValueError, vfu.sparse_gradient, img, sp_to_grid, invalid_spacings, sample) # Test dense gradient # Compute the analytical gradient at all points expected = np.empty(shape + (3,), dtype=floating) expected[..., 0] = 2 * a * TX[..., 0] + d * TX[..., 1] + e * TX[..., 2] expected[..., 1] = 2 * b * TX[..., 1] + d * TX[..., 0] + f * TX[..., 2] expected[..., 2] = 2 * c * TX[..., 2] + e * TX[..., 0] + f * TX[..., 1] # Get the numerical gradient with the implementation under test sp_to_grid = np.linalg.inv(T) img_spacing = np.ones(3) actual, inside = vfu.gradient(img, sp_to_grid, img_spacing, shape, T) diff = np.abs(expected - actual).mean(3) * inside # In the dense case, we are evaluating at the exact points (sample points # are not slightly moved like in the sparse case) so we have more precision assert_equal(diff.max() < 1e-5, True) # Verify exception is raised when passing invalid affine or spacings assert_raises(ValueError, vfu.gradient, img, invalid_affine, img_spacing, shape, T) assert_raises(ValueError, vfu.gradient, img, sp_to_grid, img_spacing, shape, invalid_affine) assert_raises(ValueError, vfu.gradient, img, sp_to_grid, invalid_spacings, shape, T) dipy-0.13.0/dipy/align/transforms.pxd000066400000000000000000000004641317371701200175200ustar00rootroot00000000000000cdef class Transform: cdef: int number_of_parameters int dim cdef int _jacobian(self, double[:] theta, double[:] x, double[:, :] J)nogil cdef void _get_identity_parameters(self, double[:] theta) nogil cdef void _param_to_matrix(self, double[:] theta, double[:, :] T)nogil dipy-0.13.0/dipy/align/transforms.pyx000066400000000000000000001036631317371701200175520ustar00rootroot00000000000000#!python #cython: boundscheck=False #cython: wraparound=False #cython: cdivision=True import numpy as np cimport numpy as cnp cimport cython cdef extern from "dpy_math.h" nogil: double cos(double) double sin(double) double log(double) cdef class Transform: r""" Base class (contract) for all transforms for affine image registration Each transform must define the following (fast, nogil) methods: 1. _jacobian(theta, x, J): receives a parameter vector theta, a point in x, and a matrix J with shape (dim, len(theta)). It must writes in J, the Jacobian of the transform with parameters theta evaluated at x. 2. 
_get_identity_parameters(theta): receives a vector theta whose length is the number of parameters of the transform and sets in theta the values that define the identity transform. 3. _param_to_matrix(theta, T): receives a parameter vector theta, and a matrix T of shape (dim + 1, dim + 1) and writes in T the matrix representation of the transform with parameters theta This base class defines the (slow, convenient) python wrappers for each of the above functions, which also do parameter checking and raise a ValueError in case the provided parameters are invalid. """ def __cinit__(self): r""" Default constructor Sets transform dimension and number of parameter to invalid values (-1) """ self.dim = -1 self.number_of_parameters = -1 cdef int _jacobian(self, double[:] theta, double[:] x, double[:, :] J)nogil: return -1 cdef void _get_identity_parameters(self, double[:] theta) nogil: return cdef void _param_to_matrix(self, double[:] theta, double[:, :] T)nogil: return def jacobian(self, double[:] theta, double[:] x): r""" Jacobian function of this transform Parameters ---------- theta : array, shape (n,) vector containing the n parameters of this transform x : array, shape (dim,) vector containing the point where the Jacobian must be evaluated Returns ------- J : array, shape (dim, n) Jacobian matrix of the transform with parameters theta at point x """ n = theta.shape[0] if n != self.number_of_parameters: raise ValueError("Invalid number of parameters: %d"%(n,)) m = x.shape[0] if m < self.dim: raise ValueError("Invalid point dimension: %d"%(m,)) J = np.zeros((self.dim, n)) ret = self._jacobian(theta, x, J) return np.asarray(J) def get_identity_parameters(self): r""" Parameter values corresponding to the identity transform Returns ------- theta : array, shape (n,) the n parameter values corresponding to the identity transform """ if self.number_of_parameters < 0: raise ValueError("Invalid transform.") theta = np.zeros(self.number_of_parameters) self._get_identity_parameters(theta) return np.asarray(theta) def param_to_matrix(self, double[:] theta): r""" Matrix representation of this transform with the given parameters Parameters ---------- theta : array, shape (n,) the parameter values of the transform Returns ------- T : array, shape (dim + 1, dim + 1) the matrix representation of this transform with parameters theta """ n = len(theta) if n != self.number_of_parameters: raise ValueError("Invalid number of parameters: %d"%(n,)) T = np.eye(self.dim + 1) self._param_to_matrix(theta, T) return np.asarray(T) def get_number_of_parameters(self): return self.number_of_parameters def get_dim(self): return self.dim cdef class TranslationTransform2D(Transform): def __init__(self): r""" Translation transform in 2D """ self.dim = 2 self.number_of_parameters = 2 cdef int _jacobian(self, double[:] theta, double[:] x, double[:, :] J)nogil: r""" Jacobian matrix of the 2D translation transform The transformation is given by: T(x) = (T1(x), T2(x)) = (x0 + t0, x1 + t1) The derivative w.r.t. t1 and t2 is given by T'(x) = [[1, 0], # derivatives of [T1, T2] w.r.t. t0 [0, 1]] # derivatives of [T1, T2] w.r.t. 
t1 Parameters ---------- theta : array, shape (2,) the parameters of the 2D translation transform (the Jacobian does not depend on the parameters, but we receive the buffer so all Jacobian functions receive the same parameters) x : array, shape (2,) the point at which to compute the Jacobian (the Jacobian does not depend on x, but we receive the buffer so all Jacobian functions receive the same parameters) J : array, shape (2, 2) the buffer in which to write the Jacobian Returns ------- is_constant : int always returns 1, indicating that the Jacobian is constant (independent of x) """ J[0, 0], J[0, 1] = 1.0, 0.0 J[1, 0], J[1, 1] = 0.0, 1.0 # This Jacobian does not depend on x (it's constant): return 1 return 1 cdef void _get_identity_parameters(self, double[:] theta) nogil: r""" Parameter values corresponding to the identity Sets in theta the parameter values corresponding to the identity transform Parameters ---------- theta : array, shape (2,) buffer to write the parameters of the 2D translation transform """ theta[:2] = 0 cdef void _param_to_matrix(self, double[:] theta, double[:, :] R) nogil: r""" Matrix associated with the 2D translation transform Parameters ---------- theta : array, shape (2,) the parameters of the 2D translation transform R : array, shape (3, 3) the buffer in which to write the translation matrix """ R[0, 0], R[0, 1], R[0, 2] = 1, 0, theta[0] R[1, 0], R[1, 1], R[1, 2] = 0, 1, theta[1] R[2, 0], R[2, 1], R[2, 2] = 0, 0, 1 cdef class TranslationTransform3D(Transform): def __init__(self): r""" Translation transform in 3D """ self.dim = 3 self.number_of_parameters = 3 cdef int _jacobian(self, double[:] theta, double[:] x, double[:, :] J)nogil: r""" Jacobian matrix of the 3D translation transform The transformation is given by: T(x) = (T1(x), T2(x), T3(x)) = (x0 + t0, x1 + t1, x2 + t2) The derivative w.r.t. t1, t2 and t3 is given by T'(x) = [[1, 0, 0], # derivatives of [T1, T2, T3] w.r.t. t0 [0, 1, 0], # derivatives of [T1, T2, T3] w.r.t. t1 [0, 0, 1]] # derivatives of [T1, T2, T3] w.r.t. 
t2 Parameters ---------- theta : array, shape (3,) the parameters of the 3D translation transform (the Jacobian does not depend on the parameters, but we receive the buffer so all Jacobian functions receive the same parameters) x : array, shape (3,) the point at which to compute the Jacobian (the Jacobian does not depend on x, but we receive the buffer so all Jacobian functions receive the same parameters) J : array, shape (3, 3) the buffer in which to write the Jacobian Returns ------- is_constant : int always returns 1, indicating that the Jacobian is constant (independent of x) """ J[0, 0], J[0, 1], J[0, 2] = 1.0, 0.0, 0.0 J[1, 0], J[1, 1], J[1, 2] = 0.0, 1.0, 0.0 J[2, 0], J[2, 1], J[2, 2] = 0.0, 0.0, 1.0 # This Jacobian does not depend on x (it's constant): return 1 return 1 cdef void _get_identity_parameters(self, double[:] theta) nogil: r""" Parameter values corresponding to the identity Sets in theta the parameter values corresponding to the identity transform Parameters ---------- theta : array, shape (3,) buffer to write the parameters of the 3D translation transform """ theta[:3] = 0 cdef void _param_to_matrix(self, double[:] theta, double[:, :] R) nogil: r""" Matrix associated with the 3D translation transform Parameters ---------- theta : array, shape (3,) the parameters of the 3D translation transform R : array, shape (4, 4) the buffer in which to write the translation matrix """ R[0, 0], R[0, 1], R[0, 2], R[0, 3] = 1, 0, 0, theta[0] R[1, 0], R[1, 1], R[1, 2], R[1, 3] = 0, 1, 0, theta[1] R[2, 0], R[2, 1], R[2, 2], R[2, 3] = 0, 0, 1, theta[2] R[3, 0], R[3, 1], R[3, 2], R[3, 3] = 0, 0, 0, 1 cdef class RotationTransform2D(Transform): def __init__(self): r""" Rotation transform in 2D """ self.dim = 2 self.number_of_parameters = 1 cdef int _jacobian(self, double[:] theta, double[:] x, double[:, :] J)nogil: r''' Jacobian matrix of a 2D rotation with parameter theta, at x The transformation is given by: T(x,y) = (T1(x,y), T2(x,y)) = (x cost - y sint, x sint + y cost) The derivatives w.r.t. the rotation angle, t, are: T'(x,y) = [-x sint - y cost, # derivative of T1 w.r.t. t x cost - y sint] # derivative of T2 w.r.t. 
t Parameters ---------- theta : array, shape (1,) the rotation angle x : array, shape (2,) the point at which to compute the Jacobian J : array, shape (2, 1) the buffer in which to write the Jacobian Returns ------- is_constant : int always returns 0, indicating that the Jacobian is not constant (it depends on the value of x) ''' cdef: double st = sin(theta[0]) double ct = cos(theta[0]) double px = x[0], py = x[1] J[0, 0] = -px * st - py * ct J[1, 0] = px * ct - py * st # This Jacobian depends on x (it's not constant): return 0 return 0 cdef void _get_identity_parameters(self, double[:] theta) nogil: r""" Parameter values corresponding to the identity Sets in theta the parameter values corresponding to the identity transform Parameters ---------- theta : array, shape (1,) buffer to write the parameters of the 2D rotation transform """ theta[0] = 0 cdef void _param_to_matrix(self, double[:] theta, double[:, :] R) nogil: r""" Matrix associated with the 2D rotation transform Parameters ---------- theta : array, shape (1,) the rotation angle R : array, shape (3,3) the buffer in which to write the matrix """ cdef: double ct = cos(theta[0]) double st = sin(theta[0]) R[0, 0], R[0, 1], R[0, 2] = ct, -st, 0 R[1, 0], R[1, 1], R[1, 2] = st, ct, 0 R[2, 0], R[2, 1], R[2, 2] = 0, 0, 1 cdef class RotationTransform3D(Transform): def __init__(self): r""" Rotation transform in 3D """ self.dim = 3 self.number_of_parameters = 3 cdef int _jacobian(self, double[:] theta, double[:] x, double[:, :] J)nogil: r''' Jacobian matrix of a 3D rotation with parameters theta, at x Parameters ---------- theta : array, shape (3,) the rotation angles about the canonical axes x : array, shape (3,) the point at which to compute the Jacobian J : array, shape (3, 3) the buffer in which to write the Jacobian Returns ------- is_constant : int always returns 0, indicating that the Jacobian is not constant (it depends on the value of x) ''' cdef: double sa = sin(theta[0]) double ca = cos(theta[0]) double sb = sin(theta[1]) double cb = cos(theta[1]) double sc = sin(theta[2]) double cc = cos(theta[2]) double px = x[0], py = x[1], z = x[2] J[0, 0] = (-sc * ca * sb) * px + (sc * sa) * py + (sc * ca * cb) * z J[1, 0] = (cc * ca * sb) * px + (-cc * sa) * py + (-cc * ca * cb) * z J[2, 0] = (sa * sb) * px + ca * py + (-sa * cb) * z J[0, 1] = (-cc * sb - sc * sa * cb) * px + (cc * cb - sc * sa * sb) * z J[1, 1] = (-sc * sb + cc * sa * cb) * px + (sc * cb + cc * sa * sb) * z J[2, 1] = (-ca * cb) * px + (-ca * sb) * z J[0, 2] = (-sc * cb - cc * sa * sb) * px + (-cc * ca) * py + \ (-sc * sb + cc * sa * cb) * z J[1, 2] = (cc * cb - sc * sa * sb) * px + (-sc * ca) * py + \ (cc * sb + sc * sa * cb) * z J[2, 2] = 0 # This Jacobian depends on x (it's not constant): return 0 return 0 cdef void _get_identity_parameters(self, double[:] theta) nogil: r""" Parameter values corresponding to the identity Sets in theta the parameter values corresponding to the identity transform Parameters ---------- theta : array, shape (3,) buffer to write the parameters of the 3D rotation transform """ theta[:3] = 0 cdef void _param_to_matrix(self, double[:] theta, double[:, :] R) nogil: r""" Matrix associated with the 3D rotation transform The matrix is the product of rotation matrices of angles theta[0], theta[1], theta[2] around axes x, y, z applied in the following order: y, x, z. This order was chosen for consistency with ANTS. 
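        Explicitly, writing Rx, Ry, Rz for rotations about the canonical
        axes, the matrix assembled below is

            R = Rz(theta[2]).dot(Rx(theta[0])).dot(Ry(theta[1]))

        so Ry is applied first, then Rx, then Rz.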
Parameters ---------- theta : array, shape (3,) the rotation angles about each axis: theta[0] : rotation angle around x axis theta[1] : rotation angle around y axis theta[2] : rotation angle around z axis R : array, shape (4, 4) buffer in which to write the rotation matrix """ cdef: double sa = sin(theta[0]) double ca = cos(theta[0]) double sb = sin(theta[1]) double cb = cos(theta[1]) double sc = sin(theta[2]) double cc = cos(theta[2]) R[0,0], R[0,1], R[0,2] = cc*cb-sc*sa*sb, -sc*ca, cc*sb+sc*sa*cb R[1,0], R[1,1], R[1,2] = sc*cb+cc*sa*sb, cc*ca, sc*sb-cc*sa*cb R[2,0], R[2,1], R[2,2] = -ca*sb, sa, ca*cb R[3,0], R[3,1], R[3,2] = 0, 0, 0 R[0, 3] = 0 R[1, 3] = 0 R[2, 3] = 0 R[3, 3] = 1 cdef class RigidTransform2D(Transform): def __init__(self): r""" Rigid transform in 2D (rotation + translation) The parameter vector theta of length 3 is interpreted as follows: theta[0] : rotation angle theta[1] : translation along the x axis theta[2] : translation along the y axis """ self.dim = 2 self.number_of_parameters = 3 cdef int _jacobian(self, double[:] theta, double[:] x, double[:, :] J)nogil: r''' Jacobian matrix of a 2D rigid transform (rotation + translation) The transformation is given by: T(x,y) = (T1(x,y), T2(x,y)) = (x cost - y sint + dx, x sint + y cost + dy) The derivatives w.r.t. t, dx and dy are: T'(x,y) = [-x sint - y cost, 1, 0, # derivative of T1 w.r.t. t, dx, dy x cost - y sint, 0, 1] # derivative of T2 w.r.t. t, dx, dy Parameters ---------- theta : array, shape (3,) the parameters of the 2D rigid transform theta[0] : rotation angle (t) theta[1] : translation along the x axis (dx) theta[2] : translation along the y axis (dy) x : array, shape (2,) the point at which to compute the Jacobian J : array, shape (2, 3) the buffer in which to write the Jacobian Returns ------- is_constant : int always returns 0, indicating that the Jacobian is not constant (it depends on the value of x) ''' cdef: double st = sin(theta[0]) double ct = cos(theta[0]) double px = x[0], py = x[1] J[0, 0], J[0, 1], J[0, 2] = -px * st - py * ct, 1, 0 J[1, 0], J[1, 1], J[1, 2] = px * ct - py * st, 0, 1 # This Jacobian depends on x (it's not constant): return 0 return 0 cdef void _get_identity_parameters(self, double[:] theta) nogil: r""" Parameter values corresponding to the identity Sets in theta the parameter values corresponding to the identity transform Parameters ---------- theta : array, shape (3,) buffer to write the parameters of the 2D rigid transform theta[0] : rotation angle theta[1] : translation along the x axis theta[2] : translation along the y axis """ theta[:3] = 0 cdef void _param_to_matrix(self, double[:] theta, double[:, :] R) nogil: r""" Matrix associated with the 2D rigid transform Parameters ---------- theta : array, shape (3,) the parameters of the 2D rigid transform theta[0] : rotation angle theta[1] : translation along the x axis theta[2] : translation along the y axis R : array, shape (3, 3) buffer in which to write the rigid matrix """ cdef: double ct = cos(theta[0]) double st = sin(theta[0]) R[0, 0], R[0, 1], R[0, 2] = ct, -st, theta[1] R[1, 0], R[1, 1], R[1, 2] = st, ct, theta[2] R[2, 0], R[2, 1], R[2, 2] = 0, 0, 1 cdef class RigidTransform3D(Transform): def __init__(self): r""" Rigid transform in 3D (rotation + translation) The parameter vector theta of length 6 is interpreted as follows: theta[0] : rotation about the x axis theta[1] : rotation about the y axis theta[2] : rotation about the z axis theta[3] : translation along the x axis theta[4] : translation along the y axis theta[5] : 
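# --- Illustrative sketch (not part of dipy): 3D rotation matrix ordering ---
# The closed-form entries written above equal Rz(theta[2]) @ Rx(theta[0]) @
# Ry(theta[1]), i.e. the "y, x, z" application order mentioned in the
# docstring.  A NumPy cross-check of that factorization:
import numpy as np

def rx(a):
    c, s = np.cos(a), np.sin(a)
    return np.array([[1, 0, 0], [0, c, -s], [0, s, c]])

def ry(b):
    c, s = np.cos(b), np.sin(b)
    return np.array([[c, 0, s], [0, 1, 0], [-s, 0, c]])

def rz(g):
    c, s = np.cos(g), np.sin(g)
    return np.array([[c, -s, 0], [s, c, 0], [0, 0, 1]])

def rotation_3d(theta):
    sa, ca = np.sin(theta[0]), np.cos(theta[0])
    sb, cb = np.sin(theta[1]), np.cos(theta[1])
    sc, cc = np.sin(theta[2]), np.cos(theta[2])
    return np.array([
        [cc * cb - sc * sa * sb, -sc * ca, cc * sb + sc * sa * cb],
        [sc * cb + cc * sa * sb,  cc * ca, sc * sb - cc * sa * cb],
        [-ca * sb,                sa,      ca * cb]])

theta = np.array([0.1, -0.4, 0.7])
assert np.allclose(rotation_3d(theta),
                   rz(theta[2]).dot(rx(theta[0])).dot(ry(theta[1])))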
translation along the z axis """ self.dim = 3 self.number_of_parameters = 6 cdef int _jacobian(self, double[:] theta, double[:] x, double[:, :] J)nogil: r''' Jacobian matrix of a 3D rigid transform (rotation + translation) Parameters ---------- theta : array, shape (6,) the parameters of the 3D rigid transform theta[0] : rotation about the x axis theta[1] : rotation about the y axis theta[2] : rotation about the z axis theta[3] : translation along the x axis theta[4] : translation along the y axis theta[5] : translation along the z axis x : array, shape (3,) the point at which to compute the Jacobian J : array, shape (3, 6) the buffer in which to write the Jacobian Returns ------- is_constant : int always returns 0, indicating that the Jacobian is not constant (it depends on the value of x) ''' cdef: double sa = sin(theta[0]) double ca = cos(theta[0]) double sb = sin(theta[1]) double cb = cos(theta[1]) double sc = sin(theta[2]) double cc = cos(theta[2]) double px = x[0], py = x[1], z = x[2] J[0, 0] = (-sc * ca * sb) * px + (sc * sa) * py + (sc * ca * cb) * z J[1, 0] = (cc * ca * sb) * px + (-cc * sa) * py + (-cc * ca * cb) * z J[2, 0] = (sa * sb) * px + ca * py + (-sa * cb) * z J[0, 1] = (-cc * sb - sc * sa * cb) * px + (cc * cb - sc * sa * sb) * z J[1, 1] = (-sc * sb + cc * sa * cb) * px + (sc * cb + cc * sa * sb) * z J[2, 1] = (-ca * cb) * px + (-ca * sb) * z J[0, 2] = (-sc * cb - cc * sa * sb) * px + (-cc * ca) * py + \ (-sc * sb + cc * sa * cb) * z J[1, 2] = (cc * cb - sc * sa * sb) * px + (-sc * ca) * py + \ (cc * sb + sc * sa * cb) * z J[2, 2] = 0 J[0, 3:6] = 0 J[1, 3:6] = 0 J[2, 3:6] = 0 J[0, 3], J[1, 4], J[2, 5] = 1, 1, 1 # This Jacobian depends on x (it's not constant): return 0 return 0 cdef void _get_identity_parameters(self, double[:] theta) nogil: r""" Parameter values corresponding to the identity Sets in theta the parameter values corresponding to the identity transform Parameters ---------- theta : array, shape (6,) buffer to write the parameters of the 3D rigid transform theta[0] : rotation about the x axis theta[1] : rotation about the y axis theta[2] : rotation about the z axis theta[3] : translation along the x axis theta[4] : translation along the y axis theta[5] : translation along the z axis """ theta[:6] = 0 cdef void _param_to_matrix(self, double[:] theta, double[:, :] R) nogil: r""" Matrix associated with the 3D rigid transform Parameters ---------- theta : array, shape (6,) the parameters of the 3D rigid transform theta[0] : rotation about the x axis theta[1] : rotation about the y axis theta[2] : rotation about the z axis theta[3] : translation along the x axis theta[4] : translation along the y axis theta[5] : translation along the z axis R : array, shape (4, 4) buffer in which to write the rigid matrix """ cdef: double sa = sin(theta[0]) double ca = cos(theta[0]) double sb = sin(theta[1]) double cb = cos(theta[1]) double sc = sin(theta[2]) double cc = cos(theta[2]) double dx = theta[3] double dy = theta[4] double dz = theta[5] R[0,0], R[0,1], R[0,2] = cc*cb-sc*sa*sb, -sc*ca, cc*sb+sc*sa*cb R[1,0], R[1,1], R[1,2] = sc*cb+cc*sa*sb, cc*ca, sc*sb-cc*sa*cb R[2,0], R[2,1], R[2,2] = -ca*sb, sa, ca*cb R[3,0], R[3,1], R[3,2] = 0, 0, 0 R[0,3] = dx R[1,3] = dy R[2,3] = dz R[3,3] = 1 cdef class ScalingTransform2D(Transform): def __init__(self): r""" Scaling transform in 2D """ self.dim = 2 self.number_of_parameters = 1 cdef int _jacobian(self, double[:] theta, double[:] x, double[:, :] J)nogil: r""" Jacobian matrix of the isotropic 2D scale transform The transformation is 
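# --- Illustrative sketch (not part of dipy): assembling a 3D rigid matrix ---
# A NumPy sketch of the 4x4 matrix the 3D rigid transform builds from
# theta = (rot_x, rot_y, rot_z, dx, dy, dz): the rotation block follows the
# Rz @ Rx @ Ry convention shown earlier and the translation fills the last
# column.  The helper name is made up for illustration.
import numpy as np

def rigid_3d_matrix(theta):
    a, b, c, dx, dy, dz = theta
    sa, ca = np.sin(a), np.cos(a)
    sb, cb = np.sin(b), np.cos(b)
    sc, cc = np.sin(c), np.cos(c)
    M = np.eye(4)
    M[:3, :3] = [[cc * cb - sc * sa * sb, -sc * ca, cc * sb + sc * sa * cb],
                 [sc * cb + cc * sa * sb,  cc * ca, sc * sb - cc * sa * cb],
                 [-ca * sb,                sa,      ca * cb]]
    M[:3, 3] = [dx, dy, dz]
    return M

M = rigid_3d_matrix([0.1, 0.2, 0.3, 1.0, -2.0, 0.5])
# A rigid matrix has an orthonormal rotation block with determinant +1.
assert np.allclose(M[:3, :3].dot(M[:3, :3].T), np.eye(3))
assert np.isclose(np.linalg.det(M[:3, :3]), 1.0)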
given by: T(x) = (s*x0, s*x1) The derivative w.r.t. s is T'(x) = [x0, x1] Parameters ---------- theta : array, shape (1,) the scale factor (the Jacobian does not depend on the scale factor, but we receive the buffer to make it consistent with other Jacobian functions) x : array, shape (2,) the point at which to compute the Jacobian J : array, shape (2, 1) the buffer in which to write the Jacobian Returns ------- is_constant : int always returns 0, indicating that the Jacobian is not constant (it depends on the value of x) """ J[0, 0], J[1, 0] = x[0], x[1] # This Jacobian depends on x (it's not constant): return 0 return 0 cdef void _get_identity_parameters(self, double[:] theta) nogil: r""" Parameter values corresponding to the identity Sets in theta the parameter values corresponding to the identity transform Parameters ---------- theta : array, shape (1,) buffer to write the parameters of the 2D scale transform """ theta[0] = 1 cdef void _param_to_matrix(self, double[:] theta, double[:, :] R) nogil: r""" Matrix associated with the 2D (isotropic) scaling transform Parameters ---------- theta : array, shape (1,) the scale factor R : array, shape (3, 3) the buffer in which to write the scaling matrix """ R[0, 0], R[0, 1], R[0, 2] = theta[0], 0, 0 R[1, 0], R[1, 1], R[1, 2] = 0, theta[0], 0 R[2, 0], R[2, 1], R[2, 2] = 0, 0, 1 cdef class ScalingTransform3D(Transform): def __init__(self): r""" Scaling transform in 3D """ self.dim = 3 self.number_of_parameters = 1 cdef int _jacobian(self, double[:] theta, double[:] x, double[:, :] J)nogil: r""" Jacobian matrix of the isotropic 3D scale transform The transformation is given by: T(x) = (s*x0, s*x1, s*x2) The derivative w.r.t. s is T'(x) = [x0, x1, x2] Parameters ---------- theta : array, shape (1,) the scale factor (the Jacobian does not depend on the scale factor, but we receive the buffer to make it consistent with other Jacobian functions) x : array, shape (3,) the point at which to compute the Jacobian J : array, shape (3, 1) the buffer in which to write the Jacobian Returns ------- is_constant : int always returns 0, indicating that the Jacobian is not constant (it depends on the value of x) """ J[0, 0], J[1, 0], J[2, 0]= x[0], x[1], x[2] # This Jacobian depends on x (it's not constant): return 0 return 0 cdef void _get_identity_parameters(self, double[:] theta) nogil: r""" Parameter values corresponding to the identity Sets in theta the parameter values corresponding to the identity transform Parameters ---------- theta : array, shape (1,) buffer to write the parameters of the 3D scale transform """ theta[0] = 1 cdef void _param_to_matrix(self, double[:] theta, double[:, :] R) nogil: r""" Matrix associated with the 3D (isotropic) scaling transform Parameters ---------- theta : array, shape (1,) the scale factor R : array, shape (4, 4) the buffer in which to write the scaling matrix """ R[0, 0], R[0, 1], R[0, 2], R[0, 3] = theta[0], 0, 0, 0 R[1, 0], R[1, 1], R[1, 2], R[1, 3] = 0, theta[0], 0, 0 R[2, 0], R[2, 1], R[2, 2], R[2, 3] = 0, 0, theta[0], 0 R[3, 0], R[3, 1], R[3, 2], R[3, 3] = 0, 0, 0, 1 cdef class AffineTransform2D(Transform): def __init__(self): r""" Affine transform in 2D """ self.dim = 2 self.number_of_parameters = 6 cdef int _jacobian(self, double[:] theta, double[:] x, double[:, :] J)nogil: r""" Jacobian matrix of the 2D affine transform The transformation is given by: T(x) = |a0, a1, a2 | |x0| | T1(x) | |a0*x0 + a1*x1 + a2| |a3, a4, a5 | * |x1| = | T2(x) | = |a3*x0 + a4*x1 + a5| | 1| The derivatives w.r.t. 
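# --- Illustrative sketch (not part of dipy): isotropic scaling Jacobian ---
# For T(x) = s * x the derivative with respect to the single parameter s is
# simply x itself, which is what the (dim x 1) Jacobian buffers above contain.
# A finite-difference confirmation (the scale value itself does not enter the
# Jacobian, mirroring the docstring note):
import numpy as np

def scaling_jacobian(s, x):
    # column vector of shape (dim, 1), matching the J buffers above
    return np.asarray(x, dtype=float).reshape(-1, 1)

s, x, eps = 1.3, np.array([2.0, -1.0, 0.5]), 1e-6
numeric = (((s + eps) * x) - ((s - eps) * x)) / (2 * eps)
assert np.allclose(numeric, scaling_jacobian(s, x).ravel())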
each parameter are given by T'(x) = [[x0, 0], #derivatives of [T1, T2] w.r.t a0 [x1, 0], #derivatives of [T1, T2] w.r.t a1 [ 1, 0], #derivatives of [T1, T2] w.r.t a2 [ 0, x0], #derivatives of [T1, T2] w.r.t a3 [ 0, x1], #derivatives of [T1, T2] w.r.t a4 [ 0, 1]] #derivatives of [T1, T2, T3] w.r.t a5 The Jacobian matrix is the transpose of the above matrix. Parameters ---------- theta : array, shape (6,) the parameters of the 2D affine transform x : array, shape (2,) the point at which to compute the Jacobian J : array, shape (2, 6) the buffer in which to write the Jacobian Returns ------- is_constant : int always returns 0, indicating that the Jacobian is not constant (it depends on the value of x) """ J[0, :6] = 0 J[1, :6] = 0 J[0, :2] = x[:2] J[0, 2] = 1 J[1, 3:5] = x[:2] J[1, 5] = 1 # This Jacobian depends on x (it's not constant): return 0 return 0 cdef void _get_identity_parameters(self, double[:] theta) nogil: r""" Parameter values corresponding to the identity Sets in theta the parameter values corresponding to the identity transform Parameters ---------- theta : array, shape (6,) buffer to write the parameters of the 2D affine transform """ theta[0], theta[1], theta[2] = 1, 0, 0 theta[3], theta[4], theta[5] = 0, 1, 0 cdef void _param_to_matrix(self, double[:] theta, double[:, :] R) nogil: r""" Matrix associated with a general 2D affine transform The transformation is given by the matrix: A = [[a0, a1, a2], [a3, a4, a5], [ 0, 0, 1]] Parameters ---------- theta : array, shape (6,) the parameters of the 2D affine transform R : array, shape (3,3) the buffer in which to write the matrix """ R[0, 0], R[0, 1], R[0, 2] = theta[0], theta[1], theta[2] R[1, 0], R[1, 1], R[1, 2] = theta[3], theta[4], theta[5] R[2, 0], R[2, 1], R[2, 2] = 0, 0, 1 cdef class AffineTransform3D(Transform): def __init__(self): r""" Affine transform in 3D """ self.dim = 3 self.number_of_parameters = 12 cdef int _jacobian(self, double[:] theta, double[:] x, double[:, :] J)nogil: r""" Jacobian matrix of the 3D affine transform The transformation is given by: T(x)= |a0, a1, a2, a3 | |x0| | T1(x) | |a0*x0 + a1*x1 + a2*x2 + a3| |a4, a5, a6, a7 |* |x1|= | T2(x) |= |a4*x0 + a5*x1 + a6*x2 + a7| |a8, a9, a10, a11| |x2| | T3(x) | |a8*x0 + a9*x1 + a10*x2+a11| | 1| The derivatives w.r.t. each parameter are given by T'(x) = [[x0, 0, 0], #derivatives of [T1, T2, T3] w.r.t a0 [x1, 0, 0], #derivatives of [T1, T2, T3] w.r.t a1 [x2, 0, 0], #derivatives of [T1, T2, T3] w.r.t a2 [ 1, 0, 0], #derivatives of [T1, T2, T3] w.r.t a3 [ 0, x0, 0], #derivatives of [T1, T2, T3] w.r.t a4 [ 0, x1, 0], #derivatives of [T1, T2, T3] w.r.t a5 [ 0, x2, 0], #derivatives of [T1, T2, T3] w.r.t a6 [ 0, 1, 0], #derivatives of [T1, T2, T3] w.r.t a7 [ 0, 0, x0], #derivatives of [T1, T2, T3] w.r.t a8 [ 0, 0, x1], #derivatives of [T1, T2, T3] w.r.t a9 [ 0, 0, x2], #derivatives of [T1, T2, T3] w.r.t a10 [ 0, 0, 1]] #derivatives of [T1, T2, T3] w.r.t a11 The Jacobian matrix is the transpose of the above matrix. 
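# --- Illustrative sketch (not part of dipy): 2D affine Jacobian layout ---
# The 2x6 Jacobian filled in above has the block structure
#   [[x0, x1, 1,  0,  0, 0],
#    [ 0,  0, 0, x0, x1, 1]]
# A finite-difference check of T(x) = (a0*x0 + a1*x1 + a2, a3*x0 + a4*x1 + a5):
import numpy as np

def affine2d_apply(theta, x):
    A = np.array(theta, dtype=float).reshape(2, 3)
    return A.dot(np.append(x, 1.0))

def affine2d_jacobian(theta, x):
    x0, x1 = x
    return np.array([[x0, x1, 1.0, 0.0, 0.0, 0.0],
                     [0.0, 0.0, 0.0, x0, x1, 1.0]])

theta = np.array([1.1, 0.2, -0.3, 0.1, 0.9, 2.0])
x, eps = np.array([1.5, -2.0]), 1e-6
numeric = np.empty((2, 6))
for k in range(6):
    dp, dm = theta.copy(), theta.copy()
    dp[k] += eps
    dm[k] -= eps
    numeric[:, k] = (affine2d_apply(dp, x) - affine2d_apply(dm, x)) / (2 * eps)
assert np.allclose(numeric, affine2d_jacobian(theta, x), atol=1e-6)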
Parameters ---------- theta : array, shape (12,) the parameters of the 3D affine transform x : array, shape (3,) the point at which to compute the Jacobian J : array, shape (3, 12) the buffer in which to write the Jacobian Returns ------- is_constant : int always returns 0, indicating that the Jacobian is not constant (it depends on the value of x) """ cdef: cnp.npy_intp j for j in range(3): J[j, :12] = 0 J[0, :3] = x[:3] J[0, 3] = 1 J[1, 4:7] = x[:3] J[1, 7] = 1 J[2, 8:11] = x[:3] J[2, 11] = 1 # This Jacobian depends on x (it's not constant): return 0 return 0 cdef void _get_identity_parameters(self, double[:] theta) nogil: r""" Parameter values corresponding to the identity Sets in theta the parameter values corresponding to the identity transform Parameters ---------- theta : array, shape (12,) buffer to write the parameters of the 3D affine transform """ theta[0], theta[1], theta[2], theta[3] = 1, 0, 0, 0 theta[4], theta[5], theta[6], theta[7] = 0, 1, 0, 0 theta[8], theta[9], theta[10], theta[11] = 0, 0, 1, 0 cdef void _param_to_matrix(self, double[:] theta, double[:, :] R) nogil: r""" Matrix associated with a general 3D affine transform The transformation is given by the matrix: A = [[a0, a1, a2, a3], [a4, a5, a6, a7], [a8, a9, a10, a11], [ 0, 0, 0, 1]] Parameters ---------- theta : array, shape (12,) the parameters of the 3D affine transform R : array, shape (4,4) the buffer in which to write the matrix """ R[0, 0], R[0, 1], R[0, 2] = theta[0], theta[1], theta[2] R[1, 0], R[1, 1], R[1, 2] = theta[4], theta[5], theta[6] R[2, 0], R[2, 1], R[2, 2] = theta[8], theta[9], theta[10] R[3, 0], R[3, 1], R[3, 2] = 0, 0, 0 R[0, 3] = theta[3] R[1, 3] = theta[7] R[2, 3] = theta[11] R[3, 3] = 1 regtransforms = {} regtransforms [('TRANSLATION', 2)] = TranslationTransform2D() regtransforms [('TRANSLATION', 3)] = TranslationTransform3D() regtransforms [('ROTATION', 2)] = RotationTransform2D() regtransforms [('ROTATION', 3)] = RotationTransform3D() regtransforms [('RIGID', 2)] = RigidTransform2D() regtransforms [('RIGID', 3)] = RigidTransform3D() regtransforms [('SCALING', 2)] = ScalingTransform2D() regtransforms [('SCALING', 3)] = ScalingTransform3D() regtransforms [('AFFINE', 2)] = AffineTransform2D() regtransforms [('AFFINE', 3)] = AffineTransform3D() dipy-0.13.0/dipy/align/vector_fields.pxd000066400000000000000000000040101317371701200201410ustar00rootroot00000000000000#!python #cython: boundscheck=False #cython: wraparound=False #cython: cdivision=True cdef inline double _apply_affine_3d_x0(double x0, double x1, double x2, double h, double[:, :] aff) nogil: r"""Multiplies aff by (x0, x1, x2, h), returns the 1st element of product Returns the first component of the product of the homogeneous matrix aff by (x0, x1, x2, h) """ return aff[0, 0] * x0 + aff[0, 1] * x1 + aff[0, 2] * x2 + h*aff[0, 3] cdef inline double _apply_affine_3d_x1(double x0, double x1, double x2, double h, double[:, :] aff) nogil: r"""Multiplies aff by (x0, x1, x2, h), returns the 2nd element of product Returns the first component of the product of the homogeneous matrix aff by (x0, x1, x2, h) """ return aff[1, 0] * x0 + aff[1, 1] * x1 + aff[1, 2] * x2 + h*aff[1, 3] cdef inline double _apply_affine_3d_x2(double x0, double x1, double x2, double h, double[:, :] aff) nogil: r"""Multiplies aff by (x0, x1, x2, h), returns the 3d element of product Returns the first component of the product of the homogeneous matrix aff by (x0, x1, x2, h) """ return aff[2, 0] * x0 + aff[2, 1] * x1 + aff[2, 2] * x2 + h*aff[2, 3] cdef inline double 
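# --- Illustrative sketch (not part of dipy): homogeneous point vs. vector ---
# The _apply_affine_* helpers below take an explicit homogeneous coordinate h:
# h=1 applies the full affine (linear block plus translation) to a point,
# while h=0 drops the translation column, which is what re-orienting a
# displacement *vector* requires.  The helper name is made up for illustration.
import numpy as np

def apply_affine_3d(aff, v, h):
    # aff is a 4x4 homogeneous matrix; returns the first three components
    return aff[:3, :3].dot(v) + h * aff[:3, 3]

aff = np.eye(4)
aff[:3, :3] = np.diag([2.0, 2.0, 2.0])
aff[:3, 3] = [10.0, 0.0, -5.0]
p = np.array([1.0, 2.0, 3.0])
assert np.allclose(apply_affine_3d(aff, p, 1.0), 2.0 * p + [10.0, 0.0, -5.0])
assert np.allclose(apply_affine_3d(aff, p, 0.0), 2.0 * p)  # translation ignored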
_apply_affine_2d_x0(double x0, double x1, double h, double[:, :] aff) nogil: r"""Multiplies aff by (x0, x1, h), returns the 1st element of product Returns the first component of the product of the homogeneous matrix aff by (x0, x1, h) """ return aff[0, 0] * x0 + aff[0, 1] * x1 + h*aff[0, 2] cdef inline double _apply_affine_2d_x1(double x0, double x1, double h, double[:, :] aff) nogil: r"""Multiplies aff by (x0, x1, h), returns the 2nd element of product Returns the first component of the product of the homogeneous matrix aff by (x0, x1, h) """ return aff[1, 0] * x0 + aff[1, 1] * x1 + h*aff[1, 2] dipy-0.13.0/dipy/align/vector_fields.pyx000066400000000000000000004077601317371701200202110ustar00rootroot00000000000000#!python #cython: boundscheck=False #cython: wraparound=False #cython: cdivision=True import numpy as np cimport numpy as cnp cimport cython from .fused_types cimport floating, number cdef extern from "dpy_math.h" nogil: double floor(double) double sqrt(double) double cos(double) double atan2(double, double) def is_valid_affine(double[:, :] M, int dim): if M is None: return True if M.shape[0] < dim or M.shape[1] < dim + 1: return False if not np.all(np.isfinite(M)): return False return True def interpolate_vector_2d(floating[:, :, :] field, double[:, :] locations): r"""Bilinear interpolation of a 2D vector field Interpolates the 2D vector field at the given locations. This function is a wrapper for _interpolate_vector_2d for testing purposes, it is equivalent to using scipy.ndimage.interpolation.map_coordinates with bilinear interpolation at each vector component Parameters ---------- field : array, shape (S, R, 2) the 2D vector field to be interpolated locations : array, shape (n, 2) (locations[i,0], locations[i,1]), 0<=i= nr) or (djj >= nc)): out[0] = 0 out[1] = 0 return 0 # ---top-left ii = floor(dii) jj = floor(djj) calpha = dii - ii cbeta = djj - jj alpha = 1 - calpha beta = 1 - cbeta inside = 0 if (ii >= 0) and (jj >= 0): out[0] = alpha * beta * field[ii, jj, 0] out[1] = alpha * beta * field[ii, jj, 1] inside += 1 else: out[0] = 0 out[1] = 0 # ---top-right jj += 1 if (jj < nc) and (ii >= 0): out[0] += alpha * cbeta * field[ii, jj, 0] out[1] += alpha * cbeta * field[ii, jj, 1] inside += 1 # ---bottom-right ii += 1 if (jj < nc) and (ii < nr): out[0] += calpha * cbeta * field[ii, jj, 0] out[1] += calpha * cbeta * field[ii, jj, 1] inside += 1 # ---bottom-left jj -= 1 if (jj >= 0) and (ii < nr): out[0] += calpha * beta * field[ii, jj, 0] out[1] += calpha * beta * field[ii, jj, 1] inside += 1 return 1 if inside == 4 else 0 def interpolate_scalar_2d(floating[:, :] image, double[:, :] locations): r"""Bilinear interpolation of a 2D scalar image Interpolates the 2D image at the given locations. 
This function is a wrapper for _interpolate_scalar_2d for testing purposes, it is equivalent to scipy.ndimage.interpolation.map_coordinates with bilinear interpolation Parameters ---------- field : array, shape (S, R) the 2D image to be interpolated locations : array, shape (n, 2) (locations[i,0], locations[i,1]), 0<=i= nr) or (djj >= nc)): out[0] = 0 return 0 # ---top-left ii = floor(dii) jj = floor(djj) calpha = dii - ii cbeta = djj - jj alpha = 1 - calpha beta = 1 - cbeta inside = 0 if (ii >= 0) and (jj >= 0): out[0] = alpha * beta * image[ii, jj] inside += 1 else: out[0] = 0 # ---top-right jj += 1 if (jj < nc) and (ii >= 0): out[0] += alpha * cbeta * image[ii, jj] inside += 1 # ---bottom-right ii += 1 if (jj < nc) and (ii < nr): out[0] += calpha * cbeta * image[ii, jj] inside += 1 # ---bottom-left jj -= 1 if (jj >= 0) and (ii < nr): out[0] += calpha * beta * image[ii, jj] inside += 1 return 1 if inside == 4 else 0 def interpolate_scalar_nn_2d(number[:, :] image, double[:, :] locations): r"""Nearest neighbor interpolation of a 2D scalar image Interpolates the 2D image at the given locations. This function is a wrapper for _interpolate_scalar_nn_2d for testing purposes, it is equivalent to scipy.ndimage.interpolation.map_coordinates with nearest neighbor interpolation Parameters ---------- image : array, shape (S, R) the 2D image to be interpolated locations : array, shape (n, 2) (locations[i,0], locations[i,1]), 0<=i nr - 1) or (djj > nc - 1)): out[0] = 0 return 0 # find the top left index and the interpolation coefficients ii = floor(dii) jj = floor(djj) # no one is affected if((ii < 0) or (jj < 0) or (ii >= nr) or (jj >= nc)): out[0] = 0 return 0 calpha = dii - ii # by definition these factors are nonnegative cbeta = djj - jj alpha = 1 - calpha beta = 1 - cbeta if(alpha < calpha): ii += 1 if(beta < cbeta): jj += 1 # no one is affected if((ii < 0) or (jj < 0) or (ii >= nr) or (jj >= nc)): out[0] = 0 return 0 out[0] = image[ii, jj] return 1 def interpolate_scalar_nn_3d(number[:, :, :] image, double[:, :] locations): r"""Nearest neighbor interpolation of a 3D scalar image Interpolates the 3D image at the given locations. This function is a wrapper for _interpolate_scalar_nn_3d for testing purposes, it is equivalent to scipy.ndimage.interpolation.map_coordinates with nearest neighbor interpolation Parameters ---------- image : array, shape (S, R, C) the 3D image to be interpolated locations : array, shape (n, 3) (locations[i,0], locations[i,1], locations[i,2), 0<=ifloor(dkk) ii = floor(dii) jj = floor(djj) # no one is affected if not ((0 <= kk < ns) and (0 <= ii < nr) and (0 <= jj < nc)): out[0] = 0 return 0 cgamma = dkk - kk calpha = dii - ii cbeta = djj - jj alpha = 1 - calpha beta = 1 - cbeta gamma = 1 - cgamma if(gamma < cgamma): kk += 1 if(alpha < calpha): ii += 1 if(beta < cbeta): jj += 1 # no one is affected if not ((0 <= kk < ns) and (0 <= ii < nr) and (0 <= jj < nc)): out[0] = 0 return 0 out[0] = volume[kk, ii, jj] return 1 def interpolate_scalar_3d(floating[:, :, :] image, locations): r"""Trilinear interpolation of a 3D scalar image Interpolates the 3D image at the given locations. 
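# --- Illustrative sketch (not part of dipy): bilinear interpolation ---
# The weights used by _interpolate_scalar_2d above (alpha/beta and their
# complements around the top-left lattice neighbour) are standard bilinear
# interpolation; for interior points this agrees with
# scipy.ndimage.map_coordinates(..., order=1), as the docstrings claim.
import numpy as np
from scipy.ndimage import map_coordinates

def bilinear(image, di, dj):
    i, j = int(np.floor(di)), int(np.floor(dj))
    ca, cb = di - i, dj - j            # calpha, cbeta in the code above
    a, b = 1.0 - ca, 1.0 - cb          # alpha, beta
    return (a * b * image[i, j] + a * cb * image[i, j + 1] +
            ca * cb * image[i + 1, j + 1] + ca * b * image[i + 1, j])

rng = np.random.RandomState(0)
img = rng.rand(6, 7)
di, dj = 2.3, 4.8
expected = map_coordinates(img, np.array([[di], [dj]]), order=1)[0]
assert np.allclose(bilinear(img, di, dj), expected)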
This function is a wrapper for _interpolate_scalar_3d for testing purposes, it is equivalent to scipy.ndimage.interpolation.map_coordinates with trilinear interpolation Parameters ---------- field : array, shape (S, R, C) the 3D image to be interpolated locations : array, shape (n, 3) (locations[i,0], locations[i,1], locations[i,2), 0<=ifloor(dkk) ii = floor(dii) jj = floor(djj) # no one is affected cgamma = dkk - kk calpha = dii - ii cbeta = djj - jj alpha = 1 - calpha beta = 1 - cbeta gamma = 1 - cgamma inside = 0 # ---top-left if (ii >= 0) and (jj >= 0) and (kk >= 0): out[0] = alpha * beta * gamma * volume[kk, ii, jj] inside += 1 else: out[0] = 0 # ---top-right jj += 1 if (ii >= 0) and (jj < nc) and (kk >= 0): out[0] += alpha * cbeta * gamma * volume[kk, ii, jj] inside += 1 # ---bottom-right ii += 1 if (ii < nr) and (jj < nc) and (kk >= 0): out[0] += calpha * cbeta * gamma * volume[kk, ii, jj] inside += 1 # ---bottom-left jj -= 1 if (ii < nr) and (jj >= 0) and (kk >= 0): out[0] += calpha * beta * gamma * volume[kk, ii, jj] inside += 1 kk += 1 if(kk < ns): ii -= 1 if (ii >= 0) and (jj >= 0): out[0] += alpha * beta * cgamma * volume[kk, ii, jj] inside += 1 jj += 1 if (ii >= 0) and (jj < nc): out[0] += alpha * cbeta * cgamma * volume[kk, ii, jj] inside += 1 # ---bottom-right ii += 1 if (ii < nr) and (jj < nc): out[0] += calpha * cbeta * cgamma * volume[kk, ii, jj] inside += 1 # ---bottom-left jj -= 1 if (ii < nr) and (jj >= 0): out[0] += calpha * beta * cgamma * volume[kk, ii, jj] inside += 1 return 1 if inside == 8 else 0 def interpolate_vector_3d(floating[:, :, :, :] field, double[:, :] locations): r"""Trilinear interpolation of a 3D vector field Interpolates the 3D vector field at the given locations. This function is a wrapper for _interpolate_vector_3d for testing purposes, it is equivalent to using scipy.ndimage.interpolation.map_coordinates with trilinear interpolation at each vector component Parameters ---------- field : array, shape (S, R, C, 3) the 3D vector field to be interpolated locations : array, shape (n, 3) (locations[i,0], locations[i,1], locations[i,2), 0<=ifloor(dkk) ii = floor(dii) jj = floor(djj) cgamma = dkk - kk calpha = dii - ii cbeta = djj - jj alpha = 1 - calpha beta = 1 - cbeta gamma = 1 - cgamma inside = 0 if (ii >= 0) and (jj >= 0) and (kk >= 0): out[0] = alpha * beta * gamma * field[kk, ii, jj, 0] out[1] = alpha * beta * gamma * field[kk, ii, jj, 1] out[2] = alpha * beta * gamma * field[kk, ii, jj, 2] inside += 1 else: out[0] = 0 out[1] = 0 out[2] = 0 # ---top-right jj += 1 if (jj < nc) and (ii >= 0) and (kk >= 0): out[0] += alpha * cbeta * gamma * field[kk, ii, jj, 0] out[1] += alpha * cbeta * gamma * field[kk, ii, jj, 1] out[2] += alpha * cbeta * gamma * field[kk, ii, jj, 2] inside += 1 # ---bottom-right ii += 1 if (jj < nc) and (ii < nr) and (kk >= 0): out[0] += calpha * cbeta * gamma * field[kk, ii, jj, 0] out[1] += calpha * cbeta * gamma * field[kk, ii, jj, 1] out[2] += calpha * cbeta * gamma * field[kk, ii, jj, 2] inside += 1 # ---bottom-left jj -= 1 if (jj >= 0) and (ii < nr) and (kk >= 0): out[0] += calpha * beta * gamma * field[kk, ii, jj, 0] out[1] += calpha * beta * gamma * field[kk, ii, jj, 1] out[2] += calpha * beta * gamma * field[kk, ii, jj, 2] inside += 1 kk += 1 if (kk < ns): ii -= 1 if (jj >= 0) and (ii >= 0): out[0] += alpha * beta * cgamma * field[kk, ii, jj, 0] out[1] += alpha * beta * cgamma * field[kk, ii, jj, 1] out[2] += alpha * beta * cgamma * field[kk, ii, jj, 2] inside += 1 jj += 1 if (jj < nc) and (ii >= 0): out[0] += alpha * 
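# --- Illustrative sketch (not part of dipy): trilinear interpolation ---
# Same idea in 3D: _interpolate_scalar_3d blends the 8 lattice neighbours with
# products of (alpha, beta, gamma) weights, which for interior points matches
# scipy.ndimage.map_coordinates(volume, ..., order=1).
import numpy as np
from scipy.ndimage import map_coordinates

rng = np.random.RandomState(1)
vol = rng.rand(5, 6, 7)
k, i, j = 1.4, 2.7, 3.2                       # slice, row, column coordinates
w = np.array([k, i, j]) - np.floor([k, i, j])
k0, i0, j0 = np.floor([k, i, j]).astype(int)
manual = 0.0
for dk in (0, 1):
    for di in (0, 1):
        for dj in (0, 1):
            weight = ((w[0] if dk else 1 - w[0]) *
                      (w[1] if di else 1 - w[1]) *
                      (w[2] if dj else 1 - w[2]))
            manual += weight * vol[k0 + dk, i0 + di, j0 + dj]
assert np.allclose(manual,
                   map_coordinates(vol, np.array([[k], [i], [j]]), order=1)[0])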
cbeta * cgamma * field[kk, ii, jj, 0] out[1] += alpha * cbeta * cgamma * field[kk, ii, jj, 1] out[2] += alpha * cbeta * cgamma * field[kk, ii, jj, 2] inside += 1 # ---bottom-right ii += 1 if (jj < nc) and (ii < nr): out[0] += calpha * cbeta * cgamma * field[kk, ii, jj, 0] out[1] += calpha * cbeta * cgamma * field[kk, ii, jj, 1] out[2] += calpha * cbeta * cgamma * field[kk, ii, jj, 2] inside += 1 # ---bottom-left jj -= 1 if (jj >= 0) and (ii < nr): out[0] += calpha * beta * cgamma * field[kk, ii, jj, 0] out[1] += calpha * beta * cgamma * field[kk, ii, jj, 1] out[2] += calpha * beta * cgamma * field[kk, ii, jj, 2] inside += 1 return 1 if inside == 8 else 0 cdef void _compose_vector_fields_2d(floating[:, :, :] d1, floating[:, :, :] d2, double[:, :] premult_index, double[:, :] premult_disp, double time_scaling, floating[:, :, :] comp, double[:] stats) nogil: r"""Computes the composition of two 2D displacement fields Computes the composition of the two 2-D displacements d1 and d2. The evaluation of d2 at non-lattice points is computed using tri-linear interpolation. The actual composition is computed as: comp[i] = d1[i] + t * d2[ A * i + B * d1[i] ] where t = time_scaling, A = premult_index and B=premult_disp and i denotes the voxel coordinates of a voxel in d1's grid. Using this parameters it is possible to compose vector fields with arbitrary discretization: let R and S be the voxel-to-space transformation associated to d1 and d2, respectively then the composition at a voxel with coordinates i in d1's grid is given by: comp[i] = d1[i] + R*i + d2[Sinv*(R*i + d1[i])] - R*i (the R*i terms cancel each other) where Sinv = S^{-1} we can then define A = Sinv * R and B = Sinv to compute the composition using this function. Parameters ---------- d1 : array, shape (R, C, 2) first displacement field to be applied. R, C are the number of rows and columns of the displacement field, respectively. d2 : array, shape (R', C', 2) second displacement field to be applied. R', C' are the number of rows and columns of the displacement field, respectively. premult_index : array, shape (3, 3) the matrix A in the explanation above premult_disp : array, shape (3, 3) the matrix B in the explanation above time_scaling : float this corresponds to the time scaling 't' in the above explanation comp : array, shape (R, C, 2), same dimension as d1 on output, this array will contain the composition of the two fields stats : array, shape (3,) on output, this array will contain three statistics of the vector norms of the composition (maximum, mean, standard_deviation) Returns ------- comp : array, shape (R, C, 2), same dimension as d1 on output, this array will contain the composition of the two fields stats : array, shape (3,) on output, this array will contain three statistics of the vector norms of the composition (maximum, mean, standard_deviation) Notes ----- If d1[r,c] lies outside the domain of d2, then comp[r,c] will contain a zero vector. Warning: it is possible to use the same array reference for d1 and comp to effectively update d1 to the composition of d1 and d2 because previously updated values from d1 are no longer used (this is done to save memory and time). However, using the same array for d2 and comp may not be the intended operation (see comment below). 
""" cdef: cnp.npy_intp nr1 = d1.shape[0] cnp.npy_intp nc1 = d1.shape[1] cnp.npy_intp nr2 = d2.shape[0] cnp.npy_intp nc2 = d2.shape[1] int inside, cnt = 0 double maxNorm = 0 double meanNorm = 0 double stdNorm = 0 double nn cnp.npy_intp i, j double di, dj, dii, djj, diii, djjj for i in range(nr1): for j in range(nc1): # This is the only place we access d1[i, j] dii = d1[i, j, 0] djj = d1[i, j, 1] if premult_disp is None: di = dii dj = djj else: di = _apply_affine_2d_x0(dii, djj, 0, premult_disp) dj = _apply_affine_2d_x1(dii, djj, 0, premult_disp) if premult_index is None: diii = i djjj = j else: diii = _apply_affine_2d_x0(i, j, 1, premult_index) djjj = _apply_affine_2d_x1(i, j, 1, premult_index) diii += di djjj += dj # If d1 and comp are the same array, this will correctly update # d1[i,j], which will never be accessed again # If d2 and comp are the same array, then (diii, djjj) may be # in the neighborhood of a previously updated vector from d2, # which may be problematic inside = _interpolate_vector_2d[floating](d2, diii, djjj, &comp[i, j, 0]) if inside == 1: comp[i, j, 0] = time_scaling * comp[i, j, 0] + dii comp[i, j, 1] = time_scaling * comp[i, j, 1] + djj nn = comp[i, j, 0] ** 2 + comp[i, j, 1] ** 2 meanNorm += nn stdNorm += nn * nn cnt += 1 if(maxNorm < nn): maxNorm = nn else: comp[i, j, 0] = 0 comp[i, j, 1] = 0 meanNorm /= cnt stats[0] = sqrt(maxNorm) stats[1] = sqrt(meanNorm) stats[2] = sqrt(stdNorm / cnt - meanNorm * meanNorm) def compose_vector_fields_2d(floating[:, :, :] d1, floating[:, :, :] d2, double[:, :] premult_index, double[:, :] premult_disp, double time_scaling, floating[:, :, :] comp): r"""Computes the composition of two 2D displacement fields Computes the composition of the two 2-D displacements d1 and d2. The evaluation of d2 at non-lattice points is computed using tri-linear interpolation. The actual composition is computed as: comp[i] = d1[i] + t * d2[ A * i + B * d1[i] ] where t = time_scaling, A = premult_index and B=premult_disp and i denotes the voxel coordinates of a voxel in d1's grid. Using this parameters it is possible to compose vector fields with arbitrary discretizations: let R and S be the voxel-to-space transformation associated to d1 and d2, respectively then the composition at a voxel with coordinates i in d1's grid is given by: comp[i] = d1[i] + R*i + d2[Sinv*(R*i + d1[i])] - R*i (the R*i terms cancel each other) where Sinv = S^{-1} we can then define A = Sinv * R and B = Sinv to compute the composition using this function. Parameters ---------- d1 : array, shape (R, C, 2) first displacement field to be applied. R, C are the number of rows and columns of the displacement field, respectively. d2 : array, shape (R', C', 2) second displacement field to be applied. R', C' are the number of rows and columns of the displacement field, respectively. premult_index : array, shape (3, 3) the matrix A in the explanation above premult_disp : array, shape (3, 3) the matrix B in the explanation above time_scaling : float this corresponds to the time scaling 't' in the above explanation comp : array, shape (R, C, 2) the buffer to write the composition to. 
If None, the buffer is created internally Returns ------- comp : array, shape (R, C, 2), same dimension as d1 on output, this array will contain the composition of the two fields stats : array, shape (3,) on output, this array will contain three statistics of the vector norms of the composition (maximum, mean, standard_deviation) """ cdef: double[:] stats = np.zeros(shape=(3,), dtype=np.float64) if comp is None: comp = np.zeros_like(d1) if not is_valid_affine(premult_index, 2): raise ValueError("Invalid index multiplication matrix") if not is_valid_affine(premult_disp, 2): raise ValueError("Invalid displacement multiplication matrix") _compose_vector_fields_2d[floating](d1, d2, premult_index, premult_disp, time_scaling, comp, stats) return np.asarray(comp), np.asarray(stats) cdef void _compose_vector_fields_3d(floating[:, :, :, :] d1, floating[:, :, :, :] d2, double[:, :] premult_index, double[:, :] premult_disp, double t, floating[:, :, :, :] comp, double[:] stats) nogil: r"""Computes the composition of two 3D displacement fields Computes the composition of the two 3-D displacements d1 and d2. The evaluation of d2 at non-lattice points is computed using tri-linear interpolation. The actual composition is computed as: comp[i] = d1[i] + t * d2[ A * i + B * d1[i] ] where t = time_scaling, A = premult_index and B=premult_disp and i denotes the voxel coordinates of a voxel in d1's grid. Using this parameters it is possible to compose vector fields with arbitrary discretization: let R and S be the voxel-to-space transformation associated to d1 and d2, respectively then the composition at a voxel with coordinates i in d1's grid is given by: comp[i] = d1[i] + R*i + d2[Sinv*(R*i + d1[i])] - R*i (the R*i terms cancel each other) where Sinv = S^{-1} we can then define A = Sinv * R and B = Sinv to compute the composition using this function. Parameters ---------- d1 : array, shape (S, R, C, 3) first displacement field to be applied. S, R, C are the number of slices, rows and columns of the displacement field, respectively. d2 : array, shape (S', R', C', 3) second displacement field to be applied. R', C' are the number of rows and columns of the displacement field, respectively. premult_index : array, shape (4, 4) the matrix A in the explanation above premult_disp : array, shape (4, 4) the matrix B in the explanation above time_scaling : float this corresponds to the time scaling 't' in the above explanation comp : array, shape (S, R, C, 3), same dimension as d1 on output, this array will contain the composition of the two fields stats : array, shape (3,) on output, this array will contain three statistics of the vector norms of the composition (maximum, mean, standard_deviation) Returns ------- comp : array, shape (S, R, C, 3), same dimension as d1 on output, this array will contain the composition of the two fields stats : array, shape (3,) on output, this array will contain three statistics of the vector norms of the composition (maximum, mean, standard_deviation) Notes ----- If d1[s,r,c] lies outside the domain of d2, then comp[s,r,c] will contain a zero vector. Warning: it is possible to use the same array reference for d1 and comp to effectively update d1 to the composition of d1 and d2 because previously updated values from d1 are no longer used (this is done to save memory and time). However, using the same array for d2 and comp may not be the intended operation (see comment below). 
""" cdef: cnp.npy_intp ns1 = d1.shape[0] cnp.npy_intp nr1 = d1.shape[1] cnp.npy_intp nc1 = d1.shape[2] cnp.npy_intp ns2 = d2.shape[0] cnp.npy_intp nr2 = d2.shape[1] cnp.npy_intp nc2 = d2.shape[2] int inside, cnt = 0 double maxNorm = 0 double meanNorm = 0 double stdNorm = 0 double nn cnp.npy_intp i, j, k double di, dj, dk, dii, djj, dkk, diii, djjj, dkkk for k in range(ns1): for i in range(nr1): for j in range(nc1): # This is the only place we access d1[k, i, j] dkk = d1[k, i, j, 0] dii = d1[k, i, j, 1] djj = d1[k, i, j, 2] if premult_disp is None: dk = dkk di = dii dj = djj else: dk = _apply_affine_3d_x0(dkk, dii, djj, 0, premult_disp) di = _apply_affine_3d_x1(dkk, dii, djj, 0, premult_disp) dj = _apply_affine_3d_x2(dkk, dii, djj, 0, premult_disp) if premult_index is None: dkkk = k diii = i djjj = j else: dkkk = _apply_affine_3d_x0(k, i, j, 1, premult_index) diii = _apply_affine_3d_x1(k, i, j, 1, premult_index) djjj = _apply_affine_3d_x2(k, i, j, 1, premult_index) dkkk += dk diii += di djjj += dj # If d1 and comp are the same array, this will correctly update # d1[k,i,j], which will never be accessed again # If d2 and comp are the same array, then (dkkk, diii, djjj) # may be in the neighborhood of a previously updated vector # from d2, which may be problematic inside = _interpolate_vector_3d[floating](d2, dkkk, diii, djjj, &comp[k, i, j, 0]) if inside == 1: comp[k, i, j, 0] = t * comp[k, i, j, 0] + dkk comp[k, i, j, 1] = t * comp[k, i, j, 1] + dii comp[k, i, j, 2] = t * comp[k, i, j, 2] + djj nn = (comp[k, i, j, 0] ** 2 + comp[k, i, j, 1] ** 2 + comp[k, i, j, 2]**2) meanNorm += nn stdNorm += nn * nn cnt += 1 if(maxNorm < nn): maxNorm = nn else: comp[k, i, j, 0] = 0 comp[k, i, j, 1] = 0 comp[k, i, j, 2] = 0 meanNorm /= cnt stats[0] = sqrt(maxNorm) stats[1] = sqrt(meanNorm) stats[2] = sqrt(stdNorm / cnt - meanNorm * meanNorm) def compose_vector_fields_3d(floating[:, :, :, :] d1, floating[:, :, :, :] d2, double[:, :] premult_index, double[:, :] premult_disp, double time_scaling, floating[:, :, :, :] comp): r"""Computes the composition of two 3D displacement fields Computes the composition of the two 3-D displacements d1 and d2. The evaluation of d2 at non-lattice points is computed using tri-linear interpolation. The actual composition is computed as: comp[i] = d1[i] + t * d2[ A * i + B * d1[i] ] where t = time_scaling, A = premult_index and B=premult_disp and i denotes the voxel coordinates of a voxel in d1's grid. Using this parameters it is possible to compose vector fields with arbitrary discretization: let R and S be the voxel-to-space transformation associated to d1 and d2, respectively then the composition at a voxel with coordinates i in d1's grid is given by: comp[i] = d1[i] + R*i + d2[Sinv*(R*i + d1[i])] - R*i (the R*i terms cancel each other) where Sinv = S^{-1} we can then define A = Sinv * R and B = Sinv to compute the composition using this function. Parameters ---------- d1 : array, shape (S, R, C, 3) first displacement field to be applied. S, R, C are the number of slices, rows and columns of the displacement field, respectively. d2 : array, shape (S', R', C', 3) second displacement field to be applied. R', C' are the number of rows and columns of the displacement field, respectively. 
premult_index : array, shape (4, 4) the matrix A in the explanation above premult_disp : array, shape (4, 4) the matrix B in the explanation above time_scaling : float this corresponds to the time scaling 't' in the above explanation comp : array, shape (S, R, C, 3), same dimension as d1 the buffer to write the composition to. If None, the buffer will be created internally Returns ------- comp : array, shape (S, R, C, 3), same dimension as d1 on output, this array will contain the composition of the two fields stats : array, shape (3,) on output, this array will contain three statistics of the vector norms of the composition (maximum, mean, standard_deviation) Notes ----- If d1[s,r,c] lies outside the domain of d2, then comp[s,r,c] will contain a zero vector. """ cdef: double[:] stats = np.zeros(shape=(3,), dtype=np.float64) if comp is None: comp = np.zeros_like(d1) if not is_valid_affine(premult_index, 3): raise ValueError("Invalid index pre-multiplication matrix") if not is_valid_affine(premult_disp, 3): raise ValueError("Invalid displacement pre-multiplication matrix") _compose_vector_fields_3d[floating](d1, d2, premult_index, premult_disp, time_scaling, comp, stats) return np.asarray(comp), np.asarray(stats) def invert_vector_field_fixed_point_2d(floating[:, :, :] d, double[:, :] d_world2grid, double[:] spacing, int max_iter, double tolerance, floating[:, :, :] start=None): r"""Computes the inverse of a 2D displacement fields Computes the inverse of the given 2-D displacement field d using the fixed-point algorithm [1]. [1] Chen, M., Lu, W., Chen, Q., Ruchala, K. J., & Olivera, G. H. (2008). A simple fixed-point approach to invert a deformation field. Medical Physics, 35(1), 81. doi:10.1118/1.2816107 Parameters ---------- d : array, shape (R, C, 2) the 2-D displacement field to be inverted d_world2grid : array, shape (3, 3) the space-to-grid transformation associated to the displacement field d (transforming physical space coordinates to voxel coordinates of the displacement field grid) spacing :array, shape (2,) the spacing between voxels (voxel size along each axis) max_iter : int maximum number of iterations to be performed tolerance : float maximum tolerated inversion error start : array, shape (R, C) an approximation to the inverse displacement field (if no approximation is available, None can be provided and the start displacement field will be zero) Returns ------- p : array, shape (R, C, 2) the inverse displacement field Notes ----- We assume that the displacement field is an endomorphism so that the shape and voxel-to-space transformation of the inverse field's discretization is the same as those of the input displacement field. The 'inversion error' at iteration t is defined as the mean norm of the displacement vectors of the input displacement field composed with the inverse at iteration t. 
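# --- Illustrative usage sketch (assumes a built/installed dipy) ---
# The Python-level wrapper defined just above can be called directly.  Passing
# None for both pre-multiplication matrices treats the two fields as sharing
# the same grid, and passing None for `comp` lets the routine allocate the
# output buffer.  `stats` holds (max, mean, std) of the composed vector norms.
import numpy as np
from dipy.align.vector_fields import compose_vector_fields_3d

d1 = np.zeros((10, 11, 12, 3), dtype=np.float32)
d2 = np.zeros((10, 11, 12, 3), dtype=np.float32)
d1[..., 0] = 0.5                      # constant half-voxel shift along axis 0
comp, stats = compose_vector_fields_3d(d1, d2, None, None, 1.0, None)
assert comp.shape == d1.shape and stats.shape == (3,)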
""" cdef: cnp.npy_intp nr = d.shape[0] cnp.npy_intp nc = d.shape[1] int iter_count, current, flag double difmag, mag, maxlen, step_factor double epsilon double error = 1 + tolerance double di, dj, dii, djj double sr = spacing[0], sc = spacing[1] ftype = np.asarray(d).dtype cdef: double[:] stats = np.zeros(shape=(2,), dtype=np.float64) double[:] substats = np.empty(shape=(3,), dtype=np.float64) double[:, :] norms = np.zeros(shape=(nr, nc), dtype=np.float64) floating[:, :, :] p = np.zeros(shape=(nr, nc, 2), dtype=ftype) floating[:, :, :] q = np.zeros(shape=(nr, nc, 2), dtype=ftype) if not is_valid_affine(d_world2grid, 2): raise ValueError("Invalid world-to-image transform") if start is not None: p[...] = start with nogil: iter_count = 0 while (iter_count < max_iter) and (tolerance < error): if iter_count == 0: epsilon = 0.75 else: epsilon = 0.5 _compose_vector_fields_2d[floating](p, d, None, d_world2grid, 1.0, q, substats) difmag = 0 error = 0 for i in range(nr): for j in range(nc): mag = sqrt((q[i, j, 0]/sr) ** 2 + (q[i, j, 1]/sc) ** 2) norms[i, j] = mag error += mag if(difmag < mag): difmag = mag maxlen = difmag * epsilon for i in range(nr): for j in range(nc): if norms[i, j] > maxlen: step_factor = epsilon * maxlen / norms[i, j] else: step_factor = epsilon p[i, j, 0] = p[i, j, 0] - step_factor * q[i, j, 0] p[i, j, 1] = p[i, j, 1] - step_factor * q[i, j, 1] error /= (nr * nc) iter_count += 1 stats[0] = substats[1] stats[1] = iter_count return np.asarray(p) def invert_vector_field_fixed_point_3d(floating[:, :, :, :] d, double[:, :] d_world2grid, double[:] spacing, int max_iter, double tol, floating[:, :, :, :] start=None): r"""Computes the inverse of a 3D displacement fields Computes the inverse of the given 3-D displacement field d using the fixed-point algorithm [1]. [1] Chen, M., Lu, W., Chen, Q., Ruchala, K. J., & Olivera, G. H. (2008). A simple fixed-point approach to invert a deformation field. Medical Physics, 35(1), 81. doi:10.1118/1.2816107 Parameters ---------- d : array, shape (S, R, C, 3) the 3-D displacement field to be inverted d_world2grid : array, shape (4, 4) the space-to-grid transformation associated to the displacement field d (transforming physical space coordinates to voxel coordinates of the displacement field grid) spacing :array, shape (3,) the spacing between voxels (voxel size along each axis) max_iter : int maximum number of iterations to be performed tol : float maximum tolerated inversion error start : array, shape (S, R, C) an approximation to the inverse displacement field (if no approximation is available, None can be provided and the start displacement field will be zero) Returns ------- p : array, shape (S, R, C, 3) the inverse displacement field Notes ----- We assume that the displacement field is an endomorphism so that the shape and voxel-to-space transformation of the inverse field's discretization is the same as those of the input displacement field. The 'inversion error' at iteration t is defined as the mean norm of the displacement vectors of the input displacement field composed with the inverse at iteration t. 
""" cdef: cnp.npy_intp ns = d.shape[0] cnp.npy_intp nr = d.shape[1] cnp.npy_intp nc = d.shape[2] int iter_count, current double dkk, dii, djj, dk, di, dj double difmag, mag, maxlen, step_factor double epsilon = 0.5 double error = 1 + tol double ss = spacing[0], sr = spacing[1], sc = spacing[2] ftype = np.asarray(d).dtype cdef: double[:] stats = np.zeros(shape=(2,), dtype=np.float64) double[:] substats = np.zeros(shape=(3,), dtype=np.float64) double[:, :, :] norms = np.zeros(shape=(ns, nr, nc), dtype=np.float64) floating[:, :, :, :] p = np.zeros(shape=(ns, nr, nc, 3), dtype=ftype) floating[:, :, :, :] q = np.zeros(shape=(ns, nr, nc, 3), dtype=ftype) if not is_valid_affine(d_world2grid, 3): raise ValueError("Invalid world-to-image transform") if start is not None: p[...] = start with nogil: iter_count = 0 difmag = 1 while (0.1 < difmag) and (iter_count < max_iter) and (tol < error): if iter_count == 0: epsilon = 0.75 else: epsilon = 0.5 _compose_vector_fields_3d[floating](p, d, None, d_world2grid, 1.0, q, substats) difmag = 0 error = 0 for k in range(ns): for i in range(nr): for j in range(nc): mag = sqrt((q[k, i, j, 0]/ss) ** 2 + (q[k, i, j, 1]/sr) ** 2 + (q[k, i, j, 2]/sc) ** 2) norms[k, i, j] = mag error += mag if(difmag < mag): difmag = mag maxlen = difmag*epsilon for k in range(ns): for i in range(nr): for j in range(nc): if norms[k, i, j] > maxlen: step_factor = epsilon * maxlen / norms[k, i, j] else: step_factor = epsilon p[k, i, j, 0] = (p[k, i, j, 0] - step_factor * q[k, i, j, 0]) p[k, i, j, 1] = (p[k, i, j, 1] - step_factor * q[k, i, j, 1]) p[k, i, j, 2] = (p[k, i, j, 2] - step_factor * q[k, i, j, 2]) error /= (ns * nr * nc) iter_count += 1 stats[0] = error stats[1] = iter_count return np.asarray(p) def simplify_warp_function_2d(floating[:, :, :] d, double[:, :] affine_idx_in, double[:, :] affine_idx_out, double[:, :] affine_disp, int[:] out_shape): r""" Simplifies a nonlinear warping function combined with an affine transform Modifies the given deformation field by incorporating into it a an affine transformation and voxel-to-space transforms associated to the discretization of its domain and codomain. The resulting transformation may be regarded as operating on the image spaces given by the domain and codomain discretization. More precisely, the resulting transform is of the form: (1) T[i] = W * d[U * i] + V * i Where U = affine_idx_in, V = affine_idx_out, W = affine_disp. Parameters ---------- d : array, shape (R', C', 2) the non-linear part of the transformation (displacement field) affine_idx_in : array, shape (3, 3) the matrix U in eq. (1) above affine_idx_out : array, shape (3, 3) the matrix V in eq. (1) above affine_disp : array, shape (3, 3) the matrix W in eq. (1) above out_shape : array, shape (2,) the number of rows and columns of the sampling grid Returns ------- out : array, shape = out_shape the deformation field `out` associated with `T` in eq. (1) such that: T[i] = i + out[i] Notes ----- Both the direct and inverse transforms of a DiffeomorphicMap can be written in this form: Direct: Let D be the voxel-to-space transform of the domain's discretization, P be the pre-align matrix, Rinv the space-to-voxel transform of the reference grid (the grid the displacement field is defined on) and Cinv be the space-to-voxel transform of the codomain's discretization. 
Then, for each i in the domain's grid, the direct transform is given by (2) T[i] = Cinv * d[Rinv * P * D * i] + Cinv * P * D * i and we identify U = Rinv * P * D, V = Cinv * P * D, W = Cinv Inverse: Let C be the voxel-to-space transform of the codomain's discretization, Pinv be the inverse of the pre-align matrix, Rinv the space-to-voxel transform of the reference grid (the grid the displacement field is defined on) and Dinv be the space-to-voxel transform of the domain's discretization. Then, for each j in the codomain's grid, the inverse transform is given by (3) Tinv[j] = Dinv * Pinv * d[Rinv * C * j] + Dinv * Pinv * C * j and we identify U = Rinv * C, V = Dinv * Pinv * C, W = Dinv * Pinv """ cdef: cnp.npy_intp nrows = out_shape[0] cnp.npy_intp ncols = out_shape[1] cnp.npy_intp i, j double di, dj, dii, djj floating[:] tmp = np.zeros((2,), dtype=np.asarray(d).dtype) floating[:, :, :] out = np.zeros(shape=(nrows, ncols, 2), dtype=np.asarray(d).dtype) if not is_valid_affine(affine_idx_in, 2): raise ValueError("Invalid inner index multiplication matrix") if not is_valid_affine(affine_idx_out, 2): raise ValueError("Invalid outer index multiplication matrix") if not is_valid_affine(affine_disp, 2): raise ValueError("Invalid displacement multiplication matrix") with nogil: for i in range(nrows): for j in range(ncols): # Apply inner index pre-multiplication if affine_idx_in is None: dii = d[i, j, 0] djj = d[i, j, 1] else: di = _apply_affine_2d_x0( i, j, 1, affine_idx_in) dj = _apply_affine_2d_x1( i, j, 1, affine_idx_in) _interpolate_vector_2d[floating](d, di, dj, &tmp[0]) dii = tmp[0] djj = tmp[1] # Apply displacement multiplication if affine_disp is not None: di = _apply_affine_2d_x0( dii, djj, 0, affine_disp) dj = _apply_affine_2d_x1( dii, djj, 0, affine_disp) else: di = dii dj = djj # Apply outer index multiplication and add the displacements if affine_idx_out is not None: out[i, j, 0] = di + _apply_affine_2d_x0(i, j, 1, affine_idx_out) - i out[i, j, 1] = dj + _apply_affine_2d_x1(i, j, 1, affine_idx_out) - j else: out[i, j, 0] = di out[i, j, 1] = dj return np.asarray(out) def simplify_warp_function_3d(floating[:, :, :, :] d, double[:, :] affine_idx_in, double[:, :] affine_idx_out, double[:, :] affine_disp, int[:] out_shape): r""" Simplifies a nonlinear warping function combined with an affine transform Modifies the given deformation field by incorporating into it an affine transformation and voxel-to-space transforms associated with the discretization of its domain and codomain. The resulting transformation may be regarded as operating on the image spaces given by the domain and codomain discretization. More precisely, the resulting transform is of the form: (1) T[i] = W * d[U * i] + V * i Where U = affine_idx_in, V = affine_idx_out, W = affine_disp. Parameters ---------- d : array, shape (S', R', C', 3) the non-linear part of the transformation (displacement field) affine_idx_in : array, shape (4, 4) the matrix U in eq. (1) above affine_idx_out : array, shape (4, 4) the matrix V in eq. (1) above affine_disp : array, shape (4, 4) the matrix W in eq. (1) above out_shape : array, shape (3,) the number of slices, rows and columns of the sampling grid Returns ------- out : array, shape = out_shape the deformation field `out` associated with `T` in eq. 
(1) such that: T[i] = i + out[i] Notes ----- Both the direct and inverse transforms of a DiffeomorphicMap can be written in this form: Direct: Let D be the voxel-to-space transform of the domain's discretization, P be the pre-align matrix, Rinv the space-to-voxel transform of the reference grid (the grid the displacement field is defined on) and Cinv be the space-to-voxel transform of the codomain's discretization. Then, for each i in the domain's grid, the direct transform is given by (2) T[i] = Cinv * d[Rinv * P * D * i] + Cinv * P * D * i and we identify U = Rinv * P * D, V = Cinv * P * D, W = Cinv Inverse: Let C be the voxel-to-space transform of the codomain's discretization, Pinv be the inverse of the pre-align matrix, Rinv the space-to-voxel transform of the reference grid (the grid the displacement field is defined on) and Dinv be the space-to-voxel transform of the domain's discretization. Then, for each j in the codomain's grid, the inverse transform is given by (3) Tinv[j] = Dinv * Pinv * d[Rinv * C * j] + Dinv * Pinv * C * j and we identify U = Rinv * C, V = Dinv * Pinv * C, W = Dinv * Pinv """ cdef: cnp.npy_intp nslices = out_shape[0] cnp.npy_intp nrows = out_shape[1] cnp.npy_intp ncols = out_shape[2] cnp.npy_intp i, j, k, inside double di, dj, dk, dii, djj, dkk floating[:] tmp = np.zeros((3,), dtype=np.asarray(d).dtype) floating[:, :, :, :] out = np.zeros(shape=(nslices, nrows, ncols, 3), dtype=np.asarray(d).dtype) if not is_valid_affine(affine_idx_in, 3): raise ValueError("Invalid inner index multiplication matrix") if not is_valid_affine(affine_idx_out, 3): raise ValueError("Invalid outer index multiplication matrix") if not is_valid_affine(affine_disp, 3): raise ValueError("Invalid displacement multiplication matrix") with nogil: for k in range(nslices): for i in range(nrows): for j in range(ncols): if affine_idx_in is None: dkk = d[k, i, j, 0] dii = d[k, i, j, 1] djj = d[k, i, j, 2] else: dk = _apply_affine_3d_x0( k, i, j, 1, affine_idx_in) di = _apply_affine_3d_x1( k, i, j, 1, affine_idx_in) dj = _apply_affine_3d_x2( k, i, j, 1, affine_idx_in) inside = _interpolate_vector_3d[floating](d, dk, di, dj, &tmp[0]) dkk = tmp[0] dii = tmp[1] djj = tmp[2] if affine_disp is not None: dk = _apply_affine_3d_x0( dkk, dii, djj, 0, affine_disp) di = _apply_affine_3d_x1( dkk, dii, djj, 0, affine_disp) dj = _apply_affine_3d_x2( dkk, dii, djj, 0, affine_disp) else: dk = dkk di = dii dj = djj if affine_idx_out is not None: out[k, i, j, 0] = dk +\ _apply_affine_3d_x0(k, i, j, 1, affine_idx_out) - k out[k, i, j, 1] = di +\ _apply_affine_3d_x1(k, i, j, 1, affine_idx_out) - i out[k, i, j, 2] = dj +\ _apply_affine_3d_x2(k, i, j, 1, affine_idx_out) - j else: out[k, i, j, 0] = dk out[k, i, j, 1] = di out[k, i, j, 2] = dj return np.asarray(out) def reorient_vector_field_2d(floating[:, :, :] d, double[:, :] affine): r"""Linearly transforms all vectors of a 2D displacement field Modifies the input displacement field by multiplying each displacement vector by the given matrix. Note that the elements of the displacement field are vectors, not points, so their last homogeneous coordinate is zero, not one, and therefore the translation component of the affine transform will not have any effect on them. 
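# --- Illustrative sketch (not part of dipy): re-orienting displacement vectors ---
# Because displacement vectors have homogeneous coordinate 0, only the linear
# (rotation/scale/shear) block of the affine acts on them; the translation
# column is ignored, exactly as the re-orientation docstrings state.  The
# helper name is made up for illustration.
import numpy as np

def reorient_vectors_2d(d, affine):
    # d has shape (R, C, 2); apply the 2x2 linear block to every vector
    return d.dot(affine[:2, :2].T)

affine = np.array([[0.0, -1.0,  5.0],      # 90-degree rotation ...
                   [1.0,  0.0, -3.0],      # ... plus a translation
                   [0.0,  0.0,  1.0]])
d = np.zeros((4, 4, 2))
d[..., 0] = 1.0                            # unit displacement along rows
out = reorient_vectors_2d(d, affine)
assert np.allclose(out[..., 0], 0.0) and np.allclose(out[..., 1], 1.0)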
Parameters ---------- d : array, shape (R, C, 2) the displacement field to be re-oriented affine: array, shape (3, 3) the matrix to be applied """ cdef: cnp.npy_intp nrows = d.shape[0] cnp.npy_intp ncols = d.shape[1] cnp.npy_intp i, j double di, dj if not is_valid_affine(affine, 2): raise ValueError("Invalid affine transform matrix") if affine is None: return with nogil: for i in range(nrows): for j in range(ncols): di = d[i, j, 0] dj = d[i, j, 1] d[i, j, 0] = _apply_affine_2d_x0(di, dj, 0, affine) d[i, j, 1] = _apply_affine_2d_x1(di, dj, 0, affine) def reorient_vector_field_3d(floating[:, :, :, :] d, double[:, :] affine): r"""Linearly transforms all vectors of a 3D displacement field Modifies the input displacement field by multiplying each displacement vector by the given matrix. Note that the elements of the displacement field are vectors, not points, so their last homogeneous coordinate is zero, not one, and therefore the translation component of the affine transform will not have any effect on them. Parameters ---------- d : array, shape (S, R, C, 3) the displacement field to be re-oriented affine : array, shape (4, 4) the matrix to be applied """ cdef: cnp.npy_intp nslices = d.shape[0] cnp.npy_intp nrows = d.shape[1] cnp.npy_intp ncols = d.shape[2] cnp.npy_intp i, j, k double di, dj, dk if not is_valid_affine(affine, 3): raise ValueError("Invalid affine transform matrix") if affine is None: return with nogil: for k in range(nslices): for i in range(nrows): for j in range(ncols): dk = d[k, i, j, 0] di = d[k, i, j, 1] dj = d[k, i, j, 2] d[k, i, j, 0] = _apply_affine_3d_x0(dk, di, dj, 0, affine) d[k, i, j, 1] = _apply_affine_3d_x1(dk, di, dj, 0, affine) d[k, i, j, 2] = _apply_affine_3d_x2(dk, di, dj, 0, affine) def downsample_scalar_field_3d(floating[:, :, :] field): r"""Down-samples the input volume by a factor of 2 Down-samples the input volume by a factor of 2. The value at each voxel of the resulting volume is the average of its surrounding voxels in the original volume. Parameters ---------- field : array, shape (S, R, C) the volume to be down-sampled Returns ------- down : array, shape (S', R', C') the down-sampled displacement field, where S' = ceil(S/2), R'= ceil(R/2), C'=ceil(C/2) """ ftype = np.asarray(field).dtype cdef: cnp.npy_intp ns = field.shape[0] cnp.npy_intp nr = field.shape[1] cnp.npy_intp nc = field.shape[2] cnp.npy_intp nns = (ns + 1) // 2 cnp.npy_intp nnr = (nr + 1) // 2 cnp.npy_intp nnc = (nc + 1) // 2 cnp.npy_intp i, j, k, ii, jj, kk floating[:, :, :] down = np.zeros((nns, nnr, nnc), dtype=ftype) int[:, :, :] cnt = np.zeros((nns, nnr, nnc), dtype=np.int32) with nogil: for k in range(ns): for i in range(nr): for j in range(nc): kk = k // 2 ii = i // 2 jj = j // 2 down[kk, ii, jj] += field[k, i, j] cnt[kk, ii, jj] += 1 for k in range(nns): for i in range(nnr): for j in range(nnc): if cnt[k, i, j] > 0: down[k, i, j] /= cnt[k, i, j] return np.asarray(down) def downsample_displacement_field_3d(floating[:, :, :, :] field): r"""Down-samples the input 3D vector field by a factor of 2 Down-samples the input vector field by a factor of 2. This operation is equivalent to dividing the input image into 2x2x2 cubes and averaging the 8 vectors. The resulting field consists of these average vectors. 
Parameters ---------- field : array, shape (S, R, C) the vector field to be down-sampled Returns ------- down : array, shape (S', R', C') the down-sampled displacement field, where S' = ceil(S/2), R'= ceil(R/2), C'=ceil(C/2) """ ftype = np.asarray(field).dtype cdef: cnp.npy_intp ns = field.shape[0] cnp.npy_intp nr = field.shape[1] cnp.npy_intp nc = field.shape[2] cnp.npy_intp nns = (ns + 1) // 2 cnp.npy_intp nnr = (nr + 1) // 2 cnp.npy_intp nnc = (nc + 1) // 2 cnp.npy_intp i, j, k, ii, jj, kk floating[:, :, :, :] down = np.zeros((nns, nnr, nnc, 3), dtype=ftype) int[:, :, :] cnt = np.zeros((nns, nnr, nnc), dtype=np.int32) with nogil: for k in range(ns): for i in range(nr): for j in range(nc): kk = k // 2 ii = i // 2 jj = j // 2 down[kk, ii, jj, 0] += field[k, i, j, 0] down[kk, ii, jj, 1] += field[k, i, j, 1] down[kk, ii, jj, 2] += field[k, i, j, 2] cnt[kk, ii, jj] += 1 for k in range(nns): for i in range(nnr): for j in range(nnc): if cnt[k, i, j] > 0: down[k, i, j, 0] /= cnt[k, i, j] down[k, i, j, 1] /= cnt[k, i, j] down[k, i, j, 2] /= cnt[k, i, j] return np.asarray(down) def downsample_scalar_field_2d(floating[:, :] field): r"""Down-samples the input 2D image by a factor of 2 Down-samples the input image by a factor of 2. The value at each pixel of the resulting image is the average of its surrounding pixels in the original image. Parameters ---------- field : array, shape (R, C) the image to be down-sampled Returns ------- down : array, shape (R', C') the down-sampled displacement field, where R'= ceil(R/2), C'=ceil(C/2) """ ftype = np.asarray(field).dtype cdef: cnp.npy_intp nr = field.shape[0] cnp.npy_intp nc = field.shape[1] cnp.npy_intp nnr = (nr + 1) // 2 cnp.npy_intp nnc = (nc + 1) // 2 cnp.npy_intp i, j, ii, jj floating[:, :] down = np.zeros(shape=(nnr, nnc), dtype=ftype) int[:, :] cnt = np.zeros(shape=(nnr, nnc), dtype=np.int32) with nogil: for i in range(nr): for j in range(nc): ii = i // 2 jj = j // 2 down[ii, jj] += field[i, j] cnt[ii, jj] += 1 for i in range(nnr): for j in range(nnc): if cnt[i, j] > 0: down[i, j] /= cnt[i, j] return np.asarray(down) def downsample_displacement_field_2d(floating[:, :, :] field): r"""Down-samples the 2D input vector field by a factor of 2 Down-samples the input vector field by a factor of 2. The value at each pixel of the resulting field is the average of its surrounding pixels in the original field. Parameters ---------- field : array, shape (R, C) the vector field to be down-sampled Returns ------- down : array, shape (R', C') the down-sampled displacement field, where R'= ceil(R/2), C'=ceil(C/2), """ ftype = np.asarray(field).dtype cdef: cnp.npy_intp nr = field.shape[0] cnp.npy_intp nc = field.shape[1] cnp.npy_intp nnr = (nr + 1) // 2 cnp.npy_intp nnc = (nc + 1) // 2 cnp.npy_intp i, j, ii, jj floating[:, :, :] down = np.zeros((nnr, nnc, 2), dtype=ftype) int[:, :] cnt = np.zeros((nnr, nnc), dtype=np.int32) with nogil: for i in range(nr): for j in range(nc): ii = i // 2 jj = j // 2 down[ii, jj, 0] += field[i, j, 0] down[ii, jj, 1] += field[i, j, 1] cnt[ii, jj] += 1 for i in range(nnr): for j in range(nnc): if cnt[i, j] > 0: down[i, j, 0] /= cnt[i, j] down[i, j, 1] /= cnt[i, j] return np.asarray(down) def warp_3d(floating[:, :, :] volume, floating[:, :, :, :] d1, double[:, :] affine_idx_in=None, double[:, :] affine_idx_out=None, double[:, :] affine_disp=None, int[:] out_shape=None): r"""Warps a 3D volume using trilinear interpolation Deforms the input volume under the given transformation. 
The warped volume is computed using tri-linear interpolation and is given by: (1) warped[i] = volume[ C * d1[A*i] + B*i ] where A = affine_idx_in, B = affine_idx_out, C = affine_disp and i denotes the discrete coordinates of a voxel in the sampling grid of shape = out_shape. Parameters ---------- volume : array, shape (S, R, C) the input volume to be transformed d1 : array, shape (S', R', C', 3) the displacement field driving the transformation affine_idx_in : array, shape (4, 4) the matrix A in eq. (1) above affine_idx_out : array, shape (4, 4) the matrix B in eq. (1) above affine_disp : array, shape (4, 4) the matrix C in eq. (1) above out_shape : array, shape (3,) the number of slices, rows and columns of the sampling grid Returns ------- warped : array, shape = out_shape the transformed volume Notes ----- To illustrate the use of this function, consider a displacement field d1 with grid-to-space transformation R, a volume with grid-to-space transformation T and let's say we want to sample the warped volume on a grid with grid-to-space transformation S (sampling grid). For each voxel in the sampling grid with discrete coordinates i, the warped volume is given by: (2) warped[i] = volume[Tinv * ( d1[Rinv * S * i] + S * i ) ] where Tinv = T^{-1} and Rinv = R^{-1}. By identifying A = Rinv * S, B = Tinv * S, C = Tinv we can use this function to efficiently warp the input image. """ cdef: cnp.npy_intp nslices = volume.shape[0] cnp.npy_intp nrows = volume.shape[1] cnp.npy_intp ncols = volume.shape[2] cnp.npy_intp nsVol = volume.shape[0] cnp.npy_intp nrVol = volume.shape[1] cnp.npy_intp ncVol = volume.shape[2] cnp.npy_intp i, j, k int inside double dkk, dii, djj, dk, di, dj if not is_valid_affine(affine_idx_in, 3): raise ValueError("Invalid inner index multiplication matrix") if not is_valid_affine(affine_idx_out, 3): raise ValueError("Invalid outer index multiplication matrix") if not is_valid_affine(affine_disp, 3): raise ValueError("Invalid displacement multiplication matrix") if out_shape is not None: nslices = out_shape[0] nrows = out_shape[1] ncols = out_shape[2] elif d1 is not None: nslices = d1.shape[0] nrows = d1.shape[1] ncols = d1.shape[2] cdef floating[:, :, :] warped = np.zeros(shape=(nslices, nrows, ncols), dtype=np.asarray(volume).dtype) cdef floating[:] tmp = np.zeros(shape=(3,), dtype = np.asarray(d1).dtype) with nogil: for k in range(nslices): for i in range(nrows): for j in range(ncols): if affine_idx_in is None: dkk = d1[k, i, j, 0] dii = d1[k, i, j, 1] djj = d1[k, i, j, 2] else: dk = _apply_affine_3d_x0( k, i, j, 1, affine_idx_in) di = _apply_affine_3d_x1( k, i, j, 1, affine_idx_in) dj = _apply_affine_3d_x2( k, i, j, 1, affine_idx_in) inside = _interpolate_vector_3d[floating](d1, dk, di, dj, &tmp[0]) dkk = tmp[0] dii = tmp[1] djj = tmp[2] if affine_disp is not None: dk = _apply_affine_3d_x0( dkk, dii, djj, 0, affine_disp) di = _apply_affine_3d_x1( dkk, dii, djj, 0, affine_disp) dj = _apply_affine_3d_x2( dkk, dii, djj, 0, affine_disp) else: dk = dkk di = dii dj = djj if affine_idx_out is not None: dkk = dk + _apply_affine_3d_x0(k, i, j, 1, affine_idx_out) dii = di + _apply_affine_3d_x1(k, i, j, 1, affine_idx_out) djj = dj + _apply_affine_3d_x2(k, i, j, 1, affine_idx_out) else: dkk = dk + k dii = di + i djj = dj + j inside = _interpolate_scalar_3d[floating](volume, dkk, dii, djj, &warped[k,i,j]) return np.asarray(warped) def transform_3d_affine(floating[:, :, :] volume, int[:] ref_shape, double[:, :] affine): r"""Transforms a 3D volume by an affine transform with trilinear 
interp. Deforms the input volume under the given affine transformation using tri-linear interpolation. The shape of the resulting transformation is given by ref_shape. If the affine matrix is None, it is taken as the identity. Parameters ---------- volume : array, shape (S, R, C) the input volume to be transformed ref_shape : array, shape (3,) the shape of the resulting volume affine : array, shape (4, 4) the affine transform to be applied Returns ------- out : array, shape (S', R', C') the transformed volume Notes ----- The reason it is necessary to provide the intended shape of the resulting volume is because the affine transformation is defined on all R^{3} but we must sample a finite lattice. Also the resulting shape may not be necessarily equal to the input shape, unless we are interested on endomorphisms only and not general diffeomorphisms. """ cdef: cnp.npy_intp nslices = ref_shape[0] cnp.npy_intp nrows = ref_shape[1] cnp.npy_intp ncols = ref_shape[2] cnp.npy_intp nsVol = volume.shape[0] cnp.npy_intp nrVol = volume.shape[1] cnp.npy_intp ncVol = volume.shape[2] cnp.npy_intp i, j, k, ii, jj, kk int inside double dkk, dii, djj, tmp0, tmp1 double alpha, beta, gamma, calpha, cbeta, cgamma floating[:, :, :] out = np.zeros(shape=(nslices, nrows, ncols), dtype=np.asarray(volume).dtype) if not is_valid_affine(affine, 3): raise ValueError("Invalid affine transform matrix") with nogil: for k in range(nslices): for i in range(nrows): for j in range(ncols): if affine is not None: dkk = _apply_affine_3d_x0(k, i, j, 1, affine) dii = _apply_affine_3d_x1(k, i, j, 1, affine) djj = _apply_affine_3d_x2(k, i, j, 1, affine) else: dkk = k dii = i djj = j inside = _interpolate_scalar_3d[floating](volume, dkk, dii, djj, &out[k,i,j]) return np.asarray(out) def warp_3d_nn(number[:, :, :] volume, floating[:, :, :, :] d1, double[:, :] affine_idx_in=None, double[:, :] affine_idx_out=None, double[:, :] affine_disp=None, int[:] out_shape=None): r"""Warps a 3D volume using using nearest-neighbor interpolation Deforms the input volume under the given transformation. The warped volume is computed using nearest-neighbor interpolation and is given by: (1) warped[i] = volume[ C * d1[A*i] + B*i ] where A = affine_idx_in, B = affine_idx_out, C = affine_disp and i denotes the discrete coordinates of a voxel in the sampling grid of shape = out_shape. Parameters ---------- volume : array, shape (S, R, C) the input volume to be transformed d1 : array, shape (S', R', C', 3) the displacement field driving the transformation affine_idx_in : array, shape (4, 4) the matrix A in eq. (1) above affine_idx_out : array, shape (4, 4) the matrix B in eq. (1) above affine_disp : array, shape (4, 4) the matrix C in eq. (1) above out_shape : array, shape (3,) the number of slices, rows and columns of the sampling grid Returns ------- warped : array, shape = out_shape the transformed volume Notes ----- To illustrate the use of this function, consider a displacement field d1 with grid-to-space transformation R, a volume with grid-to-space transformation T and let's say we want to sample the warped volume on a grid with grid-to-space transformation S (sampling grid). For each voxel in the sampling grid with discrete coordinates i, the warped volume is given by: (2) warped[i] = volume[Tinv * ( d1[Rinv * S * i] + S * i ) ] where Tinv = T^{-1} and Rinv = R^{-1}. By identifying A = Rinv * S, B = Tinv * S, C = Tinv we can use this function to efficiently warp the input image. 
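Examples
--------
A minimal, illustrative sketch (array shapes are assumptions; a zero
displacement field with all affines left as None behaves as the identity
warp):

>>> import numpy as np
>>> vol = np.random.rand(5, 6, 7)           # volume to be warped
>>> zero_disp = np.zeros((5, 6, 7, 3))      # zero displacement field
>>> warped = warp_3d_nn(vol, zero_disp)     # same shape and values as vol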
""" cdef: cnp.npy_intp nslices = volume.shape[0] cnp.npy_intp nrows = volume.shape[1] cnp.npy_intp ncols = volume.shape[2] cnp.npy_intp nsVol = volume.shape[0] cnp.npy_intp nrVol = volume.shape[1] cnp.npy_intp ncVol = volume.shape[2] cnp.npy_intp i, j, k int inside double dkk, dii, djj, dk, di, dj if not is_valid_affine(affine_idx_in, 3): raise ValueError("Invalid inner index multiplication matrix") if not is_valid_affine(affine_idx_out, 3): raise ValueError("Invalid outer index multiplication matrix") if not is_valid_affine(affine_disp, 3): raise ValueError("Invalid displacement multiplication matrix") if out_shape is not None: nslices = out_shape[0] nrows = out_shape[1] ncols = out_shape[2] elif d1 is not None: nslices = d1.shape[0] nrows = d1.shape[1] ncols = d1.shape[2] cdef number[:, :, :] warped = np.zeros(shape=(nslices, nrows, ncols), dtype=np.asarray(volume).dtype) cdef floating[:] tmp = np.zeros(shape=(3,), dtype = np.asarray(d1).dtype) with nogil: for k in range(nslices): for i in range(nrows): for j in range(ncols): if affine_idx_in is None: dkk = d1[k, i, j, 0] dii = d1[k, i, j, 1] djj = d1[k, i, j, 2] else: dk = _apply_affine_3d_x0( k, i, j, 1, affine_idx_in) di = _apply_affine_3d_x1( k, i, j, 1, affine_idx_in) dj = _apply_affine_3d_x2( k, i, j, 1, affine_idx_in) inside = _interpolate_vector_3d[floating](d1, dk, di, dj, &tmp[0]) dkk = tmp[0] dii = tmp[1] djj = tmp[2] if affine_disp is not None: dk = _apply_affine_3d_x0( dkk, dii, djj, 0, affine_disp) di = _apply_affine_3d_x1( dkk, dii, djj, 0, affine_disp) dj = _apply_affine_3d_x2( dkk, dii, djj, 0, affine_disp) else: dk = dkk di = dii dj = djj if affine_idx_out is not None: dkk = dk + _apply_affine_3d_x0(k, i, j, 1, affine_idx_out) dii = di + _apply_affine_3d_x1(k, i, j, 1, affine_idx_out) djj = dj + _apply_affine_3d_x2(k, i, j, 1, affine_idx_out) else: dkk = dk + k dii = di + i djj = dj + j inside = _interpolate_scalar_nn_3d[number](volume, dkk, dii, djj, &warped[k,i,j]) return np.asarray(warped) def transform_3d_affine_nn(number[:, :, :] volume, int[:] ref_shape, double[:, :] affine=None): r"""Transforms a 3D volume by an affine transform with NN interpolation Deforms the input volume under the given affine transformation using nearest neighbor interpolation. The shape of the resulting volume is given by ref_shape. If the affine matrix is None, it is taken as the identity. Parameters ---------- volume : array, shape (S, R, C) the input volume to be transformed ref_shape : array, shape (3,) the shape of the resulting volume affine : array, shape (4, 4) the affine transform to be applied Returns ------- out : array, shape (S', R', C') the transformed volume Notes ----- The reason it is necessary to provide the intended shape of the resulting volume is because the affine transformation is defined on all R^{3} but we must sample a finite lattice. Also the resulting shape may not be necessarily equal to the input shape, unless we are interested on endomorphisms only and not general diffeomorphisms. 
""" cdef: cnp.npy_intp nslices = ref_shape[0] cnp.npy_intp nrows = ref_shape[1] cnp.npy_intp ncols = ref_shape[2] cnp.npy_intp nsVol = volume.shape[0] cnp.npy_intp nrVol = volume.shape[1] cnp.npy_intp ncVol = volume.shape[2] double dkk, dii, djj, tmp0, tmp1 double alpha, beta, gamma, calpha, cbeta, cgamma cnp.npy_intp k, i, j, kk, ii, jj number[:, :, :] out = np.zeros((nslices, nrows, ncols), dtype=np.asarray(volume).dtype) if not is_valid_affine(affine, 3): raise ValueError("Invalid affine transform matrix") with nogil: for k in range(nslices): for i in range(nrows): for j in range(ncols): if affine is not None: dkk = _apply_affine_3d_x0(k, i, j, 1, affine) dii = _apply_affine_3d_x1(k, i, j, 1, affine) djj = _apply_affine_3d_x2(k, i, j, 1, affine) else: dkk = k dii = i djj = j _interpolate_scalar_nn_3d[number](volume, dkk, dii, djj, &out[k,i,j]) return np.asarray(out) def warp_2d(floating[:, :] image, floating[:, :, :] d1, double[:, :] affine_idx_in=None, double[:, :] affine_idx_out=None, double[:, :] affine_disp=None, int[:] out_shape=None): r"""Warps a 2D image using bilinear interpolation Deforms the input image under the given transformation. The warped image is computed using bi-linear interpolation and is given by: (1) warped[i] = image[ C * d1[A*i] + B*i ] where A = affine_idx_in, B = affine_idx_out, C = affine_disp and i denotes the discrete coordinates of a voxel in the sampling grid of shape = out_shape. Parameters ---------- image : array, shape (R, C) the input image to be transformed d1 : array, shape (R', C', 2) the displacement field driving the transformation affine_idx_in : array, shape (3, 3) the matrix A in eq. (1) above affine_idx_out : array, shape (3, 3) the matrix B in eq. (1) above affine_disp : array, shape (3, 3) the matrix C in eq. (1) above out_shape : array, shape (2,) the number of rows and columns of the sampling grid Returns ------- warped : array, shape = out_shape the transformed image Notes ----- To illustrate the use of this function, consider a displacement field d1 with grid-to-space transformation R, an image with grid-to-space transformation T and let's say we want to sample the warped image on a grid with grid-to-space transformation S (sampling grid). For each voxel in the sampling grid with discrete coordinates i, the warped image is given by: (2) warped[i] = image[Tinv * ( d1[Rinv * S * i] + S * i ) ] where Tinv = T^{-1} and Rinv = R^{-1}. By identifying A = Rinv * S, B = Tinv * S, C = Tinv we can use this function to efficiently warp the input image. 
""" cdef: cnp.npy_intp nrows = image.shape[0] cnp.npy_intp ncols = image.shape[1] cnp.npy_intp nrVol = image.shape[0] cnp.npy_intp ncVol = image.shape[1] cnp.npy_intp i, j, ii, jj double di, dj, dii, djj if not is_valid_affine(affine_idx_in, 2): raise ValueError("Invalid inner index multiplication matrix") if not is_valid_affine(affine_idx_out, 2): raise ValueError("Invalid outer index multiplication matrix") if not is_valid_affine(affine_disp, 2): raise ValueError("Invalid displacement multiplication matrix") if out_shape is not None: nrows = out_shape[0] ncols = out_shape[1] elif d1 is not None: nrows = d1.shape[0] ncols = d1.shape[1] cdef floating[:, :] warped = np.zeros(shape=(nrows, ncols), dtype=np.asarray(image).dtype) cdef floating[:] tmp = np.zeros(shape=(2,), dtype=np.asarray(d1).dtype) with nogil: for i in range(nrows): for j in range(ncols): # Apply inner index pre-multiplication if affine_idx_in is None: dii = d1[i, j, 0] djj = d1[i, j, 1] else: di = _apply_affine_2d_x0( i, j, 1, affine_idx_in) dj = _apply_affine_2d_x1( i, j, 1, affine_idx_in) _interpolate_vector_2d[floating](d1, di, dj, &tmp[0]) dii = tmp[0] djj = tmp[1] # Apply displacement multiplication if affine_disp is not None: di = _apply_affine_2d_x0( dii, djj, 0, affine_disp) dj = _apply_affine_2d_x1( dii, djj, 0, affine_disp) else: di = dii dj = djj # Apply outer index multiplication and add the displacements if affine_idx_out is not None: dii = di + _apply_affine_2d_x0(i, j, 1, affine_idx_out) djj = dj + _apply_affine_2d_x1(i, j, 1, affine_idx_out) else: dii = di + i djj = dj + j # Interpolate the input image at the resulting location _interpolate_scalar_2d[floating](image, dii, djj, &warped[i, j]) return np.asarray(warped) def transform_2d_affine(floating[:, :] image, int[:] ref_shape, double[:, :] affine=None): r"""Transforms a 2D image by an affine transform with bilinear interp. Deforms the input image under the given affine transformation using tri-linear interpolation. The shape of the resulting image is given by ref_shape. If the affine matrix is None, it is taken as the identity. Parameters ---------- image : array, shape (R, C) the input image to be transformed ref_shape : array, shape (2,) the shape of the resulting image affine : array, shape (3, 3) the affine transform to be applied Returns ------- out : array, shape (R', C') the transformed image Notes ----- The reason it is necessary to provide the intended shape of the resulting image is because the affine transformation is defined on all R^{2} but we must sample a finite lattice. Also the resulting shape may not be necessarily equal to the input shape, unless we are interested on endomorphisms only and not general diffeomorphisms. 
""" cdef: cnp.npy_intp nrows = ref_shape[0] cnp.npy_intp ncols = ref_shape[1] cnp.npy_intp nrVol = image.shape[0] cnp.npy_intp ncVol = image.shape[1] cnp.npy_intp i, j, ii, jj double dii, djj, tmp0 double alpha, beta, calpha, cbeta floating[:, :] out = np.zeros(shape=(nrows, ncols), dtype=np.asarray(image).dtype) if not is_valid_affine(affine, 2): raise ValueError("Invalid affine transform matrix") with nogil: for i in range(nrows): for j in range(ncols): if affine is not None: dii = _apply_affine_2d_x0(i, j, 1, affine) djj = _apply_affine_2d_x1(i, j, 1, affine) else: dii = i djj = j _interpolate_scalar_2d[floating](image, dii, djj, &out[i, j]) return np.asarray(out) def warp_2d_nn(number[:, :] image, floating[:, :, :] d1, double[:, :] affine_idx_in=None, double[:, :] affine_idx_out=None, double[:, :] affine_disp=None, int[:] out_shape=None): r"""Warps a 2D image using nearest neighbor interpolation Deforms the input image under the given transformation. The warped image is computed using nearest-neighbor interpolation and is given by: (1) warped[i] = image[ C * d1[A*i] + B*i ] where A = affine_idx_in, B = affine_idx_out, C = affine_disp and i denotes the discrete coordinates of a voxel in the sampling grid of shape = out_shape. Parameters ---------- image : array, shape (R, C) the input image to be transformed d1 : array, shape (R', C', 2) the displacement field driving the transformation affine_idx_in : array, shape (3, 3) the matrix A in eq. (1) above affine_idx_out : array, shape (3, 3) the matrix B in eq. (1) above affine_disp : array, shape (3, 3) the matrix C in eq. (1) above out_shape : array, shape (2,) the number of rows and columns of the sampling grid Returns ------- warped : array, shape = out_shape the transformed image Notes ----- To illustrate the use of this function, consider a displacement field d1 with grid-to-space transformation R, an image with grid-to-space transformation T and let's say we want to sample the warped image on a grid with grid-to-space transformation S (sampling grid). For each voxel in the sampling grid with discrete coordinates i, the warped image is given by: (2) warped[i] = image[Tinv * ( d1[Rinv * S * i] + S * i ) ] where Tinv = T^{-1} and Rinv = R^{-1}. By identifying A = Rinv * S, B = Tinv * S, C = Tinv we can use this function to efficiently warp the input image. 
""" cdef: cnp.npy_intp nrows = image.shape[0] cnp.npy_intp ncols = image.shape[1] cnp.npy_intp nrVol = image.shape[0] cnp.npy_intp ncVol = image.shape[1] cnp.npy_intp i, j, ii, jj double di, dj, dii, djj if not is_valid_affine(affine_idx_in, 2): raise ValueError("Invalid inner index multiplication matrix") if not is_valid_affine(affine_idx_out, 2): raise ValueError("Invalid outer index multiplication matrix") if not is_valid_affine(affine_disp, 2): raise ValueError("Invalid displacement multiplication matrix") if out_shape is not None: nrows = out_shape[0] ncols = out_shape[1] elif d1 is not None: nrows = d1.shape[0] ncols = d1.shape[1] cdef number[:, :] warped = np.zeros(shape=(nrows, ncols), dtype=np.asarray(image).dtype) cdef floating[:] tmp = np.zeros(shape=(2,), dtype=np.asarray(d1).dtype) with nogil: for i in range(nrows): for j in range(ncols): # Apply inner index pre-multiplication if affine_idx_in is None: dii = d1[i, j, 0] djj = d1[i, j, 1] else: di = _apply_affine_2d_x0( i, j, 1, affine_idx_in) dj = _apply_affine_2d_x1( i, j, 1, affine_idx_in) _interpolate_vector_2d[floating](d1, di, dj, &tmp[0]) dii = tmp[0] djj = tmp[1] # Apply displacement multiplication if affine_disp is not None: di = _apply_affine_2d_x0( dii, djj, 0, affine_disp) dj = _apply_affine_2d_x1( dii, djj, 0, affine_disp) else: di = dii dj = djj # Apply outer index multiplication and add the displacements if affine_idx_out is not None: dii = di + _apply_affine_2d_x0(i, j, 1, affine_idx_out) djj = dj + _apply_affine_2d_x1(i, j, 1, affine_idx_out) else: dii = di + i djj = dj + j # Interpolate the input image at the resulting location _interpolate_scalar_nn_2d[number](image, dii, djj, &warped[i, j]) return np.asarray(warped) def transform_2d_affine_nn(number[:, :] image, int[:] ref_shape, double[:, :] affine=None): r"""Transforms a 2D image by an affine transform with NN interpolation Deforms the input image under the given affine transformation using nearest neighbor interpolation. The shape of the resulting image is given by ref_shape. If the affine matrix is None, it is taken as the identity. Parameters ---------- image : array, shape (R, C) the input image to be transformed ref_shape : array, shape (2,) the shape of the resulting image affine : array, shape (3, 3) the affine transform to be applied Returns ------- out : array, shape (R', C') the transformed image Notes ----- The reason it is necessary to provide the intended shape of the resulting image is because the affine transformation is defined on all R^{2} but we must sample a finite lattice. Also the resulting shape may not be necessarily equal to the input shape, unless we are interested on endomorphisms only and not general diffeomorphisms. 
""" cdef: cnp.npy_intp nrows = ref_shape[0] cnp.npy_intp ncols = ref_shape[1] cnp.npy_intp nrVol = image.shape[0] cnp.npy_intp ncVol = image.shape[1] double dii, djj, tmp0 double alpha, beta, calpha, cbeta cnp.npy_intp i, j, ii, jj number[:, :] out = np.zeros((nrows, ncols), dtype=np.asarray(image).dtype) if not is_valid_affine(affine, 2): raise ValueError("Invalid affine transform matrix") with nogil: for i in range(nrows): for j in range(ncols): if affine is not None: dii = _apply_affine_2d_x0(i, j, 1, affine) djj = _apply_affine_2d_x1(i, j, 1, affine) else: dii = i djj = j _interpolate_scalar_nn_2d[number](image, dii, djj, &out[i, j]) return np.asarray(out) def resample_displacement_field_3d(floating[:, :, :, :] field, double[:] factors, int[:] out_shape): r"""Resamples a 3D vector field to a custom target shape Resamples the given 3D displacement field on a grid of the requested shape, using the given scale factors. More precisely, the resulting displacement field at each grid cell i is given by D[i] = field[Diag(factors) * i] Parameters ---------- factors : array, shape (3,) the scaling factors mapping (integer) grid coordinates in the resampled grid to (floating point) grid coordinates in the original grid out_shape : array, shape (3,) the desired shape of the resulting grid Returns ------- expanded : array, shape = out_shape + (3, ) the resampled displacement field """ ftype = np.asarray(field).dtype cdef: cnp.npy_intp tslices = out_shape[0] cnp.npy_intp trows = out_shape[1] cnp.npy_intp tcols = out_shape[2] cnp.npy_intp k, i, j int inside double dkk, dii, djj floating[:, :, :, :] expanded = np.zeros((tslices, trows, tcols, 3), dtype=ftype) for k in range(tslices): for i in range(trows): for j in range(tcols): dkk = k * factors[0] dii = i * factors[1] djj = j * factors[2] _interpolate_vector_3d[floating](field, dkk, dii, djj, &expanded[k, i, j, 0]) return np.asarray(expanded) def resample_displacement_field_2d(floating[:, :, :] field, double[:] factors, int[:] out_shape): r"""Resamples a 2D vector field to a custom target shape Resamples the given 2D displacement field on a grid of the requested shape, using the given scale factors. More precisely, the resulting displacement field at each grid cell i is given by D[i] = field[Diag(factors) * i] Parameters ---------- factors : array, shape (2,) the scaling factors mapping (integer) grid coordinates in the resampled grid to (floating point) grid coordinates in the original grid out_shape : array, shape (2,) the desired shape of the resulting grid Returns ------- expanded : array, shape = out_shape + (2, ) the resampled displacement field """ ftype = np.asarray(field).dtype cdef: cnp.npy_intp trows = out_shape[0] cnp.npy_intp tcols = out_shape[1] cnp.npy_intp i, j int inside double dii, djj floating[:, :, :] expanded = np.zeros((trows, tcols, 2), dtype=ftype) for i in range(trows): for j in range(tcols): dii = i*factors[0] djj = j*factors[1] inside = _interpolate_vector_2d[floating](field, dii, djj, &expanded[i, j, 0]) return np.asarray(expanded) def create_random_displacement_2d(int[:] from_shape, double[:, :] from_grid2world, int[:] to_shape, double[:, :] to_grid2world): r"""Creates a random 2D displacement 'exactly' mapping points of two grids Creates a random 2D displacement field mapping points of an input discrete domain (with dimensions given by from_shape) to points of an output discrete domain (with shape given by to_shape). 
The affine matrices bringing discrete coordinates to physical space are given by from_grid2world (for the displacement field discretization) and to_grid2world (for the target discretization). Since this function is intended to be used for testing, voxels in the input domain will never be assigned to boundary voxels on the output domain. Parameters ---------- from_shape : array, shape (2,) the grid shape where the displacement field will be defined on. from_grid2world : array, shape (3,3) the grid-to-space transformation of the displacement field to_shape : array, shape (2,) the grid shape where the deformation field will map the input grid to. to_grid2world : array, shape (3,3) the grid-to-space transformation of the mapped grid Returns ------- output : array, shape = from_shape the random displacement field in the physical domain int_field : array, shape = from_shape the assignment of each point in the input grid to the target grid """ cdef: cnp.npy_intp i, j, ri, rj double di, dj, dii, djj int[:, :, :] int_field = np.empty(tuple(from_shape) + (2,), dtype=np.int32) double[:, :, :] output = np.zeros(tuple(from_shape) + (2,), dtype=np.float64) cnp.npy_intp dom_size = from_shape[0]*from_shape[1] if not is_valid_affine(from_grid2world, 2): raise ValueError("Invalid 'from' affine transform matrix") if not is_valid_affine(to_grid2world, 2): raise ValueError("Invalid 'to' affine transform matrix") # compute the actual displacement field in the physical space for i in range(from_shape[0]): for j in range(from_shape[1]): # randomly choose where each input grid point will be mapped to in # the target grid ri = np.random.randint(1, to_shape[0]-1) rj = np.random.randint(1, to_shape[1]-1) int_field[i, j, 0] = ri int_field[i, j, 1] = rj # convert the input point to physical coordinates if from_grid2world is not None: di = _apply_affine_2d_x0(i, j, 1, from_grid2world) dj = _apply_affine_2d_x1(i, j, 1, from_grid2world) else: di = i dj = j # convert the output point to physical coordinates if to_grid2world is not None: dii = _apply_affine_2d_x0(ri, rj, 1, to_grid2world) djj = _apply_affine_2d_x1(ri, rj, 1, to_grid2world) else: dii = ri djj = rj # the displacement vector at (i,j) must be the target point minus # the original point, both in physical space output[i, j, 0] = dii - di output[i, j, 1] = djj - dj return np.asarray(output), np.asarray(int_field) def create_random_displacement_3d(int[:] from_shape, double[:, :] from_grid2world, int[:] to_shape, double[:, :] to_grid2world): r"""Creates a random 3D displacement 'exactly' mapping points of two grids Creates a random 3D displacement field mapping points of an input discrete domain (with dimensions given by from_shape) to points of an output discrete domain (with shape given by to_shape). The affine matrices bringing discrete coordinates to physical space are given by from_grid2world (for the displacement field discretization) and to_grid2world (for the target discretization). Since this function is intended to be used for testing, voxels in the input domain will never be assigned to boundary voxels on the output domain. Parameters ---------- from_shape : array, shape (3,) the grid shape where the displacement field will be defined on. from_grid2world : array, shape (4,4) the grid-to-space transformation of the displacement field to_shape : array, shape (3,) the grid shape where the deformation field will map the input grid to. 
to_grid2world : array, shape (4,4) the grid-to-space transformation of the mapped grid Returns ------- output : array, shape = from_shape the random displacement field in the physical domain int_field : array, shape = from_shape the assignment of each point in the input grid to the target grid """ cdef: cnp.npy_intp i, j, k, ri, rj, rk double di, dj, dk, dii, djj, dkk int[:, :, :, :] int_field = np.empty(tuple(from_shape) + (3,), dtype=np.int32) double[:, :, :, :] output = np.zeros(tuple(from_shape) + (3,), dtype=np.float64) cnp.npy_intp dom_size = from_shape[0]*from_shape[1]*from_shape[2] if not is_valid_affine(from_grid2world, 3): raise ValueError("Invalid 'from' affine transform matrix") if not is_valid_affine(to_grid2world, 3): raise ValueError("Invalid 'to' affine transform matrix") # compute the actual displacement field in the physical space for k in range(from_shape[0]): for i in range(from_shape[1]): for j in range(from_shape[2]): # randomly choose the location of each point on the target grid rk = np.random.randint(1, to_shape[0]-1) ri = np.random.randint(1, to_shape[1]-1) rj = np.random.randint(1, to_shape[2]-1) int_field[k, i, j, 0] = rk int_field[k, i, j, 1] = ri int_field[k, i, j, 2] = rj # convert the input point to physical coordinates if from_grid2world is not None: dk = _apply_affine_3d_x0(k, i, j, 1, from_grid2world) di = _apply_affine_3d_x1(k, i, j, 1, from_grid2world) dj = _apply_affine_3d_x2(k, i, j, 1, from_grid2world) else: dk = k di = i dj = j # convert the output point to physical coordinates if to_grid2world is not None: dkk = _apply_affine_3d_x0(rk, ri, rj, 1, to_grid2world) dii = _apply_affine_3d_x1(rk, ri, rj, 1, to_grid2world) djj = _apply_affine_3d_x2(rk, ri, rj, 1, to_grid2world) else: dkk = rk dii = ri djj = rj # the displacement vector at (i,j) must be the target point minus # the original point, both in physical space output[k, i, j, 0] = dkk - dk output[k, i, j, 1] = dii - di output[k, i, j, 2] = djj - dj return np.asarray(output), np.asarray(int_field) def create_harmonic_fields_2d(cnp.npy_intp nrows, cnp.npy_intp ncols, double b, double m): r"""Creates an invertible 2D displacement field Creates the invertible displacement fields used in Chen et al. eqs. 9 and 10 [1] Parameters ---------- nrows : int number of rows in the resulting harmonic field ncols : int number of columns in the resulting harmonic field b, m : float parameters of the harmonic field (as in [1]). To understand the effect of these parameters, please consider plotting a deformed image (a circle or a grid) under the deformation field, or see examples in [1] Returns ------- d : array, shape (nrows, ncols, 2) the harmonic displacement field inv : array, shape (nrows, ncols, 2) the analitical inverse of the harmonic displacement field [1] Chen, M., Lu, W., Chen, Q., Ruchala, K. J., & Olivera, G. H. (2008). A simple fixed-point approach to invert a deformation field. Medical Physics, 35(1), 81. 
doi:10.1118/1.2816107 """ cdef: cnp.npy_intp mid_row = nrows/2 cnp.npy_intp mid_col = ncols/2 cnp.npy_intp i, j, ii, jj double theta double[:, :, :] d = np.zeros((nrows, ncols, 2), dtype=np.float64) double[:, :, :] inv = np.zeros((nrows, ncols, 2), dtype=np.float64) for i in range(nrows): for j in range(ncols): ii = i - mid_row jj = j - mid_col theta = atan2(ii, jj) d[i, j, 0] = ii * (1.0 / (1 + b * cos(m * theta)) - 1.0) d[i, j, 1] = jj * (1.0 / (1 + b * cos(m * theta)) - 1.0) inv[i, j, 0] = b * cos(m * theta) * ii inv[i, j, 1] = b * cos(m * theta) * jj return np.asarray(d), np.asarray(inv) def create_harmonic_fields_3d(int nslices, cnp.npy_intp nrows, cnp.npy_intp ncols, double b, double m): r"""Creates an invertible 3D displacement field Creates the invertible displacement fields used in Chen et al. eqs. 9 and 10 [1] computing the angle theta along z-slides. Parameters ---------- nslices : int number of slices in the resulting harmonic field nrows : int number of rows in the resulting harmonic field ncols : int number of columns in the resulting harmonic field b, f : float parameters of the harmonic field (as in [1]). To understand the effect of these parameters, please consider plotting a deformed image (e.g. a circle or a grid) under the deformation field, or see examples in [1] Returns ------- d : array, shape (nslices, nrows, ncols, 3) the harmonic displacement field inv : array, shape (nslices, nrows, ncols, 3) the analitical inverse of the harmonic displacement field [1] Chen, M., Lu, W., Chen, Q., Ruchala, K. J., & Olivera, G. H. (2008). A simple fixed-point approach to invert a deformation field. Medical Physics, 35(1), 81. doi:10.1118/1.2816107 """ cdef: cnp.npy_intp mid_slice = nslices / 2 cnp.npy_intp mid_row = nrows / 2 cnp.npy_intp mid_col = ncols / 2 cnp.npy_intp i, j, k, ii, jj, kk double theta double[:, :, :, :] d = np.zeros((nslices, nrows, ncols, 3), dtype=np.float64) double[:, :, :, :] inv = np.zeros((nslices, nrows, ncols, 3), dtype=np.float64) for k in range(nslices): for i in range(nrows): for j in range(ncols): kk = k - mid_slice ii = i - mid_row jj = j - mid_col theta = atan2(ii, jj) d[k, i, j, 0] = kk * (1.0 / (1 + b * cos(m * theta)) - 1.0) d[k, i, j, 1] = ii * (1.0 / (1 + b * cos(m * theta)) - 1.0) d[k, i, j, 2] = jj * (1.0 / (1 + b * cos(m * theta)) - 1.0) inv[k, i, j, 0] = b * cos(m * theta) * kk inv[k, i, j, 1] = b * cos(m * theta) * ii inv[k, i, j, 2] = b * cos(m * theta) * jj return np.asarray(d), np.asarray(inv) def create_circle(cnp.npy_intp nrows, cnp.npy_intp ncols, cnp.npy_intp radius): r""" Create a binary 2D image where pixel values are 1 iff their distance to the center of the image is less than or equal to radius. Parameters ---------- nrows : int number of rows of the resulting image ncols : int number of columns of the resulting image radius : int the radius of the circle Returns ------- c : array, shape (nrows, ncols) the binary image of the circle with the requested dimensions """ cdef: cnp.npy_intp mid_row = nrows/2 cnp.npy_intp mid_col = ncols/2 cnp.npy_intp i, j, ii, jj double r double[:, :] c = np.zeros((nrows, ncols), dtype=np.float64) for i in range(nrows): for j in range(ncols): ii = i - mid_row jj = j - mid_col r = sqrt(ii*ii + jj*jj) if r <= radius: c[i, j] = 1 else: c[i, j] = 0 return np.asarray(c) def create_sphere(cnp.npy_intp nslices, cnp.npy_intp nrows, cnp.npy_intp ncols, cnp.npy_intp radius): r""" Create a binary 3D image where voxel values are 1 iff their distance to the center of the image is less than or equal to radius. 
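For instance (sizes are illustrative), the call

    s = create_sphere(64, 64, 64, 15)

returns a 64x64x64 volume whose nonzero voxels form a ball of radius 15
centered at the middle of the grid.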
Parameters ---------- nslices : int number if slices of the resulting image nrows : int number of rows of the resulting image ncols : int number of columns of the resulting image radius : int the radius of the sphere Returns ------- c : array, shape (nslices, nrows, ncols) the binary image of the sphere with the requested dimensions """ cdef: cnp.npy_intp mid_slice = nslices/2 cnp.npy_intp mid_row = nrows/2 cnp.npy_intp mid_col = ncols/2 cnp.npy_intp i, j, k, ii, jj, kk double r double[:, :, :] s = np.zeros((nslices, nrows, ncols), dtype=np.float64) for k in range(nslices): for i in range(nrows): for j in range(ncols): kk = k - mid_slice ii = i - mid_row jj = j - mid_col r = sqrt(ii*ii + jj*jj + kk*kk) if r <= radius: s[k, i, j] = 1 else: s[k, i, j] = 0 return np.asarray(s) def _gradient_3d(floating[:, :, :] img, double[:, :] img_world2grid, double[:] img_spacing, double[:, :] out_grid2world, floating[:, :, :, :] out, int[:, :, :] inside): r""" Gradient of a 3D image in physical space coordinates Each grid cell (i, j, k) in the sampling grid (determined by out.shape) is mapped to its corresponding physical point (x, y, z) by multiplying out_grid2world (its grid-to-space transform) by (i, j, k), then the image is interpolated, at P1=(x + h, y, z), Q1=(x - h, y, z) P2=(x, y + h, z), Q2=(x, y - h, z) P3=(x, y, z + h), Q3=(x, y, z - h) (by mapping Pi and Qi to the grid using img_world2grid: the inverse of the grid-to-space transform of img). The displacement parameter h is of magnitude 0.5 (in physical space units), therefore the approximated partial derivatives are given by the difference between the image interpolated at Pi and Qi. Parameters ---------- img : array, shape (S, R, C) the input volume whose gradient will be computed img_world2grid : array, shape (4, 4) the space-to-grid transform matrix associated to img img_spacing : array, shape (3,) the spacing between voxels (voxel size along each axis) of the input volume out_grid2world : array, shape (4, 4) the grid-to-space transform associated to the sampling grid out : array, shape (S', R', C', 3) the buffer in which to store the image gradient inside : array, shape (S', R', C') the buffer in which to store the flags indicating whether the sample point lies inside (=1) or outside (=0) the image grid """ cdef: int nslices = out.shape[0] int nrows = out.shape[1] int ncols = out.shape[2] int i, j, k, in_flag double tmp double[:] x = np.empty(shape=(3,), dtype=np.float64) double[:] dx = np.empty(shape=(3,), dtype=np.float64) double[:] h = np.empty(shape=(3,), dtype=np.float64) double[:] q = np.empty(shape=(3,), dtype=np.float64) with nogil: h[0] = 0.5 * img_spacing[0] h[1] = 0.5 * img_spacing[1] h[2] = 0.5 * img_spacing[2] for k in range(nslices): for i in range(nrows): for j in range(ncols): inside[k, i, j] = 1 # Compute coordinates of index (k, i, j) in physical space x[0] = _apply_affine_3d_x0(k, i, j, 1, out_grid2world) x[1] = _apply_affine_3d_x1(k, i, j, 1, out_grid2world) x[2] = _apply_affine_3d_x2(k, i, j, 1, out_grid2world) dx[:] = x[:] for p in range(3): # Compute coordinates of point dx on img's grid dx[p] = x[p] - h[p] q[0] = _apply_affine_3d_x0(dx[0], dx[1], dx[2], 1, img_world2grid) q[1] = _apply_affine_3d_x1(dx[0], dx[1], dx[2], 1, img_world2grid) q[2] = _apply_affine_3d_x2(dx[0], dx[1], dx[2], 1, img_world2grid) # Interpolate img at q in_flag = _interpolate_scalar_3d[floating](img, q[0], q[1], q[2], &out[k, i, j, p]) if in_flag == 0: out[k, i, j, p] = 0 inside[k, i, j] = 0 continue tmp = out[k, i, j, p] # Compute coordinates 
of point dx on img's grid dx[p] = x[p] + h[p] q[0] = _apply_affine_3d_x0(dx[0], dx[1], dx[2], 1, img_world2grid) q[1] = _apply_affine_3d_x1(dx[0], dx[1], dx[2], 1, img_world2grid) q[2] = _apply_affine_3d_x2(dx[0], dx[1], dx[2], 1, img_world2grid) # Interpolate img at q in_flag = _interpolate_scalar_3d[floating](img, q[0], q[1], q[2], &out[k, i, j, p]) if in_flag == 0: out[k, i, j, p] = 0 inside[k, i, j] = 0 continue out[k, i, j, p] = (out[k, i, j, p] - tmp) / img_spacing[p] dx[p] = x[p] def _sparse_gradient_3d(floating[:, :, :] img, double[:, :] img_world2grid, double[:] img_spacing, double[:, :] sample_points, floating[:, :] out, int[:] inside): r""" Gradient of a 3D image evaluated at a set of points in physical space For each row (x_i, y_i, z_i) in sample_points, the image is interpolated at P1=(x_i + h, y_i, z_i), Q1=(x_i - h, y_i, z_i) P2=(x_i, y_i + h, z_i), Q2=(x_i, y_i - h, z_i) P3=(x_i, y_i, z_i + h), Q3=(x_i, y_i, z_i - h) (by mapping Pi and Qi to the grid using img_world2grid: the inverse of the grid-to-space transform of img). The displacement parameter h is of magnitude 0.5 (in physical space units), therefore the approximated partial derivatives are given by the difference between the image interpolated at Pi and Qi. Parameters ---------- img : array, shape (S, R, C) the input volume whose gradient will be computed img_world2grid : array, shape (4, 4) the space-to-grid transform matrix associated to img img_spacing : array, shape (3,) the spacing between voxels (voxel size along each axis) of the input sample_points: array, shape (n, 3) list of points where the derivative will be evaluated (one point per row) out : array, shape (n, 3) the buffer in which to store the image gradient """ cdef: int n = sample_points.shape[0] int i, in_flag double tmp double[:] dx = np.empty(shape=(3,), dtype=np.float64) double[:] h = np.empty(shape=(3,), dtype=np.float64) double[:] q = np.empty(shape=(3,), dtype=np.float64) with nogil: h[0] = 0.5 * img_spacing[0] h[1] = 0.5 * img_spacing[1] h[2] = 0.5 * img_spacing[2] for i in range(n): inside[i] = 1 dx[0] = sample_points[i, 0] dx[1] = sample_points[i, 1] dx[2] = sample_points[i, 2] for p in range(3): # Compute coordinates of point dx on img's grid dx[p] = sample_points[i, p] - h[p] q[0] = _apply_affine_3d_x0(dx[0], dx[1], dx[2], 1, img_world2grid) q[1] = _apply_affine_3d_x1(dx[0], dx[1], dx[2], 1, img_world2grid) q[2] = _apply_affine_3d_x2(dx[0], dx[1], dx[2], 1, img_world2grid) # Interpolate img at q in_flag = _interpolate_scalar_3d[floating](img, q[0], q[1], q[2], &out[i, p]) if in_flag == 0: out[i, p] = 0 inside[i] = 0 continue tmp = out[i, p] # Compute coordinates of point dx on img's grid dx[p] = sample_points[i, p] + h[p] q[0] = _apply_affine_3d_x0(dx[0], dx[1], dx[2], 1, img_world2grid) q[1] = _apply_affine_3d_x1(dx[0], dx[1], dx[2], 1, img_world2grid) q[2] = _apply_affine_3d_x2(dx[0], dx[1], dx[2], 1, img_world2grid) # Interpolate img at q in_flag = _interpolate_scalar_3d[floating](img, q[0], q[1], q[2], &out[i, p]) if in_flag == 0: out[i, p] = 0 inside[i] = 0 continue out[i, p] = (out[i, p] - tmp) / img_spacing[p] dx[p] = sample_points[i, p] def _gradient_2d(floating[:, :] img, double[:, :] img_world2grid, double[:] img_spacing, double[:, :] out_grid2world, floating[:, :, :] out, int[:, :] inside): r""" Gradient of a 2D image in physical space coordinates Each grid cell (i, j) in the sampling grid (determined by out.shape) is mapped to its corresponding physical point (x, y) by multiplying out_grid2world (its grid-to-space transform) 
by (i, j), then the image is interpolated, at P1=(x + h, y), Q1=(x - h, y) P2=(x, y + h), Q2=(x, y - h) (by mapping Pi and Qi to the grid using img_world2grid: the inverse of the grid-to-space transform of img). The displacement parameter h is of magnitude 0.5 (in physical space units), therefore the approximated partial derivatives are given by the difference between the image interpolated at Pi and Qi. Parameters ---------- img : array, shape (R, C) the input image whose gradient will be computed img_world2grid : array, shape (3, 3) the space-to-grid transform matrix associated to img img_spacing : array, shape (2,) the spacing between pixels (pixel size along each axis) of the input image out_grid2world : array, shape (3, 3) the grid-to-space transform associated to the sampling grid out : array, shape (S', R', 2) the buffer in which to store the image gradient inside : array, shape (S', R') the buffer in which to store the flags indicating whether the sample point lies inside (=1) or outside (=0) the image grid """ cdef: int nrows = out.shape[0] int ncols = out.shape[1] int i, j, k, in_flag double tmp double[:] x = np.empty(shape=(2,), dtype=np.float64) double[:] dx = np.empty(shape=(2,), dtype=np.float64) double[:] h = np.empty(shape=(2,), dtype=np.float64) double[:] q = np.empty(shape=(2,), dtype=np.float64) with nogil: h[0] = 0.5 * img_spacing[0] h[1] = 0.5 * img_spacing[1] for i in range(nrows): for j in range(ncols): inside[i, j] = 1 # Compute coordinates of index (i, j) in physical space x[0] = _apply_affine_2d_x0(i, j, 1, out_grid2world) x[1] = _apply_affine_2d_x1(i, j, 1, out_grid2world) dx[:] = x[:] for p in range(2): # Compute coordinates of point dx on img's grid dx[p] = x[p] - h[p] q[0] = _apply_affine_2d_x0(dx[0], dx[1], 1, img_world2grid) q[1] = _apply_affine_2d_x1(dx[0], dx[1], 1, img_world2grid) # Interpolate img at q in_flag = _interpolate_scalar_2d[floating](img, q[0], q[1], &out[i, j, p]) if in_flag == 0: out[i, j, p] = 0 inside[i, j] = 0 continue tmp = out[i, j, p] # Compute coordinates of point dx on img's grid dx[p] = x[p] + h[p] q[0] = _apply_affine_2d_x0(dx[0], dx[1], 1, img_world2grid) q[1] = _apply_affine_2d_x1(dx[0], dx[1], 1, img_world2grid) # Interpolate img at q in_flag = _interpolate_scalar_2d[floating](img, q[0], q[1], &out[i, j, p]) if in_flag == 0: out[i, j, p] = 0 inside[i, j] = 0 continue out[i, j, p] = (out[i, j, p] - tmp) / img_spacing[p] dx[p] = x[p] def _sparse_gradient_2d(floating[:, :] img, double[:, :] img_world2grid, double[:] img_spacing, double[:, :] sample_points, floating[:, :] out, int[:] inside): r""" Gradient of a 2D image evaluated at a set of points in physical space For each row (x_i, y_i) in sample_points, the image is interpolated at P1=(x_i + h, y_i), Q1=(x_i - h, y_i) P2=(x_i, y_i + h), Q2=(x_i, y_i - h) (by mapping Pi and Qi to the grid using img_world2grid: the inverse of the grid-to-space transform of img). The displacement parameter h is of magnitude 0.5 (in physical space units), therefore the approximated partial derivatives are given by the difference between the image interpolated at Pi and Qi. 
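Equivalently, writing h_p = img_spacing[p] / 2 and e_p for the p-th physical
axis, each gradient component is the central difference

    g_p(x) = (img(x + h_p * e_p) - img(x - h_p * e_p)) / img_spacing[p]

with both sample points mapped back to the image grid by img_world2grid and
evaluated by bilinear interpolation.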
Parameters ---------- img : array, shape (R, C) the input volume whose gradient will be computed img_world2grid : array, shape (3, 3) the space-to-grid transform matrix associated to img img_spacing : array, shape (2,) the spacing between pixels (pixel size along each axis) of the input sample_points: array, shape (n, 2) list of points where the derivative will be evaluated (one point per row) out : array, shape (n, 2) the buffer in which to store the image gradient inside : array, shape (n,) the buffer in which to store the flags indicating whether the sample point lies inside (=1) or outside (=0) the image grid """ cdef: int n = sample_points.shape[0] int i, in_flag double tmp double[:] dx = np.empty(shape=(2,), dtype=np.float64) double[:] h = np.empty(shape=(2,), dtype=np.float64) double[:] q = np.empty(shape=(2,), dtype=np.float64) with nogil: h[0] = 0.5 * img_spacing[0] h[1] = 0.5 * img_spacing[1] for i in range(n): inside[i] = 1 dx[0] = sample_points[i, 0] dx[1] = sample_points[i, 1] for p in range(2): # Compute coordinates of point dx on img's grid dx[p] = sample_points[i, p] - h[p] q[0] = _apply_affine_2d_x0(dx[0], dx[1], 1, img_world2grid) q[1] = _apply_affine_2d_x1(dx[0], dx[1], 1, img_world2grid) # Interpolate img at q in_flag = _interpolate_scalar_2d[floating](img, q[0], q[1], &out[i, p]) if in_flag == 0: out[i, p] = 0 inside[i] = 0 continue tmp = out[i, p] # Compute coordinates of point dx on img's grid dx[p] = sample_points[i, p] + h[p] q[0] = _apply_affine_2d_x0(dx[0], dx[1], 1, img_world2grid) q[1] = _apply_affine_2d_x1(dx[0], dx[1], 1, img_world2grid) # Interpolate img at q in_flag = _interpolate_scalar_2d[floating](img, q[0], q[1], &out[i, p]) if in_flag == 0: out[i, p] = 0 inside[i] = 0 continue out[i, p] = (out[i, p] - tmp) / img_spacing[p] dx[p] = sample_points[i, p] def gradient(img, img_world2grid, img_spacing, out_shape, out_grid2world): r""" Gradient of an image in physical space Parameters ---------- img : 2D or 3D array, shape (R, C) or (S, R, C) the input image whose gradient will be computed img_world2grid : array, shape (dim+1, dim+1) the space-to-grid transform matrix associated to img img_spacing : array, shape (dim,) the spacing between voxels (voxel size along each axis) of the input image out_shape : array, shape (dim,) the number of (slices), rows and columns of the sampling grid out_grid2world : array, shape (dim+1, dim+1) the grid-to-space transform associated to the sampling grid Returns ------- out : array, shape (R', C', 2) or (S', R', C', 3) the buffer in which to store the image gradient, where (S'), R', C' are given by out_shape """ dim = len(img.shape) if not is_valid_affine(img_world2grid, dim): raise ValueError("Invalid image affine transform") if not is_valid_affine(out_grid2world, dim): raise ValueError("Invalid sampling grid affine transform") if len(img_spacing) < dim: raise ValueError("Invalid spacings") ftype = img.dtype.type out = np.empty(tuple(out_shape)+(dim,), dtype=ftype) inside = np.empty(tuple(out_shape), dtype=np.int32) # Select joint density gradient 2D or 3D if dim == 2: jd_grad = _gradient_2d elif dim == 3: jd_grad = _gradient_3d else: raise ValueError('Undefined gradient for image dimension %d' % (dim,)) if img_world2grid.dtype != np.float64: img_world2grid = img_world2grid.astype(np.float64) if img_spacing.dtype != np.float64: img_spacing = img_spacing.astype(np.float64) if out_grid2world.dtype != np.float64: out_grid2world = out_grid2world.astype(np.float64) jd_grad(img, img_world2grid, img_spacing, out_grid2world, out, 
inside) return np.asarray(out), np.asarray(inside) def sparse_gradient(img, img_world2grid, img_spacing, sample_points): r""" Gradient of an image in physical space Parameters ---------- img : 2D or 3D array, shape (R, C) or (S, R, C) the input image whose gradient will be computed img_world2grid : array, shape (dim+1, dim+1) the space-to-grid transform matrix associated to img img_spacing : array, shape (dim,) the spacing between voxels (voxel size along each axis) of the input image sample_points: array, shape (n, dim) list of points where the derivative will be evaluated (one point per row) Returns ------- out : array, shape (n, dim) the gradient at each point stored at its corresponding row """ dim = len(img.shape) if not is_valid_affine(img_world2grid, dim): raise ValueError("Invalid affine transform matrix") if len(img_spacing) < dim: raise ValueError("Invalid spacings") ftype = img.dtype.type n = sample_points.shape[0] out = np.empty(shape=(n, dim), dtype=ftype) inside = np.empty(shape=(n,), dtype=np.int32) # Select joint density gradient 2D or 3D if dim == 2: jd_grad = _sparse_gradient_2d else: jd_grad = _sparse_gradient_3d if img_world2grid.dtype != np.float64: img_world2grid = img_world2grid.astype(np.float64) if img_spacing.dtype != np.float64: img_spacing = img_spacing.astype(np.float64) jd_grad(img, img_world2grid, img_spacing, sample_points, out, inside) return np.asarray(out), np.asarray(inside) dipy-0.13.0/dipy/boots/000077500000000000000000000000001317371701200146355ustar00rootroot00000000000000dipy-0.13.0/dipy/boots/__init__.py000066400000000000000000000000421317371701200167420ustar00rootroot00000000000000# Init for core.stat dipy objects dipy-0.13.0/dipy/boots/resampling.py000066400000000000000000000232051317371701200173520ustar00rootroot00000000000000#!/usr/bin/python import numpy as np import scipy as sp import warnings warningMsg = "This module is most likely to change both as\ a name and in structure in the future" warnings.warn(warningMsg, FutureWarning) def bs_se(bs_pdf): """ Calculates the bootstrap standard error estimate of a statistic """ N = len(bs_pdf) return np.std(bs_pdf) * np.sqrt(N / (N - 1)) def bootstrap(x, statistic=bs_se, B=1000, alpha=0.95): """ Bootstrap resampling [1]_ to accurately estimate the standard error and confidence interval of a desired statistic of a probability distribution function (pdf). Parameters ------------ x : ndarray (N, 1) Observable sample to resample. N should be reasonably large. statistic : method (optional) Method to calculate the desired statistic. (Default: calculate bootstrap standard error) B : integer (optional) Total number of bootstrap resamples in bootstrap pdf. (Default: 1000) alpha : float (optional) Percentile for confidence interval of the statistic. (Default: 0.05) Returns --------- bs_pdf : ndarray (M, 1) Jackknife probabilisty distribution function of the statistic. se : float Standard error of the statistic. ci : ndarray (2, 1) Confidence interval of the statistic. See Also ----------- numpy.std, numpy.random.random Notes -------- Bootstrap resampling is non parametric. It is quite powerful in determining the standard error and the confidence interval of a sample distribution. The key characteristics of bootstrap is: 1) uniform weighting among all samples (1/n) 2) resampling with replacement In general, the sample size should be large to ensure accuracy of the estimates. The number of bootstrap resamples should be large as well as that will also influence the accuracy of the estimate. 
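A rough NumPy sketch of a single bootstrap replicate (illustrative only; the
implementation below draws B such replicates and summarizes them):

    idx = np.random.randint(0, len(x), size=len(x))  # resample with replacement
    replicate = statistic(x[idx])                    # statistic of the resample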
References ---------- .. [1] Efron, B., 1979. 1977 Rietz lecture--Bootstrap methods--Another look at the jackknife. Ann. Stat. 7, 1-26. """ N = len(x) pdf_mask = np.ones((N,), dtype='int16') bs_pdf = np.empty((B,)) for ii in range(0, B): # resample with replacement rand_index = np.int16(np.round(np.random.random(N) * (N - 1))) bs_pdf[ii] = statistic(x[rand_index]) return bs_pdf, bs_se(bs_pdf), abc(x, statistic, alpha=alpha) def abc(x, statistic=bs_se, alpha=0.05, eps=1e-5): """ Calculates the bootstrap confidence interval by approximating the BCa. Parameters ---------- x : np.ndarray Observed data (e.g. chosen gold standard estimate used for bootstrap) statistic : method Method to calculate the desired statistic given x and probability proportions (flat probability densities vector) alpha : float (0, 1) Desired confidence interval initial endpoint (Default: 0.05) eps : float (optional) Specifies step size in calculating numerical derivative T' and T''. Default: 1e-5 See Also -------- __tt, __tt_dot, __tt_dot_dot, __calc_z0 Notes ----- Unlike the BCa method of calculating the bootstrap confidence interval, the ABC method is computationally less demanding (about 3% computational power needed) and is fairly accurate (sometimes out performing BCa!). It does not require any bootstrap resampling and instead uses numerical derivatives via Taylor series to approximate the BCa calculation. However, the ABC method requires the statistic to be smooth and follow a multinomial distribution. References ---------- .. [2] DiCiccio, T.J., Efron, B., 1996. Bootstrap Confidence Intervals. Statistical Science. 11, 3, 189-228. """ # define base variables -- n, p_0, sigma_hat, delta_hat n = len(x) p_0 = np.ones(x.shape) / n sigma_hat = np.zeros(x.shape) delta_hat = np.zeros(x.shape) for i in range(0, n): sigma_hat[i] = __tt_dot(i, x, p_0, statistic, eps)**2 delta_hat[i] = __tt_dot(i, x, p_0, statistic, eps) sigma_hat = (sigma_hat / n**2)**0.5 # estimate the bias (z_0) and the acceleration (a_hat) a_hat = np.zeros(x.shape) a_num = np.zeros(x.shape) a_dem = np.zeros(x.shape) for i in range(0, n): a_num[i] = __tt_dot(i, x, p_0, statistic, eps)**3 a_dem[i] = __tt_dot(i, x, p_0, statistic, eps)**2 a_hat = 1 / 6 * a_num / a_dem**1.5 z_0 = __calc_z0(x, p_0, statistic, eps, a_hat, sigma_hat) # define helper variables -- w and l w = z_0 + __calc_z_alpha(1 - alpha) l = w / (1 - a_hat * w)**2 return __tt(x, p_0 + l * delta_hat / sigma_hat, statistic) def __calc_z_alpha(alpha): """ Classic "quantile function" that calculates inverse of cdf of standard normal. """ return 2**0.5 * sp.special.erfinv(2 * alpha - 1) def __calc_z0(x, p_0, statistic, eps, a_hat, sigma_hat): """ Function that calculates the bias z_0 for abc method. See Also ---------- abc, __tt, __tt_dot, __tt_dot_dot """ n = len(x) b_hat = np.ones(x.shape) c_q_hat = np.ones(x.shape) tt_dot = np.ones(x.shape) for i in range(0, n): b_hat[i] = __tt_dot_dot(i, x, p_0, statistic, eps) tt_dot[i] = __tt_dot(i, x, p_0, statistic, eps) b_hat = b_hat / (2 * n**2) c_q_hat = (__tt(x, ((1 - eps) * p_0 + eps * tt_dot / (n**2 * sigma_hat)), statistic) + __tt(x, ((1 - eps) * p_0 - eps * tt_dot / (n**2 * sigma_hat)), statistic) - 2 * __tt(x, p_0, statistic)) / eps**2 return a_hat - (b_hat / sigma_hat - c_q_hat) def __tt(x, p_0, statistic=bs_se): """ Function that calculates desired statistic from observable data and a given proportional weighting. Parameters ------------ x : np.ndarray Observable data (e.g. from gold standard). 
p_0 : np.ndarray Proportional weighting vector (Default: uniform weighting 1/n) Returns ------- theta_hat : float Desired statistic of the observable data. See Also ----------- abc, __tt_dot, __tt_dot_dot """ return statistic(x / p_0) def __tt_dot(i, x, p_0, statistic, eps): """ First numerical derivative of __tt """ e = np.zeros(x.shape) e[i] = 1 return ((__tt(x, ((1 - eps) * p_0 + eps * e[i]), statistic) - __tt(x, p_0, statistic)) / eps) def __tt_dot_dot(i, x, p_0, statistic, eps): """ Second numerical derivative of __tt """ e = np.zeros(x.shape) e[i] = 1 return (__tt_dot(i, x, p_0, statistic, eps) / eps + (__tt(x, ((1 - eps) * p_0 - eps * e[i]), statistic) - __tt(x, p_0, statistic)) / eps**2) def jackknife(pdf, statistic=np.std, M=None): """ Jackknife resampling [3]_ to quickly estimate the bias and standard error of a desired statistic in a probability distribution function (pdf). Parameters ------------ pdf : ndarray (N, 1) Probability distribution function to resample. N should be reasonably large. statistic : method (optional) Method to calculate the desired statistic. (Default: calculate standard deviation) M : integer (M < N) Total number of samples in jackknife pdf. (Default: M == N) Returns --------- jk_pdf : ndarray (M, 1) Jackknife probabilisty distribution function of the statistic. bias : float Bias of the jackknife pdf of the statistic. se : float Standard error of the statistic. See Also ----------- numpy.std, numpy.mean, numpy.random.random Notes -------- Jackknife resampling like bootstrap resampling is non parametric. However, it requires a large distribution to be accurate and in some ways can be considered deterministic (if one removes the same set of samples, then one will get the same estimates of the bias and variance). In the context of this implementation, the sample size should be at least larger than the asymptotic convergence of the statistic (ACstat); preferably, larger than ACstat + np.greater(ACbias, ACvar) The clear benefit of using jackknife is its ability to estimate the bias of the statistic. The most powerful application of this is estimating the bias of a bootstrap-estimated standard error. In fact, one could "bootstrap the bootstrap" (nested bootstrap) of the estimated standard error, but the inaccuracy of the bootstrap to characterize the true mean would incur a poor estimate of the bias (recall: bias = mean[sample_est] - mean[true population]) References ------------- .. [3] Efron, B., 1979. 1977 Rietz lecture--Bootstrap methods--Another look at the jackknife. Ann. Stat. 7, 1-26. 
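Examples
--------
A minimal usage sketch (illustrative only; the subsampling is random, so
the estimates will vary from run to run):

>>> import numpy as np
>>> pdf = np.random.normal(0, 1, size=500)  # doctest: +SKIP
>>> jk_pdf, bias, se = jackknife(pdf, statistic=np.std, M=100)  # doctest: +SKIP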
""" N = len(pdf) pdf_mask = np.ones((N,), dtype='int16') # keeps track of all n - 1 indexes mask_index = np.copy(pdf_mask) if M is None: M = N M = np.minimum(M, N - 1) jk_pdf = np.empty((M,)) for ii in range(0, M): rand_index = np.round(np.random.random() * (N - 1)) # choose a unique random sample to remove while pdf_mask[rand_index] == 0: rand_index = np.round(np.random.random() * (N - 1)) # set mask to zero for chosen random index so not to choose again pdf_mask[rand_index] = 0 mask_index[rand_index] = 0 jk_pdf[ii] = statistic(pdf[mask_index > 0]) # compute n-1 statistic mask_index[rand_index] = 1 return (jk_pdf, (N - 1) * (np.mean(jk_pdf) - statistic(pdf)), np.sqrt(N - 1) * np.std(jk_pdf)) def residual_bootstrap(data): pass def repetition_bootstrap(data): pass dipy-0.13.0/dipy/core/000077500000000000000000000000001317371701200144375ustar00rootroot00000000000000dipy-0.13.0/dipy/core/__init__.py000066400000000000000000000002041317371701200165440ustar00rootroot00000000000000# Init for core dipy objects """ Core objects """ # Test callable from numpy.testing import Tester test = Tester().test del Tester dipy-0.13.0/dipy/core/geometry.py000066400000000000000000000745331317371701200166600ustar00rootroot00000000000000''' Utility functions for algebra etc ''' from __future__ import division, print_function, absolute_import import math import numpy as np import numpy.linalg as npl from dipy.testing import setup_test # epsilon for testing whether a number is close to zero _EPS = np.finfo(float).eps * 4.0 # axis sequences for Euler angles _NEXT_AXIS = [1, 2, 0, 1] # map axes strings to/from tuples of inner axis, parity, repetition, frame _AXES2TUPLE = { 'sxyz': (0, 0, 0, 0), 'sxyx': (0, 0, 1, 0), 'sxzy': (0, 1, 0, 0), 'sxzx': (0, 1, 1, 0), 'syzx': (1, 0, 0, 0), 'syzy': (1, 0, 1, 0), 'syxz': (1, 1, 0, 0), 'syxy': (1, 1, 1, 0), 'szxy': (2, 0, 0, 0), 'szxz': (2, 0, 1, 0), 'szyx': (2, 1, 0, 0), 'szyz': (2, 1, 1, 0), 'rzyx': (0, 0, 0, 1), 'rxyx': (0, 0, 1, 1), 'ryzx': (0, 1, 0, 1), 'rxzx': (0, 1, 1, 1), 'rxzy': (1, 0, 0, 1), 'ryzy': (1, 0, 1, 1), 'rzxy': (1, 1, 0, 1), 'ryxy': (1, 1, 1, 1), 'ryxz': (2, 0, 0, 1), 'rzxz': (2, 0, 1, 1), 'rxyz': (2, 1, 0, 1), 'rzyz': (2, 1, 1, 1)} _TUPLE2AXES = dict((v, k) for k, v in _AXES2TUPLE.items()) def sphere2cart(r, theta, phi): ''' Spherical to Cartesian coordinates This is the standard physics convention where `theta` is the inclination (polar) angle, and `phi` is the azimuth angle. Imagine a sphere with center (0,0,0). Orient it with the z axis running south-north, the y axis running west-east and the x axis from posterior to anterior. `theta` (the inclination angle) is the angle to rotate from the z-axis (the zenith) around the y-axis, towards the x axis. Thus the rotation is counter-clockwise from the point of view of positive y. `phi` (azimuth) gives the angle of rotation around the z-axis towards the y axis. The rotation is counter-clockwise from the point of view of positive z. Equivalently, given a point P on the sphere, with coordinates x, y, z, `theta` is the angle between P and the z-axis, and `phi` is the angle between the projection of P onto the XY plane, and the X axis. 
Geographical nomenclature designates theta as 'co-latitude', and phi as 'longitude' Parameters ------------ r : array_like radius theta : array_like inclination or polar angle phi : array_like azimuth angle Returns --------- x : array x coordinate(s) in Cartesion space y : array y coordinate(s) in Cartesian space z : array z coordinate Notes -------- See these pages: * http://en.wikipedia.org/wiki/Spherical_coordinate_system * http://mathworld.wolfram.com/SphericalCoordinates.html for excellent discussion of the many different conventions possible. Here we use the physics conventions, used in the wikipedia page. Derivations of the formulae are simple. Consider a vector x, y, z of length r (norm of x, y, z). The inclination angle (theta) can be found from: cos(theta) == z / r -> z == r * cos(theta). This gives the hypotenuse of the projection onto the XY plane, which we will call Q. Q == r*sin(theta). Now x / Q == cos(phi) -> x == r * sin(theta) * cos(phi) and so on. We have deliberately named this function ``sphere2cart`` rather than ``sph2cart`` to distinguish it from the Matlab function of that name, because the Matlab function uses an unusual convention for the angles that we did not want to replicate. The Matlab function is trivial to implement with the formulae given in the Matlab help. ''' sin_theta = np.sin(theta) x = r * np.cos(phi) * sin_theta y = r * np.sin(phi) * sin_theta z = r * np.cos(theta) x, y, z = np.broadcast_arrays(x, y, z) return x, y, z def cart2sphere(x, y, z): r''' Return angles for Cartesian 3D coordinates `x`, `y`, and `z` See doc for ``sphere2cart`` for angle conventions and derivation of the formulae. $0\le\theta\mathrm{(theta)}\le\pi$ and $-\pi\le\phi\mathrm{(phi)}\le\pi$ Parameters ------------ x : array_like x coordinate in Cartesian space y : array_like y coordinate in Cartesian space z : array_like z coordinate Returns --------- r : array radius theta : array inclination (polar) angle phi : array azimuth angle ''' r = np.sqrt(x * x + y * y + z * z) theta = np.arccos(z / r) theta = np.where(r > 0, theta, 0.) phi = np.arctan2(y, x) r, theta, phi = np.broadcast_arrays(r, theta, phi) return r, theta, phi def sph2latlon(theta, phi): """Convert spherical coordinates to latitude and longitude. Returns ------- lat, lon : ndarray Latitude and longitude. """ return np.rad2deg(theta - np.pi / 2), np.rad2deg(phi - np.pi) def normalized_vector(vec, axis=-1): ''' Return vector divided by its Euclidean (L2) norm See :term:`unit vector` and :term:`Euclidean norm` Parameters ------------ vec : array_like shape (3,) Returns ---------- nvec : array shape (3,) vector divided by L2 norm Examples ----------- >>> vec = [1, 2, 3] >>> l2n = np.sqrt(np.dot(vec, vec)) >>> nvec = normalized_vector(vec) >>> np.allclose(np.array(vec) / l2n, nvec) True >>> vec = np.array([[1, 2, 3]]) >>> vec.shape == (1, 3) True >>> normalized_vector(vec).shape == (1, 3) True ''' return vec / vector_norm(vec, axis, keepdims=True) def vector_norm(vec, axis=-1, keepdims=False): ''' Return vector Euclidean (L2) norm See :term:`unit vector` and :term:`Euclidean norm` Parameters ------------- vec : array_like Vectors to norm. axis : int Axis over which to norm. By default norm over last axis. If `axis` is None, `vec` is flattened then normed. keepdims : bool If True, the output will have the same number of dimensions as `vec`, with shape 1 on `axis`. Returns --------- norm : array Euclidean norms of vectors. 
Examples -------- >>> import numpy as np >>> vec = [[8, 15, 0], [0, 36, 77]] >>> vector_norm(vec) array([ 17., 85.]) >>> vector_norm(vec, keepdims=True) array([[ 17.], [ 85.]]) >>> vector_norm(vec, axis=0) array([ 8., 39., 77.]) ''' vec = np.asarray(vec) vec_norm = np.sqrt((vec * vec).sum(axis)) if keepdims: if axis is None: shape = [1] * vec.ndim else: shape = list(vec.shape) shape[axis] = 1 vec_norm = vec_norm.reshape(shape) return vec_norm def rodrigues_axis_rotation(r, theta): """ Rodrigues formula Rotation matrix for rotation around axis r for angle theta. The rotation matrix is given by the Rodrigues formula: R = Id + sin(theta)*Sn + (1-cos(theta))*Sn^2 with:: 0 -nz ny Sn = nz 0 -nx -ny nx 0 where n = r / ||r|| In case the angle ||r|| is very small, the above formula may lead to numerical instabilities. We instead use a Taylor expansion around theta=0: R = I + sin(theta)/tetha Sr + (1-cos(theta))/teta2 Sr^2 leading to: R = I + (1-theta2/6)*Sr + (1/2-theta2/24)*Sr^2 Parameters ----------- r : array_like shape (3,), axis theta : float, angle in degrees Returns ---------- R : array, shape (3,3), rotation matrix Examples --------- >>> import numpy as np >>> from dipy.core.geometry import rodrigues_axis_rotation >>> v=np.array([0,0,1]) >>> u=np.array([1,0,0]) >>> R=rodrigues_axis_rotation(v,40) >>> ur=np.dot(R,u) >>> np.round(np.rad2deg(np.arccos(np.dot(ur,u)))) 40.0 """ theta = np.deg2rad(theta) if theta > 1e-30: n = r / np.linalg.norm(r) Sn = np.array([[0, -n[2], n[1]], [n[2], 0, -n[0]], [-n[1], n[0], 0]]) R = np.eye(3) + np.sin(theta) * Sn + \ (1 - np.cos(theta)) * np.dot(Sn, Sn) else: Sr = np.array([[0, -r[2], r[1]], [r[2], 0, -r[0]], [-r[1], r[0], 0]]) theta2 = theta * theta R = np.eye(3) + (1 - theta2 / 6.) * \ Sr + (.5 - theta2 / 24.) * np.dot(Sr, Sr) return R def nearest_pos_semi_def(B): ''' Least squares positive semi-definite tensor estimation Parameters ------------ B : (3,3) array_like B matrix - symmetric. We do not check the symmetry. Returns --------- npds : (3,3) array Estimated nearest positive semi-definite array to matrix `B`. Examples ---------- >>> B = np.diag([1, 1, -1]) >>> nearest_pos_semi_def(B) array([[ 0.75, 0. , 0. ], [ 0. , 0.75, 0. ], [ 0. , 0. , 0. ]]) References ---------- .. [1] Niethammer M, San Jose Estepar R, Bouix S, Shenton M, Westin CF. On diffusion tensor estimation. Conf Proc IEEE Eng Med Biol Soc. 2006;1:2622-5. PubMed PMID: 17946125; PubMed Central PMCID: PMC2791793. 
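A further quick check (illustrative): an input that is already positive
semi-definite is returned unchanged:

>>> B = np.eye(3)
>>> np.allclose(nearest_pos_semi_def(B), B)
True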
''' B = np.asarray(B) vals, vecs = npl.eigh(B) # indices of eigenvalues in descending order inds = np.argsort(vals)[::-1] vals = vals[inds] cardneg = np.sum(vals < 0) if cardneg == 0: return B if cardneg == 3: return np.zeros((3, 3)) lam1a, lam2a, lam3a = vals scalers = np.zeros((3,)) if cardneg == 2: b112 = np.max([0, lam1a + (lam2a + lam3a) / 3.]) scalers[0] = b112 elif cardneg == 1: lam1b = lam1a + 0.25 * lam3a lam2b = lam2a + 0.25 * lam3a if lam1b >= 0 and lam2b >= 0: scalers[:2] = lam1b, lam2b else: # one of the lam1b, lam2b is < 0 if lam2b < 0: b111 = np.max([0, lam1a + (lam2a + lam3a) / 3.]) scalers[0] = b111 if lam1b < 0: b221 = np.max([0, lam2a + (lam1a + lam3a) / 3.]) scalers[1] = b221 # resort the scalers to match the original vecs scalers = scalers[np.argsort(inds)] return np.dot(vecs, np.dot(np.diag(scalers), vecs.T)) def sphere_distance(pts1, pts2, radius=None, check_radius=True): """ Distance across sphere surface between `pts1` and `pts2` Parameters ------------ pts1 : (N,R) or (R,) array_like where N is the number of points and R is the number of coordinates defining a point (``R==3`` for 3D) pts2 : (N,R) or (R,) array_like where N is the number of points and R is the number of coordinates defining a point (``R==3`` for 3D). It should be possible to broadcast `pts1` against `pts2` radius : None or float, optional Radius of sphere. Default is to work out radius from mean of the length of each point vector check_radius : bool, optional If True, check if the points are on the sphere surface - i.e check if the vector lengths in `pts1` and `pts2` are close to `radius`. Default is True. Returns --------- d : (N,) or (0,) array Distances between corresponding points in `pts1` and `pts2` across the spherical surface, i.e. the great circle distance See also ---------- cart_distance : cartesian distance between points vector_cosine : cosine of angle between vectors Examples ---------- >>> print('%.4f' % sphere_distance([0,1],[1,0])) 1.5708 >>> print('%.4f' % sphere_distance([0,3],[3,0])) 4.7124 """ pts1 = np.asarray(pts1) pts2 = np.asarray(pts2) lens1 = np.sqrt(np.sum(pts1 ** 2, axis=-1)) lens2 = np.sqrt(np.sum(pts2 ** 2, axis=-1)) if radius is None: radius = (np.mean(lens1) + np.mean(lens2)) / 2.0 if check_radius: if not (np.allclose(radius, lens1) and np.allclose(radius, lens2)): raise ValueError('Radii do not match sphere surface') # Get angle with vector cosine dots = np.inner(pts1, pts2) lens = lens1 * lens2 angle_cos = np.arccos(dots / lens) return angle_cos * radius def cart_distance(pts1, pts2): ''' Cartesian distance between `pts1` and `pts2` If either of `pts1` or `pts2` is 2D, then we take the first dimension to index points, and the second indexes coordinate. More generally, we take the last dimension to be the coordinate dimension. Parameters ---------- pts1 : (N,R) or (R,) array_like where N is the number of points and R is the number of coordinates defining a point (``R==3`` for 3D) pts2 : (N,R) or (R,) array_like where N is the number of points and R is the number of coordinates defining a point (``R==3`` for 3D). 
It should be possible to broadcast `pts1` against `pts2` Returns ------- d : (N,) or (0,) array Cartesian distances between corresponding points in `pts1` and `pts2` See also -------- sphere_distance : distance between points on sphere surface Examples ---------- >>> cart_distance([0,0,0], [0,0,3]) 3.0 ''' sqs = np.subtract(pts1, pts2) ** 2 return np.sqrt(np.sum(sqs, axis=-1)) def vector_cosine(vecs1, vecs2): """ Cosine of angle between two (sets of) vectors The cosine of the angle between two vectors ``v1`` and ``v2`` is given by the inner product of ``v1`` and ``v2`` divided by the product of the vector lengths:: v_cos = np.inner(v1, v2) / (np.sqrt(np.sum(v1**2)) * np.sqrt(np.sum(v2**2))) Parameters ------------- vecs1 : (N, R) or (R,) array_like N vectors (as rows) or single vector. Vectors have R elements. vecs2 : (N, R) or (R,) array_like N vectors (as rows) or single vector. Vectors have R elements. It should be possible to broadcast `vecs1` against `vecs2` Returns ---------- vcos : (N,) or (0,) array Vector cosines. To get the angles you will need ``np.arccos`` Notes -------- The vector cosine will be the same as the correlation only if all the input vectors have zero mean. """ vecs1 = np.asarray(vecs1) vecs2 = np.asarray(vecs2) lens1 = np.sqrt(np.sum(vecs1 ** 2, axis=-1)) lens2 = np.sqrt(np.sum(vecs2 ** 2, axis=-1)) dots = np.inner(vecs1, vecs2) lens = lens1 * lens2 return dots / lens def lambert_equal_area_projection_polar(theta, phi): r""" Lambert Equal Area Projection from polar sphere to plane Return positions in (y1,y2) plane corresponding to the points with polar coordinates (theta, phi) on the unit sphere, under the Lambert Equal Area Projection mapping (see Mardia and Jupp (2000), Directional Statistics, p. 161). See doc for ``sphere2cart`` for angle conventions - $0 \le \theta \le \pi$ and $0 \le \phi \le 2 \pi$ - $|(y_1,y_2)| \le 2$ The Lambert EAP maps the upper hemisphere to the planar disc of radius 1 and the lower hemisphere to the planar annulus between radii 1 and 2, and *vice versa*. Parameters ---------- theta : array_like theta spherical coordinates phi : array_like phi spherical coordinates Returns --------- y : (N,2) array planar coordinates of points following mapping by Lambert's EAP. """ return 2 * np.repeat(np.sin(theta / 2), 2).reshape((theta.shape[0], 2)) * \ np.column_stack((np.cos(phi), np.sin(phi))) def lambert_equal_area_projection_cart(x, y, z): r''' Lambert Equal Area Projection from cartesian vector to plane Return positions in $(y_1,y_2)$ plane corresponding to the directions of the vectors with cartesian coordinates xyz under the Lambert Equal Area Projection mapping (see Mardia and Jupp (2000), Directional Statistics, p. 161). The Lambert EAP maps the upper hemisphere to the planar disc of radius 1 and the lower hemisphere to the planar annulus between radii 1 and 2, and *vice versa*. See doc for ``sphere2cart`` for angle conventions Parameters ------------ x : array_like x coordinate in Cartesian space y : array_like y coordinate in Cartesian space z : array_like z coordinate Returns ---------- y : (N,2) array planar coordinates of points following mapping by Lambert's EAP. ''' (r, theta, phi) = cart2sphere(x, y, z) return lambert_equal_area_projection_polar(theta, phi) def euler_matrix(ai, aj, ak, axes='sxyz'): """Return homogeneous rotation matrix from Euler angles and axis sequence.
Code modified from the work of Christoph Gohlke link provided here http://www.lfd.uci.edu/~gohlke/code/transformations.py.html Parameters ------------ ai, aj, ak : Euler's roll, pitch and yaw angles axes : One of 24 axis sequences as string or encoded tuple Returns --------- matrix : ndarray (4, 4) Code modified from the work of Christoph Gohlke link provided here http://www.lfd.uci.edu/~gohlke/code/transformations.py.html Examples -------- >>> import numpy >>> R = euler_matrix(1, 2, 3, 'syxz') >>> numpy.allclose(numpy.sum(R[0]), -1.34786452) True >>> R = euler_matrix(1, 2, 3, (0, 1, 0, 1)) >>> numpy.allclose(numpy.sum(R[0]), -0.383436184) True >>> ai, aj, ak = (4.0*math.pi) * (numpy.random.random(3) - 0.5) >>> for axes in _AXES2TUPLE.keys(): ... R = euler_matrix(ai, aj, ak, axes) >>> for axes in _TUPLE2AXES.keys(): ... R = euler_matrix(ai, aj, ak, axes) """ try: firstaxis, parity, repetition, frame = _AXES2TUPLE[axes] except (AttributeError, KeyError): firstaxis, parity, repetition, frame = axes i = firstaxis j = _NEXT_AXIS[i + parity] k = _NEXT_AXIS[i - parity + 1] if frame: ai, ak = ak, ai if parity: ai, aj, ak = -ai, -aj, -ak si, sj, sk = math.sin(ai), math.sin(aj), math.sin(ak) ci, cj, ck = math.cos(ai), math.cos(aj), math.cos(ak) cc, cs = ci * ck, ci * sk sc, ss = si * ck, si * sk M = np.identity(4) if repetition: M[i, i] = cj M[i, j] = sj * si M[i, k] = sj * ci M[j, i] = sj * sk M[j, j] = -cj * ss + cc M[j, k] = -cj * cs - sc M[k, i] = -sj * ck M[k, j] = cj * sc + cs M[k, k] = cj * cc - ss else: M[i, i] = cj * ck M[i, j] = sj * sc - cs M[i, k] = sj * cc + ss M[j, i] = cj * sk M[j, j] = sj * ss + cc M[j, k] = sj * cs - sc M[k, i] = -sj M[k, j] = cj * si M[k, k] = cj * ci return M def compose_matrix(scale=None, shear=None, angles=None, translate=None, perspective=None): """Return 4x4 transformation matrix from sequence of transformations. Code modified from the work of Christoph Gohlke link provided here http://www.lfd.uci.edu/~gohlke/code/transformations.py.html This is the inverse of the ``decompose_matrix`` function. Parameters ------------- scale : (3,) array_like Scaling factors. shear : array_like Shear factors for x-y, x-z, y-z axes. angles : array_like Euler angles about static x, y, z axes. translate : array_like Translation vector along x, y, z axes. perspective : array_like Perspective partition of matrix. Returns --------- matrix : 4x4 array Examples ---------- >>> import math >>> import numpy as np >>> import dipy.core.geometry as gm >>> scale = np.random.random(3) - 0.5 >>> shear = np.random.random(3) - 0.5 >>> angles = (np.random.random(3) - 0.5) * (2*math.pi) >>> trans = np.random.random(3) - 0.5 >>> persp = np.random.random(4) - 0.5 >>> M0 = gm.compose_matrix(scale, shear, angles, trans, persp) """ M = np.identity(4) if perspective is not None: P = np.identity(4) P[3, :] = perspective[:4] M = np.dot(M, P) if translate is not None: T = np.identity(4) T[:3, 3] = translate[:3] M = np.dot(M, T) if angles is not None: R = euler_matrix(angles[0], angles[1], angles[2], 'sxyz') M = np.dot(M, R) if shear is not None: Z = np.identity(4) Z[1, 2] = shear[2] Z[0, 2] = shear[1] Z[0, 1] = shear[0] M = np.dot(M, Z) if scale is not None: S = np.identity(4) S[0, 0] = scale[0] S[1, 1] = scale[1] S[2, 2] = scale[2] M = np.dot(M, S) M /= M[3, 3] return M def decompose_matrix(matrix): """Return sequence of transformations from transformation matrix. 
Code modified from the excellent work of Christoph Gohlke link provided here: http://www.lfd.uci.edu/~gohlke/code/transformations.py.html Parameters ------------ matrix : array_like Non-degenerate homogeneous transformation matrix Returns --------- scale : (3,) ndarray Three scaling factors. shear : (3,) ndarray Shear factors for x-y, x-z, y-z axes. angles : (3,) ndarray Euler angles about static x, y, z axes. translate : (3,) ndarray Translation vector along x, y, z axes. perspective : ndarray Perspective partition of matrix. Raises ------ ValueError If matrix is of wrong type or degenerate. Examples ----------- >>> import numpy as np >>> T0=np.diag([2,1,1,1]) >>> scale, shear, angles, trans, persp = decompose_matrix(T0) """ M = np.array(matrix, dtype=np.float64, copy=True).T if abs(M[3, 3]) < _EPS: raise ValueError("M[3, 3] is zero") M /= M[3, 3] P = M.copy() P[:, 3] = 0, 0, 0, 1 if not np.linalg.det(P): raise ValueError("matrix is singular") scale = np.zeros((3, ), dtype=np.float64) shear = [0, 0, 0] angles = [0, 0, 0] if any(abs(M[:3, 3]) > _EPS): perspective = np.dot(M[:, 3], np.linalg.inv(P.T)) M[:, 3] = 0, 0, 0, 1 else: perspective = np.array((0, 0, 0, 1), dtype=np.float64) translate = M[3, :3].copy() M[3, :3] = 0 row = M[:3, :3].copy() scale[0] = vector_norm(row[0]) row[0] /= scale[0] shear[0] = np.dot(row[0], row[1]) row[1] -= row[0] * shear[0] scale[1] = vector_norm(row[1]) row[1] /= scale[1] shear[0] /= scale[1] shear[1] = np.dot(row[0], row[2]) row[2] -= row[0] * shear[1] shear[2] = np.dot(row[1], row[2]) row[2] -= row[1] * shear[2] scale[2] = vector_norm(row[2]) row[2] /= scale[2] shear[1:] /= scale[2] if np.dot(row[0], np.cross(row[1], row[2])) < 0: scale *= -1 row *= -1 angles[1] = math.asin(-row[0, 2]) if math.cos(angles[1]): angles[0] = math.atan2(row[1, 2], row[2, 2]) angles[2] = math.atan2(row[0, 1], row[0, 0]) else: # angles[0] = math.atan2(row[1, 0], row[1, 1]) angles[0] = math.atan2(-row[2, 1], row[1, 1]) angles[2] = 0.0 return scale, shear, angles, translate, perspective def circumradius(a, b, c): ''' a, b and c are 3-dimensional vectors which are the vertices of a triangle. The function returns the circumradius of the triangle, i.e. the radius of the smallest circle that can contain the triangle. In the degenerate case when the 3 points are collinear it returns half the distance between the furthest apart points. Parameters ---------- a, b, c : (3,) array_like the three vertices of the triangle Returns ------- circumradius : float the desired circumradius ''' x = a - c xx = np.linalg.norm(x) ** 2 y = b - c yy = np.linalg.norm(y) ** 2 z = np.cross(x, y) # test for collinearity if np.linalg.norm(z) == 0: return np.sqrt(np.max([np.dot(x, x), np.dot(y, y), np.dot(a - b, a - b)])) / 2. else: m = np.vstack((x, y, z)) w = np.dot(np.linalg.inv(m.T), np.array([xx / 2., yy / 2., 0])) return np.linalg.norm(w) / 2. def vec2vec_rotmat(u, v): r""" rotation matrix from 2 unit vectors u, v being unit 3d vectors return a 3x3 rotation matrix R that aligns u to v. In general there are many rotations that will map u to v. If S is any rotation using v as an axis then S.R will also map u to v since (S.R)u = S(Ru) = Sv = v. The rotation R returned by vec2vec_rotmat leaves fixed the perpendicular to the plane spanned by u and v. The transpose of R will align v to u.
Parameters ----------- u : array, shape(3,) v : array, shape(3,) Returns --------- R : array, shape(3,3) Examples --------- >>> import numpy as np >>> from dipy.core.geometry import vec2vec_rotmat >>> u=np.array([1,0,0]) >>> v=np.array([0,1,0]) >>> R=vec2vec_rotmat(u,v) >>> np.dot(R,u) array([ 0., 1., 0.]) >>> np.dot(R.T,v) array([ 1., 0., 0.]) """ # Cross product is the first step to find R # Rely on numpy instead of manual checking for failing # cases w = np.cross(u, v) wn = np.linalg.norm(w) # Check that cross product is OK and vectors # u, v are not collinear (norm(w)>0.0) if np.isnan(wn) or wn < np.finfo(float).eps: norm_u_v = np.linalg.norm(u - v) # This is the case of two antipodal vectors: # ** former checking assumed norm(u) == norm(v) if norm_u_v > np.linalg.norm(u): return -np.eye(3) return np.eye(3) # if everything ok, normalize w w = w / wn # vp is in plane of u,v, perpendicular to u vp = (v - (np.dot(u, v) * u)) vp = vp / np.linalg.norm(vp) # (u vp w) is an orthonormal basis P = np.array([u, vp, w]) Pt = P.T cosa = np.dot(u, v) sina = np.sqrt(1 - cosa ** 2) R = np.array([[cosa, -sina, 0], [sina, cosa, 0], [0, 0, 1]]) Rp = np.dot(Pt, np.dot(R, P)) # make sure that you don't return any Nans # check using the appropriate tool in numpy if np.any(np.isnan(Rp)): return np.eye(3) return Rp def compose_transformations(*mats): """ Compose multiple 4x4 affine transformations in one 4x4 matrix Parameters ----------- mat1 : array, (4, 4) mat2 : array, (4, 4) ... matN : array, (4, 4) Returns ------- matN x ... x mat2 x mat1 : array, (4, 4) """ prev = mats[0] if len(mats) < 2: raise ValueError('At least two or more matrices are needed') for mat in mats[1:]: prev = np.dot(mat, prev) return prev def perpendicular_directions(v, num=30, half=False): r""" Computes n evenly spaced perpendicular directions relative to a given vector v Parameters ----------- v : array (3,) Array containing the three cartesian coordinates of vector v num : int, optional Number of perpendicular directions to generate half : bool, optional If half is True, perpendicular directions are sampled on half of the unit circumference perpendicular to v, otherwive perpendicular directions are sampled on the full circumference. Default of half is False Returns ------- psamples : array (n, 3) array of vectors perpendicular to v Notes -------- Perpendicular directions are estimated using the following two step procedure: 1) the perpendicular directions are first sampled in a unit circumference parallel to the plane normal to the x-axis. 2) Samples are then rotated and aligned to the plane normal to vector v. The rotational matrix for this rotation is constructed as reference frame basis which axis are the following: - The first axis is vector v - The second axis is defined as the normalized vector given by the cross product between vector v and the unit vector aligned to the x-axis - The third axis is defined as the cross product between the previous computed vector and vector v. Following this two steps, coordinates of the final perpendicular directions are given as: .. math:: \left [ -\sin(a_{i}) \sqrt{{v_{y}}^{2}+{v_{z}}^{2}} \; , \; \frac{v_{x}v_{y}\sin(a_{i})-v_{z}\cos(a_{i})} {\sqrt{{v_{y}}^{2}+{v_{z}}^{2}}} \; , \; \frac{v_{x}v_{z}\sin(a_{i})-v_{y}\cos(a_{i})} {\sqrt{{v_{y}}^{2}+{v_{z}}^{2}}} \right ] This procedure has a singularity when vector v is aligned to the x-axis. 
To solve this singularity, perpendicular directions in procedure's step 1 are defined in the plane normal to y-axis and the second axis of the rotated frame of reference is computed as the normalized vector given by the cross product between vector v and the unit vector aligned to the y-axis. Following this, the coordinates of the perpendicular directions are given as: \left [ -\frac{\left (v_{x}v_{y}\sin(a_{i})+v_{z}\cos(a_{i}) \right )} {\sqrt{{v_{x}}^{2}+{v_{z}}^{2}}} \; , \; \sin(a_{i}) \sqrt{{v_{x}}^{2}+{v_{z}}^{2}} \; , \; \frac{v_{y}v_{z}\sin(a_{i})+v_{x}\cos(a_{i})} {\sqrt{{v_{x}}^{2}+{v_{z}}^{2}}} \right ] For more details on this calculation, see ` here `_. """ v = np.array(v, dtype=float) # Float error used for floats comparison er = np.finfo(v[0]).eps * 1e3 # Define circumference or semi-circumference if half is True: a = np.linspace(0., math.pi, num=num, endpoint=False) else: a = np.linspace(0., 2 * math.pi, num=num, endpoint=False) cosa = np.cos(a) sina = np.sin(a) # Check if vector is not aligned to the x axis if abs(v[0] - 1.) > er: sq = np.sqrt(v[1]**2 + v[2]**2) psamples = np.array([- sq*sina, (v[0]*v[1]*sina - v[2]*cosa) / sq, (v[0]*v[2]*sina + v[1]*cosa) / sq]) else: sq = np.sqrt(v[0]**2 + v[2]**2) psamples = np.array([- (v[2]*cosa + v[0]*v[1]*sina) / sq, sina*sq, (v[0]*cosa - v[2]*v[1]*sina) / sq]) return psamples.T def dist_to_corner(affine): """Calculate the maximal distance from the center to a corner of a voxel, given an affine Parameters ---------- affine : 4 by 4 array. The spatial transformation from the measurement to the scanner space. Returns ------- dist: float The maximal distance to the corner of a voxel, given voxel size encoded in the affine. """ R = affine[0:3, 0:3] vox_dim = np.diag(np.linalg.cholesky(R.T.dot(R))) return np.sqrt(np.sum((vox_dim / 2) ** 2)) dipy-0.13.0/dipy/core/gradients.py000066400000000000000000000323011317371701200167700ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import from dipy.utils.six import string_types import numpy as np try: from scipy.linalg import polar except ImportError: # Some elderly scipy doesn't have polar from dipy.fixes.scipy import polar from scipy.linalg import inv from dipy.io import gradients as io from dipy.core.onetime import auto_attr from dipy.core.geometry import vector_norm from dipy.core.sphere import disperse_charges, HemiSphere class GradientTable(object): """Diffusion gradient information Parameters ---------- gradients : array_like (N, 3) Diffusion gradients. The direction of each of these vectors corresponds to the b-vector, and the length corresponds to the b-value. b0_threshold : float Gradients with b-value less than or equal to `b0_threshold` are considered as b0s i.e. without diffusion weighting. Attributes ---------- gradients : (N,3) ndarray diffusion gradients bvals : (N,) ndarray The b-value, or magnitude, of each gradient direction. qvals: (N,) ndarray The q-value for each gradient direction. Needs big and small delta. bvecs : (N,3) ndarray The direction, represented as a unit vector, of each gradient. b0s_mask : (N,) ndarray Boolean array indicating which gradients have no diffusion weighting, ie b-value is close to 0. b0_threshold : float Gradients with b-value less than or equal to `b0_threshold` are considered to not have diffusion weighting. See Also -------- gradient_table Notes -------- The GradientTable object is immutable. Do NOT assign attributes. 
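A minimal construction sketch (the numbers are illustrative only; each row
of ``gradients`` is a unit b-vector scaled by its b-value):

>>> import numpy as np
>>> grads = np.concatenate([np.zeros((1, 3)), 1000 * np.eye(3)])
>>> gtab = GradientTable(grads)
>>> int(gtab.b0s_mask.sum())
1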
If you have your gradient table in a bval & bvec format, we recommend using the factory function gradient_table """ def __init__(self, gradients, big_delta=None, small_delta=None, b0_threshold=0): """Constructor for GradientTable class""" gradients = np.asarray(gradients) if gradients.ndim != 2 or gradients.shape[1] != 3: raise ValueError("gradients should be an (N, 3) array") self.gradients = gradients # Avoid nan gradients. Set these to 0 instead: self.gradients = np.where(np.isnan(gradients), 0., gradients) self.big_delta = big_delta self.small_delta = small_delta self.b0_threshold = b0_threshold @auto_attr def bvals(self): return vector_norm(self.gradients) @auto_attr def qvals(self): tau = self.big_delta - self.small_delta / 3.0 return np.sqrt(self.bvals / tau) / (2 * np.pi) @auto_attr def b0s_mask(self): return self.bvals <= self.b0_threshold @auto_attr def bvecs(self): # To get unit vectors we divide by bvals, where bvals is 0 we divide by # 1 to avoid making nans denom = self.bvals + (self.bvals == 0) denom = denom.reshape((-1, 1)) return self.gradients / denom @property def info(self): print('B-values shape (%d,)' % self.bvals.shape) print(' min %f ' % self.bvals.min()) print(' max %f ' % self.bvals.max()) print('B-vectors shape (%d, %d)' % self.bvecs.shape) print(' min %f ' % self.bvecs.min()) print(' max %f ' % self.bvecs.max()) def gradient_table_from_bvals_bvecs(bvals, bvecs, b0_threshold=0, atol=1e-2, **kwargs): """Creates a GradientTable from a bvals array and a bvecs array Parameters ---------- bvals : array_like (N,) The b-value, or magnitude, of each gradient direction. bvecs : array_like (N, 3) The direction, represented as a unit vector, of each gradient. b0_threshold : float Gradients with b-value less than or equal to `bo_threshold` are considered to not have diffusion weighting. atol : float Each vector in `bvecs` must be a unit vectors up to a tolerance of `atol`. Other Parameters ---------------- **kwargs : dict Other keyword inputs are passed to GradientTable. Returns ------- gradients : GradientTable A GradientTable with all the gradient information. See Also -------- GradientTable, gradient_table """ bvals = np.asarray(bvals, np.float) bvecs = np.asarray(bvecs, np.float) dwi_mask = bvals > b0_threshold # check that bvals is (N,) array and bvecs is (N, 3) unit vectors if bvals.ndim != 1 or bvecs.ndim != 2 or bvecs.shape[0] != bvals.shape[0]: raise ValueError("bvals and bvecs should be (N,) and (N, 3) arrays " "respectively, where N is the number of diffusion " "gradients") bvecs = np.where(np.isnan(bvecs), 0, bvecs) bvecs_close_to_1 = abs(vector_norm(bvecs) - 1) <= atol if bvecs.shape[1] != 3: raise ValueError("bvecs should be (N, 3)") if not np.all(bvecs_close_to_1[dwi_mask]): raise ValueError("The vectors in bvecs should be unit (The tolerance " "can be modified as an input parameter)") bvecs = np.where(bvecs_close_to_1[:, None], bvecs, 0) bvals = bvals * bvecs_close_to_1 gradients = bvals[:, None] * bvecs grad_table = GradientTable(gradients, b0_threshold=b0_threshold, **kwargs) grad_table.bvals = bvals grad_table.bvecs = bvecs grad_table.b0s_mask = ~dwi_mask return grad_table def gradient_table(bvals, bvecs=None, big_delta=None, small_delta=None, b0_threshold=0, atol=1e-2): """A general function for creating diffusion MR gradients. It reads, loads and prepares scanner parameters like the b-values and b-vectors so that they can be useful during the reconstruction process. Parameters ---------- bvals : can be any of the four options 1. 
an array of shape (N,) or (1, N) or (N, 1) with the b-values. 2. a path for the file which contains an array like the above (1). 3. an array of shape (N, 4) or (4, N). Then this parameter is considered to be a b-table which contains both bvals and bvecs. In this case the next parameter is skipped. 4. a path for the file which contains an array like the one at (3). bvecs : can be any of two options 1. an array of shape (N, 3) or (3, N) with the b-vectors. 2. a path for the file which contains an array like the previous. big_delta : float acquisition timing duration (default None) small_delta : float acquisition timing duration (default None) b0_threshold : float All b-values with values less than or equal to `bo_threshold` are considered as b0s i.e. without diffusion weighting. atol : float All b-vectors need to be unit vectors up to a tolerance. Returns ------- gradients : GradientTable A GradientTable with all the gradient information. Examples -------- >>> from dipy.core.gradients import gradient_table >>> bvals=1500*np.ones(7) >>> bvals[0]=0 >>> sq2=np.sqrt(2)/2 >>> bvecs=np.array([[0, 0, 0], ... [1, 0, 0], ... [0, 1, 0], ... [0, 0, 1], ... [sq2, sq2, 0], ... [sq2, 0, sq2], ... [0, sq2, sq2]]) >>> gt = gradient_table(bvals, bvecs) >>> gt.bvecs.shape == bvecs.shape True >>> gt = gradient_table(bvals, bvecs.T) >>> gt.bvecs.shape == bvecs.T.shape False Notes ----- 1. Often b0s (b-values which correspond to images without diffusion weighting) have 0 values however in some cases the scanner cannot provide b0s of an exact 0 value and it gives a bit higher values e.g. 6 or 12. This is the purpose of the b0_threshold in the __init__. 2. We assume that the minimum number of b-values is 7. 3. B-vectors should be unit vectors. """ # If you provided strings with full paths, we go and load those from # the files: if isinstance(bvals, string_types): bvals, _ = io.read_bvals_bvecs(bvals, None) if isinstance(bvecs, string_types): _, bvecs = io.read_bvals_bvecs(None, bvecs) bvals = np.asarray(bvals) # If bvecs is None we expect bvals to be an (N, 4) or (4, N) array. if bvecs is None: if bvals.shape[-1] == 4: bvecs = bvals[:, 1:] bvals = np.squeeze(bvals[:, 0]) elif bvals.shape[0] == 4: bvecs = bvals[1:, :].T bvals = np.squeeze(bvals[0, :]) else: raise ValueError("input should be bvals and bvecs OR an (N, 4)" " array containing both bvals and bvecs") else: bvecs = np.asarray(bvecs) if (bvecs.shape[1] > bvecs.shape[0]) and bvecs.shape[0] > 1: bvecs = bvecs.T return gradient_table_from_bvals_bvecs(bvals, bvecs, big_delta=big_delta, small_delta=small_delta, b0_threshold=b0_threshold, atol=atol) def reorient_bvecs(gtab, affines): """Reorient the directions in a GradientTable. When correcting for motion, rotation of the diffusion-weighted volumes might cause systematic bias in rotationally invariant measures, such as FA and MD, and also cause characteristic biases in tractography, unless the gradient directions are appropriately reoriented to compensate for this effect [Leemans2009]_. Parameters ---------- gtab : GradientTable The nominal gradient table with which the data were acquired. affines : list or ndarray of shape (n, 4, 4) or (n, 3, 3) Each entry in this list or array contain either an affine transformation (4,4) or a rotation matrix (3, 3). 
In both cases, the transformations encode the rotation that was applied to the image corresponding to one of the non-zero gradient directions (ordered according to their order in `gtab.bvecs[~gtab.b0s_mask]`) Returns ------- gtab : a GradientTable class instance with the reoriented directions References ---------- .. [Leemans2009] The B-Matrix Must Be Rotated When Correcting for Subject Motion in DTI Data. Leemans, A. and Jones, D.K. (2009). MRM, 61: 1336-1349 """ new_bvecs = gtab.bvecs[~gtab.b0s_mask] if new_bvecs.shape[0] != len(affines): e_s = "Number of affine transformations must match number of " e_s += "non-zero gradients" raise ValueError(e_s) for i, aff in enumerate(affines): if aff.shape == (4, 4): # This must be an affine! # Remove the translation component: aff_no_trans = aff[:3, :3] # Decompose into rotation and scaling components: R, S = polar(aff_no_trans) elif aff.shape == (3, 3): # We assume this is a rotation matrix: R = aff Rinv = inv(R) # Apply the inverse of the rotation to the corresponding gradient # direction: new_bvecs[i] = np.dot(Rinv, new_bvecs[i]) return_bvecs = np.zeros(gtab.bvecs.shape) return_bvecs[~gtab.b0s_mask] = new_bvecs return gradient_table(gtab.bvals, return_bvecs) def generate_bvecs(N, iters=5000): """Generates N bvectors. Uses dipy.core.sphere.disperse_charges to model electrostatic repulsion on a unit sphere. Parameters ---------- N : int The number of bvectors to generate. This should be equal to the number of bvals used. iters : int Number of iterations to run. Returns ------- bvecs : (N,3) ndarray The generated directions, represented as a unit vector, of each gradient. """ theta = np.pi * np.random.rand(N) phi = 2 * np.pi * np.random.rand(N) hsph_initial = HemiSphere(theta=theta, phi=phi) hsph_updated, potential = disperse_charges(hsph_initial, iters) bvecs = hsph_updated.vertices return bvecs def check_multi_b(gtab, n_bvals, non_zero=True, bmag=None): """ Check if you have enough different b-values in your gradient table Parameters ---------- gtab : GradientTable class instance. n_bvals : int The number of different b-values you are checking for. non_zero : bool Whether to check only non-zero bvalues. In this case, we will require at least `n_bvals` *non-zero* b-values (where non-zero is defined depending on the `gtab` object's `b0_threshold` attribute) bmag : int The order of magnitude of the b-values used. The function will normalize the b-values relative $10^{bmag - 1}$. Default: derive this value from the maximal b-value provided: $bmag=log_{10}(max(bvals))$. Returns ------- bool : Whether there are at least `n_bvals` different b-values in the gradient table used. """ bvals = gtab.bvals.copy() if non_zero: bvals = bvals[~gtab.b0s_mask] if bmag is None: bmag = int(np.log10(np.max(bvals))) b = bvals / (10 ** (bmag - 1)) # normalize b units b = b.round() uniqueb = np.unique(b) if uniqueb.shape[0] < n_bvals: return False else: return True dipy-0.13.0/dipy/core/graph.py000066400000000000000000000066751317371701200161300ustar00rootroot00000000000000""" A simple graph class """ from __future__ import division, print_function, absolute_import class Graph(object): ''' A simple graph class ''' def __init__(self): ''' A graph class with nodes and edges :-) This class allows us to: 1. find the shortest path 2. find all paths 3. add/delete nodes and edges 4. 
get parent & children nodes Examples -------- >>> from dipy.core.graph import Graph >>> g=Graph() >>> g.add_node('a',5) >>> g.add_node('b',6) >>> g.add_node('c',10) >>> g.add_node('d',11) >>> g.add_edge('a','b') >>> g.add_edge('b','c') >>> g.add_edge('c','d') >>> g.add_edge('b','d') >>> g.up_short('d') ['d', 'b', 'a'] ''' self.node = {} self.pred = {} self.succ = {} def add_node(self, n, attr=None): self.succ[n] = {} self.pred[n] = {} self.node[n] = attr def add_edge(self, n, m, ws=True, wp=True): self.succ[n][m] = ws self.pred[m][n] = wp def parents(self, n): return self.pred[n].keys() def children(self, n): return self.succ[n].keys() def up(self, n): return self.all_paths(self.pred, n) def down(self, n): return self.all_paths(self.succ, n) def up_short(self, n): return self.shortest_path(self.pred, n) def down_short(self, n): return self.shortest_path(self.succ, n) def all_paths(self, graph, start, end=None, path=[]): path = path + [start] if start == end or graph[start] == {}: return [path] if start not in graph: return [] paths = [] for node in graph[start]: if node not in path: newpaths = self.all_paths(graph, node, end, path) for newpath in newpaths: paths.append(newpath) return paths def shortest_path(self, graph, start, end=None, path=[]): path = path + [start] if graph[start] == {} or start == end: return path if start not in graph: return [] shortest = None for node in graph[start]: if node not in path: newpath = self.shortest_path(graph, node, end, path) if newpath: if not shortest or len(newpath) < len(shortest): shortest = newpath return shortest def del_node_and_edges(self, n): try: del self.node[n] except KeyError: raise KeyError('node not in the graph') for s in self.succ[n]: del self.pred[s][n] del self.succ[n] for p in self.pred[n]: del self.succ[p][n] del self.pred[n] def del_node(self, n): try: del self.node[n] except KeyError: raise KeyError('node not in the graph') for s in self.succ[n]: for p in self.pred[n]: self.succ[p][s] = self.succ[n][s] self.pred[s][p] = self.pred[s][n] for s in self.succ.keys(): try: del self.succ[s][n] except KeyError: pass for p in self.pred.keys(): try: del self.pred[p][n] except KeyError: pass del self.succ[n] del self.pred[n] dipy-0.13.0/dipy/core/histeq.py000066400000000000000000000014311317371701200163050ustar00rootroot00000000000000import numpy as np def histeq(arr, num_bins=256): """ Performs an histogram equalization on ``arr``. This was taken from: http://www.janeriksolem.net/2009/06/histogram-equalization-with-python-and.html Parameters ---------- arr : ndarray Image on which to perform histogram equalization. num_bins : int Number of bins used to construct the histogram. Returns ------- result : ndarray Histogram equalized image. """ # get image histogram histo, bins = np.histogram(arr.flatten(), num_bins, normed=True) cdf = histo.cumsum() cdf = 255 * cdf / cdf[-1] # use linear interpolation of cdf to find new pixel values result = np.interp(arr.flatten(), bins[:-1], cdf) return result.reshape(arr.shape) dipy-0.13.0/dipy/core/ndindex.py000066400000000000000000000023061317371701200164430ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import import numpy as np from numpy.lib.stride_tricks import as_strided def ndindex(shape): """ An N-dimensional iterator object to index arrays. Given the shape of an array, an `ndindex` instance iterates over the N-dimensional index of the array. At each iteration a tuple of indices is returned; the last dimension is iterated over first. 
Parameters ---------- shape : tuple of ints The dimensions of the array. Examples -------- >>> from dipy.core.ndindex import ndindex >>> shape = (3, 2, 1) >>> for index in ndindex(shape): ... print(index) (0, 0, 0) (0, 1, 0) (1, 0, 0) (1, 1, 0) (2, 0, 0) (2, 1, 0) """ if len(shape) == 0: yield () else: x = as_strided(np.zeros(1), shape=shape, strides=np.zeros_like(shape)) try: ndi = np.nditer(x, flags=['multi_index', 'zerosize_ok'], order='C') except AttributeError: # nditer only available in numpy >= 1.6 for ix in np.ndindex(*shape): yield ix else: for e in ndi: yield ndi.multi_index dipy-0.13.0/dipy/core/onetime.py000066400000000000000000000165751317371701200164670ustar00rootroot00000000000000""" Descriptor support for NIPY. Copyright (c) 2006-2011, NIPY Developers All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the NIPY Developers nor the names of any contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Utilities to support special Python descriptors [1,2], in particular the use of a useful pattern for properties we call 'one time properties'. These are object attributes which are declared as properties, but become regular attributes once they've been read the first time. They can thus be evaluated later in the object's life cycle, but once evaluated they become normal, static attributes with no function call overhead on access or any other constraints. A special ResetMixin class is provided to add a .reset() method to users who may want to have their objects capable of resetting these computed properties to their 'untriggered' state. References ---------- [1] How-To Guide for Descriptors, Raymond Hettinger. http://users.rcn.com/python/download/Descriptor.htm [2] Python data model, http://docs.python.org/reference/datamodel.html """ from __future__ import division, print_function, absolute_import #----------------------------------------------------------------------------- # Classes and Functions #----------------------------------------------------------------------------- class ResetMixin(object): """A Mixin class to add a .reset() method to users of OneTimeProperty. By default, auto attributes once computed, become static. 
If they happen to depend on other parts of an object and those parts change, their values may now be invalid. This class offers a .reset() method that users can call *explicitly* when they know the state of their objects may have changed and they want to ensure that *all* their special attributes should be invalidated. Once reset() is called, all their auto attributes are reset to their OneTimeProperty descriptors, and their accessor functions will be triggered again. .. warning:: If a class has a set of attributes that are OneTimeProperty, but that can be initialized from any one of them, do NOT use this mixin! For instance, UniformTimeSeries can be initialized with only sampling_rate and t0, sampling_interval and time are auto-computed. But if you were to reset() a UniformTimeSeries, it would lose all 4, and there would be then no way to break the circular dependency chains. If this becomes a problem in practice (for our analyzer objects it isn't, as they don't have the above pattern), we can extend reset() to check for a _no_reset set of names in the instance which are meant to be kept protected. But for now this is NOT done, so caveat emptor. Examples -------- >>> class A(ResetMixin): ... def __init__(self,x=1.0): ... self.x = x ... ... @auto_attr ... def y(self): ... print('*** y computation executed ***') ... return self.x / 2.0 ... >>> a = A(10) About to access y twice, the second time no computation is done: >>> a.y *** y computation executed *** 5.0 >>> a.y 5.0 Changing x >>> a.x = 20 a.y doesn't change to 10, since it is a static attribute: >>> a.y 5.0 We now reset a, and this will then force all auto attributes to recompute the next time we access them: >>> a.reset() About to access y twice again after reset(): >>> a.y *** y computation executed *** 10.0 >>> a.y 10.0 """ def reset(self): """Reset all OneTimeProperty attributes that may have fired already.""" instdict = self.__dict__ classdict = self.__class__.__dict__ # To reset them, we simply remove them from the instance dict. At that # point, it's as if they had never been computed. On the next access, # the accessor function from the parent class will be called, simply # because that's how the python descriptor protocol works. for mname, mval in classdict.items(): if mname in instdict and isinstance(mval, OneTimeProperty): delattr(self, mname) class OneTimeProperty(object): """A descriptor to make special properties that become normal attributes. This is meant to be used mostly by the auto_attr decorator in this module. """ def __init__(self, func): """Create a OneTimeProperty instance. Parameters ---------- func : method The method that will be called the first time to compute a value. Afterwards, the method's name will be a standard attribute holding the value of this computation. """ self.getter = func self.name = func.__name__ def __get__(self, obj, type=None): """This will be called on attribute access on the class or instance.""" if obj is None: # Being called on the class, return the original function. This # way, introspection works on the class. # return func return self.getter # Errors in the following line are errors in setting a # OneTimeProperty val = self.getter(obj) setattr(obj, self.name, val) return val def auto_attr(func): """Decorator to create OneTimeProperty attributes. Parameters ---------- func : method The method that will be called the first time to compute a value. Afterwards, the method's name will be a standard attribute holding the value of this computation. 
Examples -------- >>> class MagicProp(object): ... @auto_attr ... def a(self): ... return 99 ... >>> x = MagicProp() >>> 'a' in x.__dict__ False >>> x.a 99 >>> 'a' in x.__dict__ True """ return OneTimeProperty(func) #----------------------------------------------------------------------------- # Deprecated API #----------------------------------------------------------------------------- # For backwards compatibility setattr_on_read = auto_attr dipy-0.13.0/dipy/core/optimize.py000066400000000000000000000356051317371701200166620ustar00rootroot00000000000000""" A unified interface for performing and debugging optimization problems. Only L-BFGS-B and Powell is supported in this class for versions of Scipy < 0.12. All optimizers are available for scipy >= 0.12. """ import abc from distutils.version import LooseVersion import numpy as np import scipy import scipy.sparse as sps import scipy.optimize as opt from dipy.utils.six import with_metaclass SCIPY_LESS_0_12 = LooseVersion(scipy.version.short_version) < '0.12' if not SCIPY_LESS_0_12: from scipy.optimize import minimize else: from scipy.optimize import fmin_l_bfgs_b, fmin_powell class Optimizer(object): def __init__(self, fun, x0, args=(), method='L-BFGS-B', jac=None, hess=None, hessp=None, bounds=None, constraints=(), tol=None, callback=None, options=None, evolution=False): """ A class for handling minimization of scalar function of one or more variables. Parameters ---------- fun : callable Objective function. x0 : ndarray Initial guess. args : tuple, optional Extra arguments passed to the objective function and its derivatives (Jacobian, Hessian). method : str, optional Type of solver. Should be one of - 'Nelder-Mead' - 'Powell' - 'CG' - 'BFGS' - 'Newton-CG' - 'Anneal' - 'L-BFGS-B' - 'TNC' - 'COBYLA' - 'SLSQP' - 'dogleg' - 'trust-ncg' jac : bool or callable, optional Jacobian of objective function. Only for CG, BFGS, Newton-CG, dogleg, trust-ncg. If `jac` is a Boolean and is True, `fun` is assumed to return the value of Jacobian along with the objective function. If False, the Jacobian will be estimated numerically. `jac` can also be a callable returning the Jacobian of the objective. In this case, it must accept the same arguments as `fun`. hess, hessp : callable, optional Hessian of objective function or Hessian of objective function times an arbitrary vector p. Only for Newton-CG, dogleg, trust-ncg. Only one of `hessp` or `hess` needs to be given. If `hess` is provided, then `hessp` will be ignored. If neither `hess` nor `hessp` is provided, then the hessian product will be approximated using finite differences on `jac`. `hessp` must compute the Hessian times an arbitrary vector. bounds : sequence, optional Bounds for variables (only for L-BFGS-B, TNC and SLSQP). ``(min, max)`` pairs for each element in ``x``, defining the bounds on that parameter. Use None for one of ``min`` or ``max`` when there is no bound in that direction. constraints : dict or sequence of dict, optional Constraints definition (only for COBYLA and SLSQP). Each constraint is defined in a dictionary with fields: type : str Constraint type: 'eq' for equality, 'ineq' for inequality. fun : callable The function defining the constraint. jac : callable, optional The Jacobian of `fun` (only for SLSQP). args : sequence, optional Extra arguments to be passed to the function and Jacobian. Equality constraint means that the constraint function result is to be zero whereas inequality means that it is to be non-negative. Note that COBYLA only supports inequality constraints. 
tol : float, optional Tolerance for termination. For detailed control, use solver-specific options. callback : callable, optional Called after each iteration, as ``callback(xk)``, where ``xk`` is the current parameter vector. Only available using Scipy >= 0.12. options : dict, optional A dictionary of solver options. All methods accept the following generic options: maxiter : int Maximum number of iterations to perform. disp : bool Set to True to print convergence messages. For method-specific options, see `show_options('minimize', method)`. evolution : bool, optional save history of x for each iteration. Only available using Scipy >= 0.12. See also --------- scipy.optimize.minimize """ self.size_of_x = len(x0) self._evol_kx = None _eps = np.finfo(float).eps if SCIPY_LESS_0_12: if evolution is True: print('Saving history is available only with Scipy >= 0.12.') if method == 'L-BFGS-B': default_options = {'maxcor': 10, 'ftol': 1e-7, 'gtol': 1e-5, 'eps': 1e-8, 'maxiter': 1000} if jac is None: approx_grad = True else: approx_grad = False if options is None: options = default_options if options is not None: for key in options: default_options[key] = options[key] options = default_options try: out = fmin_l_bfgs_b(fun, x0, fprime=jac, args=args, approx_grad=approx_grad, bounds=bounds, m=options['maxcor'], factr=options['ftol']/_eps, pgtol=options['gtol'], epsilon=options['eps'], maxiter=options['maxiter']) except TypeError: msg = 'In Scipy ' + scipy.__version__ + ' `maxiter` ' msg += 'parameter is not available for L-BFGS-B. \n Using ' msg += '`maxfun` instead with value twice of maxiter.' print(msg) out = fmin_l_bfgs_b(fun, x0, fprime=jac, args=args, approx_grad=approx_grad, bounds=bounds, m=options['maxcor'], factr=options['ftol']/_eps, pgtol=options['gtol'], epsilon=options['eps'], maxfun=options['maxiter'] * 2) res = {'x': out[0], 'fun': out[1], 'nfev': out[2]['funcalls']} try: res['nit'] = out[2]['nit'] except KeyError: res['nit'] = None elif method == 'Powell': default_options = {'xtol': 0.0001, 'ftol': 0.0001, 'maxiter': None} if options is None: options = default_options if options is not None: for key in options: default_options[key] = options[key] options = default_options out = fmin_powell(fun, x0, args, xtol=options['xtol'], ftol=options['ftol'], maxiter=options['maxiter'], full_output=True, disp=False, retall=True) xopt, fopt, direc, iterations, funcs, warnflag, allvecs = out res = {'x': xopt, 'fun': fopt, 'nfev': funcs, 'nit': iterations} else: msg = 'Only L-BFGS-B and Powell is supported in this class ' msg += 'for versions of Scipy < 0.12.' raise ValueError(msg) if not SCIPY_LESS_0_12: if evolution is True: self._evol_kx = [] def history_of_x(kx): self._evol_kx.append(kx) res = minimize(fun, x0, args, method, jac, hess, hessp, bounds, constraints, tol, callback=history_of_x, options=options) else: res = minimize(fun, x0, args, method, jac, hess, hessp, bounds, constraints, tol, callback, options) self.res = res @property def xopt(self): return self.res['x'] @property def fopt(self): return self.res['fun'] @property def nit(self): return self.res['nit'] @property def nfev(self): return self.res['nfev'] @property def message(self): return self.res['message'] def print_summary(self): print(self.res) @property def evolution(self): if self._evol_kx is not None: return np.asarray(self._evol_kx) else: return None def spdot(A, B): """The same as np.dot(A, B), except it works even if A or B or both are sparse matrices. 
Parameters ---------- A, B : arrays of shape (m, n), (n, k) Returns ------- The matrix product AB. If both A and B are sparse, the result will be a sparse matrix. Otherwise, a dense result is returned See discussion here: http://mail.scipy.org/pipermail/scipy-user/2010-November/027700.html """ if sps.issparse(A) and sps.issparse(B): return A * B elif sps.issparse(A) and not sps.issparse(B): return (A * B).view(type=B.__class__) elif not sps.issparse(A) and sps.issparse(B): return (B.T * A.T).T.view(type=A.__class__) else: return np.dot(A, B) def sparse_nnls(y, X, momentum=1, step_size=0.01, non_neg=True, check_error_iter=10, max_error_checks=10, converge_on_sse=0.99): """ Solve y=Xh for h, using gradient descent, with X a sparse matrix Parameters ---------- y : 1-d array of shape (N) The data. Needs to be dense. X : ndarray. May be either sparse or dense. Shape (N, M) The regressors momentum : float, optional (default: 1). The persistence of the gradient. step_size : float, optional (default: 0.01). The increment of parameter update in each iteration non_neg : Boolean, optional (default: True) Whether to enforce non-negativity of the solution. check_error_iter : int (default:10) How many rounds to run between error evaluation for convergence-checking. max_error_checks : int (default: 10) Don't check errors more than this number of times if no improvement in r-squared is seen. converge_on_sse : float (default: 0.99) a percentage improvement in SSE that is required each time to say that things are still going well. Returns ------- h_best : The best estimate of the parameters. """ num_regressors = X.shape[1] # Initialize the parameters at the origin: h = np.zeros(num_regressors) # If nothing good happens, we'll return that: h_best = h gradient = np.zeros(num_regressors) iteration = 1 ss_residuals_min = np.inf # This will keep track of the best solution sse_best = np.inf # This will keep track of the best performance so far count_bad = 0 # Number of times estimation error has gone up. error_checks = 0 # How many error checks have we done so far while 1: if iteration > 1: # The sum of squared error given the current parameter setting: sse = np.sum((y - spdot(X, h)) ** 2) # The gradient is (Kay 2008 supplemental page 27): gradient = spdot(X.T, spdot(X, h) - y) gradient += momentum * gradient # Normalize to unit-length unit_length_gradient = (gradient / np.sqrt(np.dot(gradient, gradient))) # Update the parameters in the direction of the gradient: h -= step_size * unit_length_gradient if non_neg: # Set negative values to 0: h[h < 0] = 0 # Every once in a while check whether it's converged: if np.mod(iteration, check_error_iter): # This calculates the sum of squared residuals at this point: sse = np.sum((y - spdot(X, h)) ** 2) # Did we do better this time around? if sse < ss_residuals_min: # Update your expectations about the minimum error: ss_residuals_min = sse h_best = h # This holds the best params we have so far # Are we generally (over iterations) converging on # sufficient improvement in r-squared? 
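# A small illustrative check of spdot (defined above) mixing a dense and a
# sparse operand; the arrays are arbitrary random data.
import numpy as np
import scipy.sparse as sps
from dipy.core.optimize import spdot

A = np.random.randn(4, 3)
B_sparse = sps.csr_matrix(np.random.randn(3, 2))
reference = np.dot(A, B_sparse.toarray())   # plain dense product
mixed = spdot(A, B_sparse)                  # dense result with the same values
print(np.allclose(reference, mixed))        # True (up to floating point)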
if sse < converge_on_sse * sse_best: sse_best = sse count_bad = 0 else: count_bad += 1 else: count_bad += 1 if count_bad >= max_error_checks: return h_best error_checks += 1 iteration += 1 class SKLearnLinearSolver(with_metaclass(abc.ABCMeta, object)): """ Provide a sklearn-like uniform interface to algorithms that solve problems of the form: $y = Ax$ for $x$ Sub-classes of SKLearnLinearSolver should provide a 'fit' method that have the following signature: `SKLearnLinearSolver.fit(X, y)`, which would set an attribute `SKLearnLinearSolver.coef_`, with the shape (X.shape[1],), such that an estimate of y can be calculated as: `y_hat = np.dot(X, SKLearnLinearSolver.coef_.T)` """ def __init__(self, *args, **kwargs): self._args = args self._kwargs = kwargs @abc.abstractmethod def fit(self, X, y): """Implement for all derived classes """ def predict(self, X): """ Predict using the result of the model Parameters ---------- X : array-like (n_samples, n_features) Samples. Returns ------- C : array, shape = (n_samples,) Predicted values. """ X = np.asarray(X) return np.dot(X, self.coef_.T) class NonNegativeLeastSquares(SKLearnLinearSolver): """ A sklearn-like interface to scipy.optimize.nnls """ def fit(self, X, y): """ Fit the NonNegativeLeastSquares linear model to data Parameters ---------- """ coef, rnorm = opt.nnls(X, y) self.coef_ = coef return self dipy-0.13.0/dipy/core/profile.py000066400000000000000000000056351317371701200164620ustar00rootroot00000000000000""" Class for profiling cython code """ import os import subprocess from dipy.utils.optpkg import optional_package cProfile, _, _ = optional_package('cProfile') pstats, _, _ = optional_package('pstats', 'pstats is not installed. It is part of the' 'python-profiler package in Debian/Ubuntu') class Profiler(): ''' Profile python/cython files or functions If you are profiling cython code you need to add # cython: profile=True on the top of your .pyx file and for the functions that you do not want to profile you can use this decorator in your cython files @cython.profile(False) Parameters ------------- caller : file or function call args : function arguments Attributes ------------ stats : function, stats.print_stats(10) will prin the 10 slower functions Examples ----------- from dipy.core.profile import Profiler import numpy as np p=Profiler(np.sum,np.random.rand(1000000,3)) fname='test.py' p=Profiler(fname) p.print_stats(10) p.print_stats('det') References ------------- http://docs.cython.org/src/tutorial/profiling_tutorial.html http://docs.python.org/library/profile.html http://packages.python.org/line_profiler/ ''' def __init__(self, call=None, *args): # Delay import until use of class instance. 
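# An illustrative sketch of the two non-negative solvers from
# dipy.core.optimize above (sparse_nnls and NonNegativeLeastSquares) on a
# tiny, noise-free problem; the identity design matrix and the tolerance are
# arbitrary choices that make recovery easy to eyeball.
import numpy as np
import scipy.sparse as sps
from dipy.core.optimize import sparse_nnls, NonNegativeLeastSquares

rand = np.random.RandomState(0)
beta = rand.rand(10)                            # non-negative ground truth
X = np.eye(10)
y = np.dot(X, beta)

h = sparse_nnls(y, sps.csr_matrix(X))           # gradient-descent solver above
solver = NonNegativeLeastSquares().fit(X, y)    # scipy.optimize.nnls wrapper
print(np.allclose(h, beta, atol=0.05))          # True, up to the step size
print(np.allclose(solver.coef_, beta))          # True, exact for this easy case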
We were getting some very # odd build-as-we-go errors running tests and documentation otherwise import pyximport pyximport.install() try: ext = os.path.splitext(call)[1].lower() print('ext', ext) if ext == '.py' or ext == '.pyx': # python/cython file print('profiling python/cython file ...') subprocess.call(['python', '-m', 'cProfile', '-o', 'profile.prof', call]) s = pstats.Stats('profile.prof') stats = s.strip_dirs().sort_stats('time') self.stats = stats except: print('profiling function call ...') self.args = args self.call = call cProfile.runctx('self._profile_function()', globals(), locals(), 'profile.prof') s = pstats.Stats('profile.prof') stats = s.strip_dirs().sort_stats('time') self.stats = stats def _profile_function(self): self.call(*self.args) def print_stats(self, N=10): ''' Print stats for profiling You can use it in all different ways developed in pstats for example print_stats(10) will give you the 10 slowest calls or print_stats('function_name') will give you the stats for all the calls with name 'function_name' Parameters ------------ N : stats.print_stats argument ''' self.stats.print_stats(N) dipy-0.13.0/dipy/core/pyalloc.pxd000066400000000000000000000005551317371701200166240ustar00rootroot00000000000000# -*- python -*- or rather like from python_string cimport PyString_FromStringAndSize, \ PyString_AS_STRING, PyString_Size # Function to allocate, wrap memory via Python string creation cdef inline object pyalloc_v(Py_ssize_t n, void **pp): cdef object ob = PyString_FromStringAndSize(NULL, n) pp[0] = PyString_AS_STRING(ob) return ob dipy-0.13.0/dipy/core/rng.py000066400000000000000000000066551317371701200156130ustar00rootroot00000000000000""" Random number generation utilities """ from __future__ import division, print_function, absolute_import from math import floor from platform import architecture def WichmannHill2006(): ''' B.A. Wichmann, I.D. Hill, Generating good pseudo-random numbers, Computational Statistics & Data Analysis, Volume 51, Issue 3, 1 December 2006, Pages 1614-1622, ISSN 0167-9473, DOI: 10.1016/j.csda.2006.05.019. (http://www.sciencedirect.com/science/article/B6V8V-4K7F86W-2/2/a3a33291b8264e4c882a8f21b6e43351) for advice on generating many sequences for use together, and on alternative algorithms and codes Examples ---------- >>> from dipy.core import rng >>> rng.ix, rng.iy, rng.iz, rng.it = 100001, 200002, 300003, 400004 >>> N = 1000 >>> a = [rng.WichmannHill2006() for i in range(N)] ''' global ix, iy, iz, it if architecture()[0] == '64': # If 64 bits are available then the following lines of code will be # faster. ix = (11600 * ix) % 2147483579 iy = (47003 * iy) % 2147483543 iz = (23000 * iz) % 2147483423 it = (33000 * it) % 2147483123 else: # If only 32 bits are available ix = 11600 * (ix % 185127) - 10379 * (ix / 185127) iy = 47003 * (ix % 45688) - 10479 * (iy / 45688) iz = 23000 * (iz % 93368) - 19423 * (iz / 93368) it = 33000 * (it % 65075) - 8123 * (it / 65075) if ix < 0: ix = ix + 2147483579 if iy < 0: iy = iy + 2147483543 if iz < 0: iz = iz + 2147483423 if it < 0: it = it + 2147483123 W = ix/2147483579.0 + iy/2147483543.0 + iz/2147483423.0 + it/2147483123.0 return W - floor(W) def WichmannHill1982(): ''' Algorithm AS 183 Appl. Statist. (1982) vol.31, no.2 Returns a pseudo-random number rectangularly distributed between 0 and 1. The cycle length is 6.95E+12 (See page 123 of Applied Statistics (1984) vol.33), not as claimed in the original article. ix, iy and iz should be set to integer values between 1 and 30000 before the first entry. 
Integer arithmetic up to 5212632 is required. ''' import numpy as np global ix, iy, iz ix = (171 * ix) % 30269 iy = (172 * iy) % 30307 iz = (170 * iz) % 30323 ''' If integer arithmetic only up to 30323 (!) is available, the preceding 3 statements may be replaced by: ix = 171 * (ix % 177) - 2 * (ix / 177) iy = 172 * (iy % 176) - 35 * (iy / 176) iz = 170 * (iz % 178) - 63 * (iz / 178) if ix < 0: ix = ix + 30269 if iy < 0: iy = iy + 30307 if iz < 0: iz = iz + 30323 ''' return np.remainder(np.float(ix) / 30269. + np.float(iy) / 30307. + np.float(iz) / 30323., 1.0) def LEcuyer(): ''' Generate uniformly distributed random numbers using the 32-bit generator from figure 3 of: L'Ecuyer, P. Efficient and portable combined random number generators, C.A.C.M., vol. 31, 742-749 & 774-?, June 1988. The cycle length is claimed to be 2.30584E+18 ''' global s1, s2 k = s1 / 53668 s1 = 40014 * (s1 - k * 53668) - k * 12211 if s1 < 0: s1 = s1 + 2147483563 k = s2 / 52774 s2 = 40692 * (s2 - k * 52774) - k * 3791 if s2 < 0: s2 = s2 + 2147483399 z = s1 - s2 if z < 0: z = z + 2147483562 return z / 2147483563. dipy-0.13.0/dipy/core/sphere.py000066400000000000000000000475001317371701200163050ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import import numpy as np import warnings from dipy.utils.six.moves import xrange from dipy.core.geometry import cart2sphere, sphere2cart, vector_norm from dipy.core.onetime import auto_attr from dipy.reconst.recspeed import remove_similar_vertices __all__ = ['Sphere', 'HemiSphere', 'faces_from_sphere_vertices', 'unique_edges'] def _all_specified(*args): for a in args: if a is None: return False return True def _some_specified(*args): for a in args: if a is not None: return True return False def faces_from_sphere_vertices(vertices): """ Triangulate a set of vertices on the sphere. Parameters ---------- vertices : (M, 3) ndarray XYZ coordinates of vertices on the sphere. Returns ------- faces : (N, 3) ndarray Indices into vertices; forms triangular faces. """ from scipy.spatial import Delaunay faces = Delaunay(vertices).convex_hull if len(vertices) < 2**16: return np.asarray(faces, np.uint16) else: return faces def unique_edges(faces, return_mapping=False): """Extract all unique edges from given triangular faces. Parameters ---------- faces : (N, 3) ndarray Vertex indices forming triangular faces. return_mapping : bool If true, a mapping to the edges of each face is returned. Returns ------- edges : (N, 2) ndarray Unique edges. mapping : (N, 3) For each face, [x, y, z], a mapping to it's edges [a, b, c]. :: y /\ / \ a/ \b / \ / \ /__________\ x c z """ faces = np.asarray(faces) edges = np.concatenate([faces[:, 0:2], faces[:, 1:3], faces[:, ::2]]) if return_mapping: ue, inverse = unique_sets(edges, return_inverse=True) return ue, inverse.reshape((3, -1)).T else: return unique_sets(edges) def unique_sets(sets, return_inverse=False): """Remove duplicate sets. Parameters ---------- sets : array (N, k) N sets of size k. return_inverse : bool If True, also returns the indices of unique_sets that can be used to reconstruct `sets` (the original ordering of each set may not be preserved). Return ------ unique_sets : array Unique sets. inverse : array (N,) The indices to reconstruct `sets` from `unique_sets`. 
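# Usage sketch for dipy.core.rng above: the generators read their seed state
# from module-level globals, so those must be assigned before the first call;
# identical seeds reproduce identical sequences (the seed values here are
# arbitrary). WichmannHill1982 and LEcuyer follow the same pattern through
# rng.ix/iy/iz and rng.s1/s2 respectively.
from dipy.core import rng

rng.ix, rng.iy, rng.iz, rng.it = 100001, 200002, 300003, 400004
seq_a = [rng.WichmannHill2006() for _ in range(3)]
rng.ix, rng.iy, rng.iz, rng.it = 100001, 200002, 300003, 400004
seq_b = [rng.WichmannHill2006() for _ in range(3)]
print(seq_a == seq_b)   # True: re-seeding gives the same draws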
""" sets = np.sort(sets, 1) order = np.lexsort(sets.T) sets = sets[order] flag = np.ones(len(sets), 'bool') flag[1:] = (sets[1:] != sets[:-1]).any(-1) uniqsets = sets[flag] if return_inverse: inverse = np.empty_like(order) inverse[order] = np.arange(len(order)) index = flag.cumsum() - 1 return uniqsets, index[inverse] else: return uniqsets class Sphere(object): """Points on the unit sphere. The sphere can be constructed using one of three conventions:: Sphere(x, y, z) Sphere(xyz=xyz) Sphere(theta=theta, phi=phi) Parameters ---------- x, y, z : 1-D array_like Vertices as x-y-z coordinates. theta, phi : 1-D array_like Vertices as spherical coordinates. Theta and phi are the inclination and azimuth angles respectively. xyz : (N, 3) ndarray Vertices as x-y-z coordinates. faces : (N, 3) ndarray Indices into vertices that form triangular faces. If unspecified, the faces are computed using a Delaunay triangulation. edges : (N, 2) ndarray Edges between vertices. If unspecified, the edges are derived from the faces. """ def __init__(self, x=None, y=None, z=None, theta=None, phi=None, xyz=None, faces=None, edges=None): all_specified = _all_specified(x, y, z) + _all_specified(xyz) + \ _all_specified(theta, phi) one_complete = (_some_specified(x, y, z) + _some_specified(xyz) + _some_specified(theta, phi)) if not (all_specified == 1 and one_complete == 1): raise ValueError("Sphere must be constructed using either " "(x,y,z), (theta, phi) or xyz.") if edges is not None and faces is None: raise ValueError("Either specify both faces and " "edges, only faces, or neither.") if edges is not None: self.edges = np.asarray(edges) if faces is not None: self.faces = np.asarray(faces) if theta is not None: self.theta = np.array(theta, copy=False, ndmin=1) self.phi = np.array(phi, copy=False, ndmin=1) return if xyz is not None: xyz = np.asarray(xyz) x, y, z = xyz.T x, y, z = (np.asarray(t) for t in (x, y, z)) r, self.theta, self.phi = cart2sphere(x, y, z) if not np.allclose(r, 1): warnings.warn("Vertices are not on the unit sphere.") @auto_attr def vertices(self): return np.column_stack(sphere2cart(1, self.theta, self.phi)) @property def x(self): return self.vertices[:, 0] @property def y(self): return self.vertices[:, 1] @property def z(self): return self.vertices[:, 2] @auto_attr def faces(self): faces = faces_from_sphere_vertices(self.vertices) return faces @auto_attr def edges(self): return unique_edges(self.faces) def subdivide(self, n=1): """Subdivides each face of the sphere into four new faces. New vertices are created at a, b, and c. Then each face [x, y, z] is divided into faces [x, a, c], [y, a, b], [z, b, c], and [a, b, c]. :: y /\ / \ a/____\b /\ /\ / \ / \ /____\/____\ x c z Parameters ---------- n : int, optional The number of subdivisions to preform. Returns ------- new_sphere : Sphere The subdivided sphere. 
""" vertices = self.vertices faces = self.faces for i in xrange(n): edges, mapping = unique_edges(faces, return_mapping=True) new_vertices = vertices[edges].sum(1) new_vertices /= vector_norm(new_vertices, keepdims=True) mapping += len(vertices) vertices = np.vstack([vertices, new_vertices]) x, y, z = faces.T a, b, c = mapping.T face1 = np.column_stack([x, a, c]) face2 = np.column_stack([y, b, a]) face3 = np.column_stack([z, c, b]) face4 = mapping faces = np.concatenate([face1, face2, face3, face4]) if len(vertices) < 2**16: faces = np.asarray(faces, dtype='uint16') return Sphere(xyz=vertices, faces=faces) def find_closest(self, xyz): """ Find the index of the vertex in the Sphere closest to the input vector Parameters ---------- xyz : array-like, 3 elements A unit vector Return ------ idx : int The index into the Sphere.vertices array that gives the closest vertex (in angle). """ cos_sim = np.dot(self.vertices, xyz) return np.argmax(cos_sim) class HemiSphere(Sphere): """Points on the unit sphere. A HemiSphere is similar to a Sphere but it takes antipodal symmetry into account. Antipodal symmetry means that point v on a HemiSphere is the same as the point -v. Duplicate points are discarded when constructing a HemiSphere (including antipodal duplicates). `edges` and `faces` are remapped to the remaining points as closely as possible. The HemiSphere can be constructed using one of three conventions:: HemiSphere(x, y, z) HemiSphere(xyz=xyz) HemiSphere(theta=theta, phi=phi) Parameters ---------- x, y, z : 1-D array_like Vertices as x-y-z coordinates. theta, phi : 1-D array_like Vertices as spherical coordinates. Theta and phi are the inclination and azimuth angles respectively. xyz : (N, 3) ndarray Vertices as x-y-z coordinates. faces : (N, 3) ndarray Indices into vertices that form triangular faces. If unspecified, the faces are computed using a Delaunay triangulation. edges : (N, 2) ndarray Edges between vertices. If unspecified, the edges are derived from the faces. tol : float Angle in degrees. Vertices that are less than tol degrees apart are treated as duplicates. 
See Also -------- Sphere """ def __init__(self, x=None, y=None, z=None, theta=None, phi=None, xyz=None, faces=None, edges=None, tol=1e-5): """Create a HemiSphere from points""" sphere = Sphere(x=x, y=y, z=z, theta=theta, phi=phi, xyz=xyz) uniq_vertices, mapping = remove_similar_vertices(sphere.vertices, tol, return_mapping=True) uniq_vertices *= 1 - 2*(uniq_vertices[:, -1:] < 0) if faces is not None: faces = np.asarray(faces) faces = unique_sets(mapping[faces]) if edges is not None: edges = np.asarray(edges) edges = unique_sets(mapping[edges]) Sphere.__init__(self, xyz=uniq_vertices, edges=edges, faces=faces) @classmethod def from_sphere(klass, sphere, tol=1e-5): """Create instance from a Sphere""" return klass(theta=sphere.theta, phi=sphere.phi, edges=sphere.edges, faces=sphere.faces, tol=tol) def mirror(self): """Create a full Sphere from a HemiSphere""" n = len(self.vertices) vertices = np.vstack([self.vertices, -self.vertices]) edges = np.vstack([self.edges, n + self.edges]) _switch_vertex(edges[:, 0], edges[:, 1], vertices) faces = np.vstack([self.faces, n + self.faces]) _switch_vertex(faces[:, 0], faces[:, 1], vertices) _switch_vertex(faces[:, 0], faces[:, 2], vertices) return Sphere(xyz=vertices, edges=edges, faces=faces) @auto_attr def faces(self): vertices = np.vstack([self.vertices, -self.vertices]) faces = faces_from_sphere_vertices(vertices) return unique_sets(faces % len(self.vertices)) def subdivide(self, n=1): """Create a more subdivided HemiSphere See Sphere.subdivide for full documentation. """ sphere = self.mirror() sphere = sphere.subdivide(n) return HemiSphere.from_sphere(sphere) def find_closest(self, xyz): """ Find the index of the vertex in the Sphere closest to the input vector, taking into account antipodal symmetry Parameters ---------- xyz : array-like, 3 elements A unit vector Return ------ idx : int The index into the Sphere.vertices array that gives the closest vertex (in angle). """ cos_sim = abs(np.dot(self.vertices, xyz)) return np.argmax(cos_sim) def _switch_vertex(index1, index2, vertices): """When we mirror an edge (a, b). We can either create (a, b) and (a', b') OR (a, b') and (a', b). The angles of edges (a, b) and (a, b') are supplementary, so we choose the two new edges such that their angles are less than 90 degrees. """ n = len(vertices) A = vertices[index1] B = vertices[index2] is_far = (A * B).sum(-1) < 0 index2[is_far] = index2[is_far] + (n / 2.0) index2 %= n def _get_forces(charges): r"""Given a set of charges on the surface of the sphere gets total force those charges exert on each other. The force exerted by one charge on another is given by Coulomb's law. For this simulation we use charges of equal magnitude so this force can be written as $\vec{r}/r^3$, up to a constant factor, where $\vec{r}$ is the separation of the two charges and $r$ is the magnitude of $\vec{r}$. Forces are additive so the total force on each of the charges is the sum of the force exerted by each other charge in the system. Charges do not exert a force on themselves. The electric potential can similarly be written as $1/r$ and is also additive. """ all_charges = np.concatenate((charges, -charges)) all_charges = all_charges[:, None] r = charges - all_charges r_mag = np.sqrt((r*r).sum(-1))[:, :, None] with warnings.catch_warnings(): warnings.simplefilter("ignore") force = r / r_mag**3 potential = 1. 
/ r_mag d = np.arange(len(charges)) force[d, d] = 0 force = force.sum(0) force_r_comp = (charges*force).sum(-1)[:, None] f_theta = force - force_r_comp*charges potential[d, d] = 0 potential = 2*potential.sum() return f_theta, potential def disperse_charges(hemi, iters, const=.2): """Models electrostatic repulsion on the unit sphere Places charges on a sphere and simulates the repulsive forces felt by each one. Allows the charges to move for some number of iterations and returns their final location as well as the total potential of the system at each step. Parameters ---------- hemi : HemiSphere Points on a unit sphere. iters : int Number of iterations to run. const : float Using a smaller const could provide a more accurate result, but will need more iterations to converge. Returns ------- hemi : HemiSphere Distributed points on a unit sphere. potential : ndarray The electrostatic potential at each iteration. This can be useful to check if the repulsion converged to a minimum. Note: ----- This function is meant to be used with diffusion imaging so antipodal symmetry is assumed. Therefor each charge must not only be unique, but if there is a charge at +x, there cannot be a charge at -x. These are treated as the same location and because the distance between the two charges will be zero, the result will be unstable. """ if not isinstance(hemi, HemiSphere): raise ValueError("expecting HemiSphere") charges = hemi.vertices forces, v = _get_forces(charges) force_mag = np.sqrt((forces*forces).sum()) const = const / force_mag.max() potential = np.empty(iters) v_min = v for ii in xrange(iters): new_charges = charges + forces * const norms = np.sqrt((new_charges**2).sum(-1)) new_charges /= norms[:, None] new_forces, v = _get_forces(new_charges) if v <= v_min: charges = new_charges forces = new_forces potential[ii] = v_min = v else: const /= 2. potential[ii] = v_min return HemiSphere(xyz=charges), potential def interp_rbf(data, sphere_origin, sphere_target, function='multiquadric', epsilon=None, smooth=0.1, norm="angle"): """Interpolate data on the sphere, using radial basis functions. Parameters ---------- data : (N,) ndarray Function values on the unit sphere. sphere_origin : Sphere Positions of data values. sphere_target : Sphere M target positions for which to interpolate. function : {'multiquadric', 'inverse', 'gaussian'} Radial basis function. epsilon : float Radial basis function spread parameter. Defaults to approximate average distance between nodes. a good start smooth : float values greater than zero increase the smoothness of the approximation with 0 as pure interpolation. Default: 0.1 norm : str A string indicating the function that returns the "distance" between two points. 'angle' - The angle between two vectors 'euclidean_norm' - The Euclidean distance Returns ------- v : (M,) ndarray Interpolated values. See Also -------- scipy.interpolate.Rbf """ from scipy.interpolate import Rbf def angle(x1, x2): xx = np.arccos((x1 * x2).sum(axis=0)) xx[np.isnan(xx)] = 0 return xx def euclidean_norm(x1, x2): return np.sqrt(((x1 - x2)**2).sum(axis=0)) if norm == "angle": norm = angle elif norm == "euclidean_norm": w_s = "The Eucldian norm used for interpolation is inaccurate " w_s += "and will be deprecated in future versions. 
Please consider " w_s += "using the 'angle' norm instead" warnings.warn(w_s, DeprecationWarning) norm = euclidean_norm # Workaround for bug in older versions of SciPy that don't allow # specification of epsilon None: if epsilon is not None: kwargs = {'function': function, 'epsilon': epsilon, 'smooth': smooth, 'norm': norm} else: kwargs = {'function': function, 'smooth': smooth, 'norm': norm} rbfi = Rbf(sphere_origin.x, sphere_origin.y, sphere_origin.z, data, **kwargs) return rbfi(sphere_target.x, sphere_target.y, sphere_target.z) def euler_characteristic_check(sphere, chi=2): r"""Checks the euler characteristic of a sphere If $f$ = number of faces, $e$ = number_of_edges and $v$ = number of vertices, the Euler formula says $f-e+v = 2$ for a mesh on a sphere. More generally, whether $f -e + v == \chi$ where $\chi$ is the Euler characteristic of the mesh. - Open chain (track) has $\chi=1$ - Closed chain (loop) has $\chi=0$ - Disk has $\chi=1$ - Sphere has $\chi=2$ - HemiSphere has $\chi=1$ Parameters ---------- sphere : Sphere A Sphere instance with vertices, edges and faces attributes. chi : int, optional The Euler characteristic of the mesh to be checked Returns ------- check : bool True if the mesh has Euler characteristic $\chi$ Examples -------- >>> euler_characteristic_check(unit_octahedron) True >>> hemisphere = HemiSphere.from_sphere(unit_icosahedron) >>> euler_characteristic_check(hemisphere, chi=1) True """ v = sphere.vertices.shape[0] e = sphere.edges.shape[0] f = sphere.faces.shape[0] return (f - e + v) == chi octahedron_vertices = np.array( [[1.0, 0.0, 0.0], [-1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, -1.0, 0.0], [0.0, 0.0, 1.0], [0.0, 0.0, -1.0], ]) octahedron_faces = np.array( [[0, 4, 2], [1, 5, 3], [4, 2, 1], [5, 3, 0], [1, 4, 3], [0, 5, 2], [0, 4, 3], [1, 5, 2], ], dtype='uint16') t = (1 + np.sqrt(5)) / 2 icosahedron_vertices = np.array( [[t, 1, 0], # 0 [-t, 1, 0], # 1 [t, -1, 0], # 2 [-t, -1, 0], # 3 [1, 0, t], # 4 [1, 0, -t], # 5 [-1, 0, t], # 6 [-1, 0, -t], # 7 [0, t, 1], # 8 [0, -t, 1], # 9 [0, t, -1], # 10 [0, -t, -1], ]) # 11 icosahedron_vertices /= vector_norm(icosahedron_vertices, keepdims=True) icosahedron_faces = np.array( [[8, 4, 0], [2, 5, 0], [2, 5, 11], [9, 2, 11], [2, 4, 0], [9, 2, 4], [10, 8, 1], [10, 8, 0], [10, 5, 0], [6, 3, 1], [9, 6, 3], [6, 8, 1], [6, 8, 4], [9, 6, 4], [7, 10, 1], [7, 10, 5], [7, 3, 1], [7, 3, 11], [9, 3, 11], [7, 5, 11], ], dtype='uint16') unit_octahedron = Sphere(xyz=octahedron_vertices, faces=octahedron_faces) unit_icosahedron = Sphere(xyz=icosahedron_vertices, faces=icosahedron_faces) hemi_icosahedron = HemiSphere.from_sphere(unit_icosahedron) dipy-0.13.0/dipy/core/sphere_stats.py000066400000000000000000000201451317371701200175170ustar00rootroot00000000000000""" Statistics on spheres """ from __future__ import division, print_function, absolute_import import numpy as np import dipy.core.geometry as geometry from itertools import permutations def random_uniform_on_sphere(n=1, coords='xyz'): r'''Random unit vectors from a uniform distribution on the sphere. Parameters ----------- n : int Number of random vectors coords : {'xyz', 'radians', 'degrees'} 'xyz' for cartesian form 'radians' for spherical form in rads 'degrees' for spherical form in degrees Notes ------ The uniform distribution on the sphere, parameterized by spherical coordinates $(\theta, \phi)$, should verify $\phi\sim U[0,2\pi]$, while $z=\cos(\theta)\sim U[-1,1]$. References ----------- .. [1] http://mathworld.wolfram.com/SpherePointPicking.html. 
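# Illustrative sketch of disperse_charges from dipy.core.sphere above: start
# from random directions on the hemisphere and let the simulated repulsion
# spread them out (point count, seed and iteration count are arbitrary).
import numpy as np
from dipy.core.sphere import HemiSphere, disperse_charges

rand = np.random.RandomState(0)
n_pts = 30
theta = np.pi * rand.rand(n_pts)
phi = 2 * np.pi * rand.rand(n_pts)
hsph_initial = HemiSphere(theta=theta, phi=phi)
hsph_updated, potential = disperse_charges(hsph_initial, iters=5000)
print(potential[0] > potential[-1])   # True: the potential decreases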
Returns -------- X : array, shape (n,3) if coords='xyz' or shape (n,2) otherwise Uniformly distributed vectors on the unit sphere. Examples --------- >>> from dipy.core.sphere_stats import random_uniform_on_sphere >>> X = random_uniform_on_sphere(4, 'radians') >>> X.shape == (4, 2) True >>> X = random_uniform_on_sphere(4, 'xyz') >>> X.shape == (4, 3) True ''' z = np.random.uniform(-1, 1, n) theta = np.arccos(z) phi = np.random.uniform(0, 2*np.pi, n) if coords == 'xyz': r = np.ones(n) return np.vstack(geometry.sphere2cart(r, theta, phi)).T angles = np.vstack((theta, phi)).T if coords == 'radians': return angles if coords == 'degrees': return np.rad2deg(angles) def eigenstats(points, alpha=0.05): r'''Principal direction and confidence ellipse Implements equations in section 6.3.1(ii) of Fisher, Lewis and Embleton, supplemented by equations in section 3.2.5. Parameters ---------- points : arraey_like (N,3) array of points on the sphere of radius 1 in $\mathbb{R}^3$ alpha : real or None 1 minus the coverage for the confidence ellipsoid, e.g. 0.05 for 95% coverage. Returns ------- centre : vector (3,) centre of ellipsoid b1 : vector (2,) lengths of semi-axes of ellipsoid ''' n = points.shape[0] # the number of points rad2deg = 180/np.pi # scale angles from radians to degrees # there is a problem with averaging and axis data. ''' centroid = np.sum(points, axis=0)/n normed_centroid = geometry.normalized_vector(centroid) x,y,z = normed_centroid #coordinates of normed centroid polar_centroid = np.array(geometry.cart2sphere(x,y,z))*rad2deg ''' cross = np.dot(points.T, points)/n # cross-covariance of points evals, evecs = np.linalg.eigh(cross) # eigen decomposition assuming that cross is symmetric order = np.argsort(evals) # eigenvalues don't necessarily come in an particular order? tau = evals[order] # the ordered eigenvalues h = evecs[:, order] # the eigenvectors in corresponding order h[:, 2] = h[:, 2]*np.sign(h[2, 2]) # map the first principal direction into upper hemisphere centre = np.array(geometry.cart2sphere(*h[:, 2]))[1:]*rad2deg # the spherical coordinates of the first principal direction e = np.zeros((2, 2)) p0 = np.dot(points, h[:, 0]) p1 = np.dot(points, h[:, 1]) p2 = np.dot(points, h[:, 2]) # the principal coordinates of the points e[0, 0] = np.sum((p0**2)*(p2**2))/(n*(tau[0]-tau[2])**2) e[1, 1] = np.sum((p1**2)*(p2**2))/(n*(tau[1]-tau[2])**2) e[0, 1] = np.sum((p0*p1*(p2**2))/(n*(tau[0]-tau[2])*(tau[1]-tau[2]))) e[1, 0] = e[0, 1] # e is a 2x2 helper matrix b1 = np.array([np.NaN, np.NaN]) d = -2*np.log(alpha)/n s, w = np.linalg.eig(e) g = np.sqrt(d*s) b1 = np.arcsin(g)*rad2deg # b1 are the estimated 100*(1-alpha)% confidence ellipsoid semi-axes # in degrees return centre, b1 ''' # b2 is equivalent to b1 above # try to invert e and calculate vector b the standard errors of # centre - these are forced to a mixture of NaN and/or 0 in singular cases b2 = np.array([np.NaN,np.NaN]) if np.abs(np.linalg.det(e)) < 10**-20: b2 = np.array([0,np.NaN]) else: try: f = np.linalg.inv(e) except np.linalg.LigAlgError: b2 = np.array([np.NaN, np.NaN]) else: t, y = np.linalg.eig(f) d = -2*np.log(alpha)/n g = np.sqrt(d/t) b2= np.arcsin(g)*rad2deg ''' def compare_orientation_sets(S, T): r'''Computes the mean cosine distance of the best match between points of two sets of vectors S and T (angular similarity) Parameters ----------- S : array, shape (m,d) First set of vectors. T : array, shape (n,d) Second set of vectors. Returns -------- max_mean_cosine : float Maximum mean cosine distance. 
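# A sketch of eigenstats (defined above) on directions clustered around the
# z-axis; centre comes back as (colatitude, azimuth) in degrees and b1 holds
# the semi-axes of the confidence ellipse (sample size and spread arbitrary).
import numpy as np
from dipy.core.geometry import sphere2cart
from dipy.core.sphere_stats import eigenstats

rand = np.random.RandomState(0)
n = 200
theta = 0.1 * rand.rand(n)                  # small colatitude: near the pole
phi = 2 * np.pi * rand.rand(n)
points = np.vstack(sphere2cart(np.ones(n), theta, phi)).T
centre, b1 = eigenstats(points, alpha=0.05)
print(centre[0] < 5)                        # principal direction close to +z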
Examples --------- >>> from dipy.core.sphere_stats import compare_orientation_sets >>> S=np.array([[1,0,0],[0,1,0],[0,0,1]]) >>> T=np.array([[1,0,0],[0,0,1]]) >>> compare_orientation_sets(S,T) 1.0 >>> T=np.array([[0,1,0],[1,0,0],[0,0,1]]) >>> S=np.array([[1,0,0],[0,0,1]]) >>> compare_orientation_sets(S,T) 1.0 >>> from dipy.core.sphere_stats import compare_orientation_sets >>> S=np.array([[-1,0,0],[0,1,0],[0,0,1]]) >>> T=np.array([[1,0,0],[0,0,-1]]) >>> compare_orientation_sets(S,T) 1.0 ''' m = len(S) n = len(T) if m < n: A = S.copy() a = m S = T T = A m = n n = a v = [np.sum([np.abs(np.dot(p[i], T[i])) for i in range(n)]) for p in permutations(S, n)] return np.max(v)/np.float(n) # return np.max(v)*np.float(n)/np.float(m) def angular_similarity(S, T): r'''Computes the cosine distance of the best match between points of two sets of vectors S and T Parameters ----------- S : array, shape (m,d) T : array, shape (n,d) Returns -------- max_cosine_distance:float Examples --------- >>> import numpy as np >>> from dipy.core.sphere_stats import angular_similarity >>> S=np.array([[1,0,0],[0,1,0],[0,0,1]]) >>> T=np.array([[1,0,0],[0,0,1]]) >>> angular_similarity(S,T) 2.0 >>> T=np.array([[0,1,0],[1,0,0],[0,0,1]]) >>> S=np.array([[1,0,0],[0,0,1]]) >>> angular_similarity(S,T) 2.0 >>> S=np.array([[-1,0,0],[0,1,0],[0,0,1]]) >>> T=np.array([[1,0,0],[0,0,-1]]) >>> angular_similarity(S,T) 2.0 >>> T=np.array([[0,1,0],[1,0,0],[0,0,1]]) >>> S=np.array([[1,0,0],[0,1,0],[0,0,1]]) >>> angular_similarity(S,T) 3.0 >>> S=np.array([[0,1,0],[1,0,0],[0,0,1]]) >>> T=np.array([[1,0,0],[0,np.sqrt(2)/2.,np.sqrt(2)/2.],[0,0,1]]) >>> angular_similarity(S,T) 2.7071067811865475 >>> S=np.array([[0,1,0],[1,0,0],[0,0,1]]) >>> T=np.array([[1,0,0]]) >>> angular_similarity(S,T) 1.0 >>> S=np.array([[0,1,0],[1,0,0]]) >>> T=np.array([[0,0,1]]) >>> angular_similarity(S,T) 0.0 >>> S=np.array([[0,1,0],[1,0,0]]) >>> T=np.array([[0,np.sqrt(2)/2.,np.sqrt(2)/2.]]) Now we use ``print`` to reduce the precision of of the printed output (so the doctests don't detect unimportant differences) >>> print('%.12f' % angular_similarity(S,T)) 0.707106781187 >>> S=np.array([[0,1,0]]) >>> T=np.array([[0,np.sqrt(2)/2.,np.sqrt(2)/2.]]) >>> print('%.12f' % angular_similarity(S,T)) 0.707106781187 >>> S=np.array([[0,1,0],[0,0,1]]) >>> T=np.array([[0,np.sqrt(2)/2.,np.sqrt(2)/2.]]) >>> print('%.12f' % angular_similarity(S,T)) 0.707106781187 ''' m = len(S) n = len(T) if m < n: A = S.copy() a = m S = T T = A m = n n = a """ v=[] for p in permutations(S,n): angles=[] for i in range(n): angles.append(np.abs(np.dot(p[i],T[i]))) v.append(np.sum(angles)) print(v) """ v = [np.sum([np.abs(np.dot(p[i], T[i])) for i in range(n)]) for p in permutations(S, n)] return np.float(np.max(v)) # *np.float(n)/np.float(m) dipy-0.13.0/dipy/core/subdivide_octahedron.py000066400000000000000000000037321317371701200212020ustar00rootroot00000000000000"""Create a unit sphere by subdividing all triangles of an octahedron recursively. The unit sphere has a radius of 1, which also means that all points in this sphere (assumed to have centre at [0, 0, 0]) have an absolute value (modulus) of 1. Another feature of the unit sphere is that the unit normals of this sphere are exactly the same as the vertices. This recursive method will avoid the common problem of the polar singularity, produced by 2d (lon-lat) parameterization methods. """ from dipy.core.sphere import unit_octahedron, HemiSphere def create_unit_sphere(recursion_level=2): """ Creates a unit sphere by subdividing a unit octahedron. 
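# Usage sketch for the two helpers this module provides (their bodies follow
# below); the vertex counts match the 4**recursion_level + 2 rule quoted in
# the docstrings.
from dipy.core.subdivide_octahedron import (create_unit_sphere,
                                            create_unit_hemisphere)

sphere = create_unit_sphere(recursion_level=3)
hemi = create_unit_hemisphere(recursion_level=3)
print(sphere.vertices.shape)    # (66, 3), i.e. 4**3 + 2 vertices
print(hemi.vertices.shape)      # (33, 3), half of the full sphere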
Starts with a unit octahedron and subdivides the faces, projecting the resulting points onto the surface of a unit sphere. Parameters ------------ recursion_level : int Level of subdivision, recursion_level=1 will return an octahedron, anything bigger will return a more subdivided sphere. The sphere will have $4^recursion_level+2$ vertices. Returns --------- Sphere : The unit sphere. See Also ---------- create_unit_hemisphere, Sphere """ if recursion_level > 7 or recursion_level < 1: raise ValueError("recursion_level must be between 1 and 7") return unit_octahedron.subdivide(recursion_level - 1) def create_unit_hemisphere(recursion_level=2): """Creates a unit sphere by subdividing a unit octahedron, returns half the sphere. Parameters ------------- recursion_level : int Level of subdivision, recursion_level=1 will return an octahedron, anything bigger will return a more subdivided sphere. The sphere will have $(4^recursion_level+2)/2$ vertices. Returns --------- HemiSphere : Half of a unit sphere. See Also ---------- create_unit_sphere, Sphere, HemiSphere """ sphere = create_unit_sphere(recursion_level) return HemiSphere.from_sphere(sphere) dipy-0.13.0/dipy/core/tests/000077500000000000000000000000001317371701200156015ustar00rootroot00000000000000dipy-0.13.0/dipy/core/tests/__init__.py000066400000000000000000000001661317371701200177150ustar00rootroot00000000000000# init to make tests into a package # Test callable from numpy.testing import Tester test = Tester().test del Tester dipy-0.13.0/dipy/core/tests/test_geometry.py000066400000000000000000000264121317371701200210520ustar00rootroot00000000000000""" Testing utility functions """ import numpy as np import random from dipy.core.geometry import (sphere2cart, cart2sphere, nearest_pos_semi_def, sphere_distance, cart_distance, vector_cosine, lambert_equal_area_projection_polar, circumradius, vec2vec_rotmat, vector_norm, compose_transformations, compose_matrix, decompose_matrix, perpendicular_directions, dist_to_corner) from nose.tools import (assert_false, assert_equal, assert_raises, assert_almost_equal) from numpy.testing import (assert_array_equal, assert_array_almost_equal, run_module_suite) from dipy.testing import sphere_points from itertools import permutations def test_vector_norm(): A = np.array([[1, 0, 0], [3, 4, 0], [0, 5, 12], [1, 2, 3]]) expected = np.array([1, 5, 13, np.sqrt(14)]) assert_array_almost_equal(vector_norm(A), expected) expected.shape = (4, 1) assert_array_almost_equal(vector_norm(A, keepdims=True), expected) assert_array_almost_equal(vector_norm(A.T, axis=0, keepdims=True), expected.T) def test_sphere_cart(): # test arrays of points rs, thetas, phis = cart2sphere(*(sphere_points.T)) xyz = sphere2cart(rs, thetas, phis) yield assert_array_almost_equal, xyz, sphere_points.T # test radius estimation big_sph_pts = sphere_points * 10.4 rs, thetas, phis = cart2sphere(*big_sph_pts.T) yield assert_array_almost_equal, rs, 10.4 xyz = sphere2cart(rs, thetas, phis) yield assert_array_almost_equal, xyz, big_sph_pts.T, 6 # test that result shapes match x, y, z = big_sph_pts.T r, theta, phi = cart2sphere(x[:1], y[:1], z) yield assert_equal, r.shape, theta.shape yield assert_equal, r.shape, phi.shape x, y, z = sphere2cart(r[:1], theta[:1], phi) yield assert_equal, x.shape, y.shape yield assert_equal, x.shape, z.shape # test a scalar point pt = sphere_points[3] r, theta, phi = cart2sphere(*pt) xyz = sphere2cart(r, theta, phi) yield assert_array_almost_equal, xyz, pt # Test full circle on x=1, y=1, z=1 x, y, z = 
sphere2cart(*cart2sphere(1.0, 1.0, 1.0)) yield assert_array_almost_equal, (x, y, z), (1.0, 1.0, 1.0) def test_invert_transform(): n = 100. theta = np.arange(n)/n * np.pi # Limited to 0,pi phi = (np.arange(n)/n - .5) * 2 * np.pi # Limited to 0,2pi x, y, z = sphere2cart(1, theta, phi) # Let's assume they're all unit vecs r, new_theta, new_phi = cart2sphere(x, y, z) # Transform back yield assert_array_almost_equal, theta, new_theta yield assert_array_almost_equal, phi, new_phi def test_nearest_pos_semi_def(): B = np.diag(np.array([1, 2, 3])) yield assert_array_almost_equal, B, nearest_pos_semi_def(B) B = np.diag(np.array([0, 2, 3])) yield assert_array_almost_equal, B, nearest_pos_semi_def(B) B = np.diag(np.array([0, 0, 3])) yield assert_array_almost_equal, B, nearest_pos_semi_def(B) B = np.diag(np.array([-1, 2, 3])) Bpsd = np.array([[0., 0., 0.], [0., 1.75, 0.], [0., 0., 2.75]]) yield assert_array_almost_equal, Bpsd, nearest_pos_semi_def(B) B = np.diag(np.array([-1, -2, 3])) Bpsd = np.array([[0., 0., 0.], [0., 0., 0.], [0., 0., 2.]]) yield assert_array_almost_equal, Bpsd, nearest_pos_semi_def(B) B = np.diag(np.array([-1.e-11, 0, 1000])) Bpsd = np.array([[0., 0., 0.], [0., 0., 0.], [0., 0., 1000.]]) yield assert_array_almost_equal, Bpsd, nearest_pos_semi_def(B) B = np.diag(np.array([-1, -2, -3])) Bpsd = np.array([[0., 0., 0.], [0., 0., 0.], [0., 0., 0.]]) yield assert_array_almost_equal, Bpsd, nearest_pos_semi_def(B) def test_cart_distance(): a = [0, 1] b = [1, 0] yield assert_array_almost_equal, cart_distance(a, b), np.sqrt(2) yield assert_array_almost_equal, cart_distance([1, 0], [-1, 0]), 2 pts1 = [2, 1, 0] pts2 = [0, 1, -2] yield assert_array_almost_equal, cart_distance(pts1, pts2), np.sqrt(8) pts2 = [[0, 1, -2], [-2, 1, 0]] yield assert_array_almost_equal, cart_distance(pts1, pts2), [np.sqrt(8), 4] def test_sphere_distance(): # make a circle, go around... 
radius = 3.2 n = 5000 n2 = n // 2 # pi at point n2 in array angles = np.linspace(0, np.pi*2, n, endpoint=False) x = np.sin(angles) * radius y = np.cos(angles) * radius # dists around half circle, including pi half_x = x[:n2+1] half_y = y[:n2+1] half_dists = np.sqrt(np.diff(half_x)**2 + np.diff(half_y)**2) # approximate distances from 0 to pi (not including 0) csums = np.cumsum(half_dists) # concatenated with distances from pi to 0 again cdists = np.r_[0, csums, csums[-2::-1]] # check approximation close to calculated sph_d = sphere_distance([0, radius], np.c_[x, y]) yield assert_array_almost_equal, cdists, sph_d # Now check with passed radius sph_d = sphere_distance([0, radius], np.c_[x, y], radius=radius) yield assert_array_almost_equal, cdists, sph_d # Check points not on surface raises error when asked for yield assert_raises, ValueError, sphere_distance, [1, 0], [0, 2] # Not when check is disabled sph_d = sphere_distance([1, 0], [0, 2], None, False) # Error when radii don't match passed radius yield assert_raises, ValueError, sphere_distance, [1, 0], [0, 1], 2.0 def test_vector_cosine(): a = [0, 1] b = [1, 0] yield assert_array_almost_equal, vector_cosine(a, b), 0 yield assert_array_almost_equal, vector_cosine([1, 0], [-1, 0]), -1 yield assert_array_almost_equal, vector_cosine([1, 0], [1, 1]), \ 1/np.sqrt(2) yield assert_array_almost_equal, vector_cosine([2, 0], [-4, 0]), -1 pts1 = [2, 1, 0] pts2 = [-2, -1, 0] yield assert_array_almost_equal, vector_cosine(pts1, pts2), -1 pts2 = [[-2, -1, 0], [2, 1, 0]] yield assert_array_almost_equal, vector_cosine(pts1, pts2), [-1, 1] # test relationship with correlation # not the same if non-zero vector mean a = np.random.uniform(size=(100,)) b = np.random.uniform(size=(100,)) cc = np.corrcoef(a, b)[0, 1] vcos = vector_cosine(a, b) yield assert_false, np.allclose(cc, vcos) # is the same if zero vector mean a_dm = a - np.mean(a) b_dm = b - np.mean(b) vcos = vector_cosine(a_dm, b_dm) yield assert_array_almost_equal, cc, vcos def test_lambert_equal_area_projection_polar(): theta = np.repeat(np.pi/3, 10) phi = np.linspace(0, 2*np.pi, 10) # points sit on circle with co-latitude pi/3 (60 degrees) leap = lambert_equal_area_projection_polar(theta, phi) yield \ assert_array_almost_equal, np.sqrt(np.sum(leap**2, axis=1)), \ np.array([1., 1., 1., 1., 1., 1., 1., 1., 1., 1.]) # points map onto the circle of radius 1 def test_lambert_equal_area_projection_cart(): xyz = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1], [-1, 0, 0], [0, -1, 0], [0, 0, -1]]) # points sit on +/-1 on all 3 axes r, theta, phi = cart2sphere(*xyz.T) leap = lambert_equal_area_projection_polar(theta, phi) r2 = np.sqrt(2) yield assert_array_almost_equal, np.sqrt(np.sum(leap**2, axis=1)), \ np.array([r2, r2, 0, r2, r2, 2]) # x and y =+/-1 map onto circle of radius sqrt(2) # z=1 maps to origin, and z=-1 maps to (an arbitrary point on) the # outer circle of radius 2 def test_circumradius(): yield assert_array_almost_equal, np.sqrt(0.5), \ circumradius(np.array([0, 2, 0]), np.array([2, 0, 0]), np.array([0, 0, 0])) def test_vec2vec_rotmat(): a = np.array([1, 0, 0]) for b in np.array([[0, 0, 1], [-1, 0, 0], [1, 0, 0]]): R = vec2vec_rotmat(a, b) assert_array_almost_equal(np.dot(R, a), b) def test_compose_transformations(): A = np.eye(4) A[0, -1] = 10 B = np.eye(4) B[0, -1] = -20 C = np.eye(4) C[0, -1] = 10 CBA = compose_transformations(A, B, C) assert_array_equal(CBA, np.eye(4)) assert_raises(ValueError, compose_transformations, A) def test_compose_decompose_matrix(): for translate in permutations(40 * 
np.random.rand(3), 3): for angles in permutations(np.deg2rad(90 * np.random.rand(3)), 3): for shears in permutations(3 * np.random.rand(3), 3): for scale in permutations(3 * np.random.rand(3), 3): mat = compose_matrix(translate=translate, angles=angles, shear=shears, scale=scale) sc, sh, ang, trans, _ = decompose_matrix(mat) assert_array_almost_equal(translate, trans) assert_array_almost_equal(angles, ang) assert_array_almost_equal(shears, sh) assert_array_almost_equal(scale, sc) def test_perpendicular_directions(): num = 35 vectors_v = np.zeros((4, 3)) for v in range(4): theta = random.uniform(0, np.pi) phi = random.uniform(0, 2*np.pi) vectors_v[v] = sphere2cart(1., theta, phi) vectors_v[3] = [1, 0, 0] for vector_v in vectors_v: pd = perpendicular_directions(vector_v, num=num, half=False) # see if length of pd is equal to the number of intendend samples assert_equal(num, len(pd)) # check if all directions are perpendicular to vector v for d in pd: cos_angle = np.dot(d, vector_v) assert_almost_equal(cos_angle, 0) # check if directions are sampled by multiples of 2*pi / num delta_a = 2. * np.pi / num for d in pd[1:]: angle = np.arccos(np.dot(pd[0], d)) rest = angle % delta_a if rest > delta_a * 0.99: # To correct cases of negative error rest = rest - delta_a assert_almost_equal(rest, 0) def _rotation_from_angles(r): R = np.array([[1, 0, 0], [0, np.cos(r[0]), np.sin(r[0])], [0, -np.sin(r[0]), np.cos(r[0])]]) R = np.dot(R, np.array([[np.cos(r[1]), 0, np.sin(r[1])], [0, 1, 0], [-np.sin(r[1]), 0, np.cos(r[1])]])) R = np.dot(R, np.array([[np.cos(r[2]), np.sin(r[2]), 0], [-np.sin(r[2]), np.cos(r[2]), 0], [0, 0, 1]])) R = np.linalg.inv(R) return R def test_dist_to_corner(): affine = np.eye(4) # Calculate the distance with the pythagorean theorem: pythagoras = np.sqrt(np.sum((np.diag(affine)[:-1] / 2) ** 2)) # Compare to calculation with this function: assert_array_almost_equal(dist_to_corner(affine), pythagoras) # Apply a rotation to the matrix, just to demonstrate the calculation is # robust to that: R = _rotation_from_angles(np.random.randn(3) * np.pi) new_aff = np.vstack([np.dot(R, affine[:3, :]), [0, 0, 0, 1]]) assert_array_almost_equal(dist_to_corner(new_aff), pythagoras) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/core/tests/test_gradients.py000066400000000000000000000254331317371701200212010ustar00rootroot00000000000000import warnings from nose.tools import assert_true, assert_raises import numpy as np import numpy.testing as npt from dipy.data import get_data from dipy.core.gradients import (gradient_table, GradientTable, gradient_table_from_bvals_bvecs, reorient_bvecs, generate_bvecs, check_multi_b) from dipy.io.gradients import read_bvals_bvecs def test_btable_prepare(): sq2 = np.sqrt(2) / 2. 
bvals = 1500 * np.ones(7) bvals[0] = 0 bvecs = np.array([[0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1], [sq2, sq2, 0], [sq2, 0, sq2], [0, sq2, sq2]]) bt = gradient_table(bvals, bvecs) npt.assert_array_equal(bt.bvecs, bvecs) bt.info fimg, fbvals, fbvecs = get_data('small_64D') bvals = np.load(fbvals) bvecs = np.load(fbvecs) bvecs = np.where(np.isnan(bvecs), 0, bvecs) bt = gradient_table(bvals, bvecs) npt.assert_array_equal(bt.bvecs, bvecs) bt2 = gradient_table(bvals, bvecs.T) npt.assert_array_equal(bt2.bvecs, bvecs) btab = np.concatenate((bvals[:, None], bvecs), axis=1) bt3 = gradient_table(btab) npt.assert_array_equal(bt3.bvecs, bvecs) npt.assert_array_equal(bt3.bvals, bvals) bt4 = gradient_table(btab.T) npt.assert_array_equal(bt4.bvecs, bvecs) npt.assert_array_equal(bt4.bvals, bvals) # Test for proper inputs (expects either bvals/bvecs or 4 by n): assert_raises(ValueError, gradient_table, bvecs) def test_GradientTable(): gradients = np.array([[0, 0, 0], [1, 0, 0], [0, 0, 1], [3, 4, 0], [5, 0, 12]], 'float') expected_bvals = np.array([0, 1, 1, 5, 13]) expected_b0s_mask = expected_bvals == 0 expected_bvecs = gradients / (expected_bvals + expected_b0s_mask)[:, None] gt = GradientTable(gradients, b0_threshold=0) npt.assert_array_almost_equal(gt.bvals, expected_bvals) npt.assert_array_equal(gt.b0s_mask, expected_b0s_mask) npt.assert_array_almost_equal(gt.bvecs, expected_bvecs) npt.assert_array_almost_equal(gt.gradients, gradients) gt = GradientTable(gradients, b0_threshold=1) npt.assert_array_equal(gt.b0s_mask, [1, 1, 1, 0, 0]) npt.assert_array_equal(gt.bvals, expected_bvals) npt.assert_array_equal(gt.bvecs, expected_bvecs) npt.assert_raises(ValueError, GradientTable, np.ones((6, 2))) npt.assert_raises(ValueError, GradientTable, np.ones((6,))) def test_gradient_table_from_bvals_bvecs(): sq2 = np.sqrt(2) / 2 bvals = [0, 1, 2, 3, 4, 5, 6, 0] bvecs = np.array([[0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1], [sq2, sq2, 0], [sq2, 0, sq2], [0, sq2, sq2], [0, 0, 0]]) gt = gradient_table_from_bvals_bvecs(bvals, bvecs, b0_threshold=0) npt.assert_array_equal(gt.bvecs, bvecs) npt.assert_array_equal(gt.bvals, bvals) npt.assert_array_equal(gt.gradients, np.reshape(bvals, (-1, 1)) * bvecs) npt.assert_array_equal(gt.b0s_mask, [1, 0, 0, 0, 0, 0, 0, 1]) # Test nans are replaced by 0 new_bvecs = bvecs.copy() new_bvecs[[0, -1]] = np.nan gt = gradient_table_from_bvals_bvecs(bvals, new_bvecs, b0_threshold=0) npt.assert_array_equal(gt.bvecs, bvecs) # Bvalue > 0 for non-unit vector bad_bvals = [2, 1, 2, 3, 4, 5, 6, 0] npt.assert_raises(ValueError, gradient_table_from_bvals_bvecs, bad_bvals, bvecs, b0_threshold=0.) # num_gard inconsistent bvals, bvecs bad_bvals = np.ones(7) npt.assert_raises(ValueError, gradient_table_from_bvals_bvecs, bad_bvals, bvecs, b0_threshold=0.) # bvals not 1d bad_bvals = np.ones((1, 8)) npt.assert_raises(ValueError, gradient_table_from_bvals_bvecs, bad_bvals, bvecs, b0_threshold=0.) # bvec not 2d bad_bvecs = np.ones((1, 8, 3)) npt.assert_raises(ValueError, gradient_table_from_bvals_bvecs, bvals, bad_bvecs, b0_threshold=0.) # bvec not (N, 3) bad_bvecs = np.ones((8, 2)) npt.assert_raises(ValueError, gradient_table_from_bvals_bvecs, bvals, bad_bvecs, b0_threshold=0.) # bvecs not unit vectors bad_bvecs = bvecs * 2 npt.assert_raises(ValueError, gradient_table_from_bvals_bvecs, bvals, bad_bvecs, b0_threshold=0.) 
# Test **kargs get passed along gt = gradient_table_from_bvals_bvecs(bvals, bvecs, b0_threshold=0, big_delta=5, small_delta=2) npt.assert_equal(gt.big_delta, 5) npt.assert_equal(gt.small_delta, 2) def test_b0s(): sq2 = np.sqrt(2) / 2. bvals = 1500 * np.ones(8) bvals[0] = 0 bvals[7] = 0 bvecs = np.array([[0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1], [sq2, sq2, 0], [sq2, 0, sq2], [0, sq2, sq2], [0, 0, 0]]) bt = gradient_table(bvals, bvecs) npt.assert_array_equal(np.where(bt.b0s_mask > 0)[0], np.array([0, 7])) npt.assert_array_equal(np.where(bt.b0s_mask == 0)[0], np.arange(1, 7)) def test_gtable_from_files(): fimg, fbvals, fbvecs = get_data('small_101D') gt = gradient_table(fbvals, fbvecs) bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs) npt.assert_array_equal(gt.bvals, bvals) npt.assert_array_equal(gt.bvecs, bvecs) def test_deltas(): sq2 = np.sqrt(2) / 2. bvals = 1500 * np.ones(7) bvals[0] = 0 bvecs = np.array([[0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1], [sq2, sq2, 0], [sq2, 0, sq2], [0, sq2, sq2]]) bt = gradient_table(bvals, bvecs, big_delta=5, small_delta=2) npt.assert_equal(bt.big_delta, 5) npt.assert_equal(bt.small_delta, 2) def test_qvalues(): sq2 = np.sqrt(2) / 2. bvals = 1500 * np.ones(7) bvals[0] = 0 bvecs = np.array([[0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1], [sq2, sq2, 0], [sq2, 0, sq2], [0, sq2, sq2]]) qvals = np.sqrt(bvals / 6) / (2 * np.pi) bt = gradient_table(bvals, bvecs, big_delta=8, small_delta=6) npt.assert_almost_equal(bt.qvals, qvals) def test_reorient_bvecs(): sq2 = np.sqrt(2) / 2 bvals = np.concatenate([[0], np.ones(6) * 1000]) bvecs = np.array([[0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1], [sq2, sq2, 0], [sq2, 0, sq2], [0, sq2, sq2]]) gt = gradient_table_from_bvals_bvecs(bvals, bvecs, b0_threshold=0) # The simple case: all affines are identity affs = np.zeros((6, 4, 4)) for i in range(4): affs[:, i, i] = 1 # We should get back the same b-vectors new_gt = reorient_bvecs(gt, affs) npt.assert_equal(gt.bvecs, new_gt.bvecs) # Now apply some rotations rotation_affines = [] rotated_bvecs = bvecs[:] for i in np.where(~gt.b0s_mask)[0]: rot_ang = np.random.rand() cos_rot = np.cos(rot_ang) sin_rot = np.sin(rot_ang) rotation_affines.append(np.array([[1, 0, 0, 0], [0, cos_rot, -sin_rot, 0], [0, sin_rot, cos_rot, 0], [0, 0, 0, 1]])) rotated_bvecs[i] = np.dot(rotation_affines[-1][:3, :3], bvecs[i]) # Copy over the rotation affines full_affines = rotation_affines[:] # And add some scaling and translation to each one to make this harder for i in range(len(full_affines)): full_affines[i] = np.dot(full_affines[i], np.array([[2.5, 0, 0, -10], [0, 2.2, 0, 20], [0, 0, 1, 0], [0, 0, 0, 1]])) gt_rot = gradient_table_from_bvals_bvecs(bvals, rotated_bvecs, b0_threshold=0) new_gt = reorient_bvecs(gt_rot, full_affines) # At the end of all this, we should be able to recover the original # vectors npt.assert_almost_equal(gt.bvecs, new_gt.bvecs) # We should be able to pass just the 3-by-3 rotation components to the same # effect new_gt = reorient_bvecs(gt_rot, np.array(rotation_affines)[:, :3, :3]) npt.assert_almost_equal(gt.bvecs, new_gt.bvecs) # Verify that giving the wrong number of affines raises an error: full_affines.append(np.zeros((4, 4))) assert_raises(ValueError, reorient_bvecs, gt_rot, full_affines) def test_nan_bvecs(): """ Test that the presence of nan's in b-vectors doesn't raise warnings. In previous versions, the presence of NaN in b-vectors was taken to indicate a 0 b-value, but also raised a warning when testing for the length of these vectors. This checks that it doesn't happen. 
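# Compact illustration of the behaviour this test guards: NaN b-vectors on
# b0 volumes are accepted and stored as zero vectors, without raising
# warnings (the b-values/b-vectors below are made up for the sketch).
import numpy as np
from dipy.core.gradients import gradient_table

bvals = np.array([0., 1000., 1000.])
bvecs = np.array([[np.nan, np.nan, np.nan],
                  [1., 0., 0.],
                  [0., 1., 0.]])
gtab = gradient_table(bvals, bvecs)
print(gtab.b0s_mask)    # [ True False False]
print(gtab.bvecs[0])    # [ 0.  0.  0.]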
""" fdata, fbvals, fbvecs = get_data() with warnings.catch_warnings(record=True) as w: gtab = gradient_table(fbvals, fbvecs) npt.assert_(len(w) == 0) def test_generate_bvecs(): """Tests whether we have properly generated bvecs. """ # Test if the generated bvectors are unit vectors bvecs = generate_bvecs(100) norm = [np.linalg.norm(v) for v in bvecs] npt.assert_almost_equal(norm, np.ones(100)) # Test if two generated vectors are almost orthogonal bvecs_2 = generate_bvecs(2) cos_theta = np.dot(bvecs_2[0], bvecs_2[1]) npt.assert_almost_equal(cos_theta, 0., decimal=6) def test_check_multi_b(): bvals = np.array([1000, 1000, 1000, 1000, 2000, 2000, 2000, 2000, 0]) bvecs = generate_bvecs(bvals.shape[-1]) gtab = gradient_table(bvals, bvecs) npt.assert_(check_multi_b(gtab, 2, non_zero=False)) # We don't consider differences this small to be sufficient: bvals = np.array([1995, 1995, 1995, 1995, 2005, 2005, 2005, 2005, 0]) bvecs = generate_bvecs(bvals.shape[-1]) gtab = gradient_table(bvals, bvecs) npt.assert_(not check_multi_b(gtab, 2, non_zero=True)) # Unless you specify that you are interested in this magnitude of changes: npt.assert_(check_multi_b(gtab, 2, non_zero=True, bmag=1)) # Or if you consider zero to be one of your b-values: npt.assert_(check_multi_b(gtab, 2, non_zero=False)) if __name__ == "__main__": from numpy.testing import run_module_suite run_module_suite() dipy-0.13.0/dipy/core/tests/test_graph.py000066400000000000000000000016741317371701200203230ustar00rootroot00000000000000from dipy.core.graph import Graph from nose.tools import assert_equal def test_graph(): g = Graph() g.add_node('a', 5) g.add_node('b', 6) g.add_node('c', 10) g.add_node('d', 11) g.add_edge('a', 'b') g.add_edge('b', 'c') g.add_edge('c', 'd') g.add_edge('b', 'd') print('Nodes') print(g.node) print('Successors') print(g.succ) print('Predecessors') print(g.pred) print('Paths above d') print(g.up('d')) print('Paths below a') print(g.down('a')) print('Shortest path above d') print(g.up_short('d')) print('Shortest path below a') print(g.down_short('a')) print('Deleting node b') # g.del_node_and_edges('b') g.del_node('b') print('Nodes') print(g.node) print('Successors') print(g.succ) print('Predecessors') print(g.pred) assert_equal(len(g.node), 3) assert_equal(len(g.succ), 3) assert_equal(len(g.pred), 3) dipy-0.13.0/dipy/core/tests/test_ndindex.py000066400000000000000000000006011317371701200206400ustar00rootroot00000000000000from dipy.core.ndindex import ndindex import numpy as np from numpy.testing import assert_array_equal def test_ndindex(): x = list(ndindex((1, 2, 3))) expected = [ix for ix, e in np.ndenumerate(np.zeros((1, 2, 3)))] assert_array_equal(x, expected) def test_ndindex_0d(): x = list(ndindex(np.array(1).shape)) expected = [()] assert_array_equal(x, expected) dipy-0.13.0/dipy/core/tests/test_optimize.py000066400000000000000000000127341317371701200210610ustar00rootroot00000000000000import numpy as np import scipy.sparse as sps import numpy.testing as npt from dipy.core.optimize import Optimizer, SCIPY_LESS_0_12, sparse_nnls, spdot import dipy.core.optimize as opt def func(x): return x[0]**2 + x[1]**2 + x[2]**2 def func2(x): return x[0]**2 + 0.5 * x[1]**2 + 0.2 * x[2]**2 + 0.2 * x[3]**2 @npt.dec.skipif(SCIPY_LESS_0_12) def test_optimize_new_scipy(): opt = Optimizer(fun=func, x0=np.array([1., 1., 1.]), method='Powell') npt.assert_array_almost_equal(opt.xopt, np.array([0, 0, 0])) npt.assert_almost_equal(opt.fopt, 0) opt = Optimizer(fun=func, x0=np.array([1., 1., 1.]), method='L-BFGS-B', options={'maxcor': 10, 
'ftol': 1e-7, 'gtol': 1e-5, 'eps': 1e-8}) npt.assert_array_almost_equal(opt.xopt, np.array([0, 0, 0])) npt.assert_almost_equal(opt.fopt, 0) npt.assert_equal(opt.evolution, None) npt.assert_equal(opt.evolution, None) opt = Optimizer(fun=func, x0=np.array([1., 1., 1.]), method='L-BFGS-B', options={'maxcor': 10, 'ftol': 1e-7, 'gtol': 1e-5, 'eps': 1e-8}, evolution=False) npt.assert_array_almost_equal(opt.xopt, np.array([0, 0, 0])) npt.assert_almost_equal(opt.fopt, 0) opt.print_summary() opt = Optimizer(fun=func2, x0=np.array([1., 1., 1., 5.]), method='L-BFGS-B', options={'maxcor': 10, 'ftol': 1e-7, 'gtol': 1e-5, 'eps': 1e-8}, evolution=True) npt.assert_equal(opt.evolution.shape, (opt.nit, 4)) opt = Optimizer(fun=func2, x0=np.array([1., 1., 1., 5.]), method='Powell', options={'xtol': 1e-6, 'ftol': 1e-6, 'maxiter': 1e6}, evolution=True) npt.assert_array_almost_equal(opt.xopt, np.array([0, 0, 0, 0.])) @npt.dec.skipif(not SCIPY_LESS_0_12) def test_optimize_old_scipy(): opt = Optimizer(fun=func, x0=np.array([1., 1., 1.]), method='L-BFGS-B', options={'maxcor': 10, 'ftol': 1e-7, 'gtol': 1e-5, 'eps': 1e-8}) npt.assert_array_almost_equal(opt.xopt, np.array([0, 0, 0])) npt.assert_almost_equal(opt.fopt, 0) opt = Optimizer(fun=func2, x0=np.array([1., 1., 1., 5.]), method='Powell', options={'xtol': 1e-6, 'ftol': 1e-6, 'maxiter': 1e6}, evolution=True) npt.assert_array_almost_equal(opt.xopt, np.array([0, 0, 0, 0.])) opt = Optimizer(fun=func, x0=np.array([1., 1., 1.]), method='L-BFGS-B', options={'maxcor': 10, 'eps': 1e-8}) npt.assert_array_almost_equal(opt.xopt, np.array([0, 0, 0])) npt.assert_almost_equal(opt.fopt, 0) opt = Optimizer(fun=func, x0=np.array([1., 1., 1.]), method='L-BFGS-B', options=None) npt.assert_array_almost_equal(opt.xopt, np.array([0, 0, 0])) npt.assert_almost_equal(opt.fopt, 0) opt = Optimizer(fun=func2, x0=np.array([1., 1., 1., 5.]), method='L-BFGS-B', options={'gtol': 1e-7, 'ftol': 1e-7, 'maxiter': 10000}) npt.assert_array_almost_equal(opt.xopt, np.array([0, 0, 0, 0.]), 4) npt.assert_almost_equal(opt.fopt, 0) opt = Optimizer(fun=func2, x0=np.array([1., 1., 1., 5.]), method='Powell', options={'maxiter': 1e6}, evolution=True) npt.assert_array_almost_equal(opt.xopt, np.array([0, 0, 0, 0.])) opt = Optimizer(fun=func2, x0=np.array([1., 1., 1., 5.]), method='Powell', options={'maxiter': 1e6}, evolution=True) npt.assert_array_almost_equal(opt.xopt, np.array([0, 0, 0, 0.])) def test_sklearn_linear_solver(): class SillySolver(opt.SKLearnLinearSolver): def fit(self, X, y): self.coef_ = np.ones(X.shape[-1]) MySillySolver = SillySolver() n_samples = 100 n_features = 20 y = np.random.rand(n_samples) X = np.ones((n_samples, n_features)) MySillySolver.fit(X, y) npt.assert_equal(MySillySolver.coef_, np.ones(n_features)) npt.assert_equal(MySillySolver.predict(X), np.ones(n_samples) * 20) def test_nonnegativeleastsquares(): n = 100 X = np.eye(n) beta = np.random.rand(n) y = np.dot(X, beta) my_nnls = opt.NonNegativeLeastSquares() my_nnls.fit(X, y) npt.assert_equal(my_nnls.coef_, beta) npt.assert_equal(my_nnls.predict(X), y) def test_spdot(): n = 100 m = 20 k = 10 A = np.random.randn(n, m) B = np.random.randn(m, k) A_sparse = sps.csr_matrix(A) B_sparse = sps.csr_matrix(B) dense_dot = np.dot(A, B) # Try all the different variations: npt.assert_array_almost_equal(dense_dot, spdot(A_sparse, B_sparse).todense()) npt.assert_array_almost_equal(dense_dot, spdot(A, B_sparse)) npt.assert_array_almost_equal(dense_dot, spdot(A_sparse, B)) def test_sparse_nnls(): # Set up the regression: beta = np.random.rand(10) X 
= np.random.randn(1000, 10) y = np.dot(X, beta) beta_hat = sparse_nnls(y, X) beta_hat_sparse = sparse_nnls(y, sps.csr_matrix(X)) # We should be able to get back the right answer for this simple case npt.assert_array_almost_equal(beta, beta_hat, decimal=1) npt.assert_array_almost_equal(beta, beta_hat_sparse, decimal=1) if __name__ == '__main__': npt.run_module_suite() dipy-0.13.0/dipy/core/tests/test_sphere.py000066400000000000000000000301551317371701200205040ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import import numpy as np import numpy.testing as nt import warnings from dipy.utils.six.moves import xrange from dipy.core.sphere import (Sphere, HemiSphere, unique_edges, unique_sets, faces_from_sphere_vertices, HemiSphere, disperse_charges, _get_forces, unit_octahedron, unit_icosahedron, hemi_icosahedron) from dipy.core.subdivide_octahedron import create_unit_sphere from dipy.core.geometry import cart2sphere, sphere2cart, vector_norm from numpy.testing.decorators import skipif try: from scipy.spatial import Delaunay except ImportError: needs_delaunay = skipif(True, "Need scipy.spatial.Delaunay") else: needs_delaunay = skipif(False) verts = unit_octahedron.vertices edges = unit_octahedron.edges oct_faces = unit_octahedron.faces r, theta, phi = cart2sphere(*verts.T) def test_sphere_construct_args(): nt.assert_raises(ValueError, Sphere) nt.assert_raises(ValueError, Sphere, x=1, theta=1) nt.assert_raises(ValueError, Sphere, xyz=1, theta=1) nt.assert_raises(ValueError, Sphere, xyz=1, theta=1, phi=1) def test_sphere_edges_faces(): nt.assert_raises(ValueError, Sphere, xyz=1, edges=1, faces=None) Sphere(xyz=[0, 0, 1], faces=[0, 0, 0]) Sphere(xyz=[[0, 0, 1], [1, 0, 0], [0, 1, 0]], edges=[[0, 1], [1, 2], [2, 0]], faces=[0, 1, 2]) def test_sphere_not_unit(): with warnings.catch_warnings(): warnings.simplefilter('error') nt.assert_raises(UserWarning, Sphere, xyz=[0, 0, 1.5]) def test_bad_edges_faces(): nt.assert_raises(ValueError, Sphere, xyz=[0, 0, 1.5], edges=[[1, 2]]) def test_sphere_construct(): s0 = Sphere(xyz=verts) s1 = Sphere(theta=theta, phi=phi) s2 = Sphere(*verts.T) nt.assert_array_almost_equal(s0.theta, s1.theta) nt.assert_array_almost_equal(s0.theta, s2.theta) nt.assert_array_almost_equal(s0.theta, theta) nt.assert_array_almost_equal(s0.phi, s1.phi) nt.assert_array_almost_equal(s0.phi, s2.phi) nt.assert_array_almost_equal(s0.phi, phi) def array_to_set(a): return set(frozenset(i) for i in a) def test_unique_edges(): faces = np.array([[0, 1, 2], [1, 2, 0]]) e = array_to_set([[1, 2], [0, 1], [0, 2]]) u = unique_edges(faces) nt.assert_equal(e, array_to_set(u)) u, m = unique_edges(faces, return_mapping=True) nt.assert_equal(e, array_to_set(u)) edges = [[[0, 1], [1, 2], [2, 0]], [[1, 2], [2, 0], [0, 1]]] nt.assert_equal(np.sort(u[m], -1), np.sort(edges, -1)) def test_unique_sets(): sets = np.array([[0, 1, 2], [1, 2, 0], [0, 2, 1], [1, 2, 3]]) e = array_to_set([[0, 1, 2], [1, 2, 3]]) # Run without inverse u = unique_sets(sets) nt.assert_equal(len(u), len(e)) nt.assert_equal(array_to_set(u), e) # Run with inverse u, m = unique_sets(sets, return_inverse=True) nt.assert_equal(len(u), len(e)) nt.assert_equal(array_to_set(u), e) nt.assert_equal(np.sort(u[m], -1), np.sort(sets, -1)) @needs_delaunay def test_faces_from_sphere_vertices(): faces = faces_from_sphere_vertices(verts) faces = array_to_set(faces) expected = array_to_set(oct_faces) nt.assert_equal(faces, expected) def test_sphere_attrs(): s = Sphere(xyz=verts) nt.assert_array_almost_equal(s.vertices, 
verts) nt.assert_array_almost_equal(s.x, verts[:, 0]) nt.assert_array_almost_equal(s.y, verts[:, 1]) nt.assert_array_almost_equal(s.z, verts[:, 2]) @needs_delaunay def test_edges_faces(): s = Sphere(xyz=verts) faces = oct_faces nt.assert_equal(array_to_set(s.faces), array_to_set(faces)) nt.assert_equal(array_to_set(s.edges), array_to_set(edges)) s = Sphere(xyz=verts, faces=[[0, 1, 2]]) nt.assert_equal(array_to_set(s.faces), array_to_set([[0, 1, 2]])) nt.assert_equal(array_to_set(s.edges), array_to_set([[0, 1], [1, 2], [0, 2]])) s = Sphere(xyz=verts, faces=[[0, 1, 2]], edges=[[0, 1]]) nt.assert_equal(array_to_set(s.faces), array_to_set([[0, 1, 2]])) nt.assert_equal(array_to_set(s.edges), array_to_set([[0, 1]])) @needs_delaunay def test_sphere_subdivide(): sphere1 = unit_octahedron.subdivide(4) sphere2 = Sphere(xyz=sphere1.vertices) nt.assert_equal(sphere1.faces.shape, sphere2.faces.shape) nt.assert_equal(array_to_set(sphere1.faces), array_to_set(sphere2.faces)) sphere1 = unit_icosahedron.subdivide(4) sphere2 = Sphere(xyz=sphere1.vertices) nt.assert_equal(sphere1.faces.shape, sphere2.faces.shape) nt.assert_equal(array_to_set(sphere1.faces), array_to_set(sphere2.faces)) # It might be good to also test the vertices somehow if we can think of a # good test for them. def test_sphere_find_closest(): sphere1 = unit_octahedron.subdivide(4) for ii in range(sphere1.vertices.shape[0]): nt.assert_equal(sphere1.find_closest(sphere1.vertices[ii]), ii) def test_hemisphere_find_closest(): hemisphere1 = hemi_icosahedron.subdivide(4) for ii in range(hemisphere1.vertices.shape[0]): nt.assert_equal(hemisphere1.find_closest(hemisphere1.vertices[ii]), ii) nt.assert_equal(hemisphere1.find_closest(-hemisphere1.vertices[ii]), ii) nt.assert_equal(hemisphere1.find_closest(hemisphere1.vertices[ii] * 2), ii) @needs_delaunay def test_hemisphere_subdivide(): def flip(vertices): x, y, z = vertices.T f = (z < 0) | ((z == 0) & (y < 0)) | ((z == 0) & (y == 0) & (x < 0)) return 1 - 2*f[:, None] decimals = 6 # Test HemiSphere.subdivide # Create a hemisphere by dividing a hemi-icosahedron hemi1 = HemiSphere.from_sphere(unit_icosahedron).subdivide(4) vertices1 = np.round(hemi1.vertices, decimals) vertices1 *= flip(vertices1) order = np.lexsort(vertices1.T) vertices1 = vertices1[order] # Create a hemisphere from a subdivided sphere sphere = unit_icosahedron.subdivide(4) hemi2 = HemiSphere.from_sphere(sphere) vertices2 = np.round(hemi2.vertices, decimals) vertices2 *= flip(vertices2) order = np.lexsort(vertices2.T) vertices2 = vertices2[order] # The two hemispheres should have the same vertices up to their order nt.assert_array_equal(vertices1, vertices2) # Create a hemisphere from vertices hemi3 = HemiSphere(xyz=hemi1.vertices) nt.assert_array_equal(hemi1.faces, hemi3.faces) nt.assert_array_equal(hemi1.edges, hemi3.edges) def test_hemisphere_constructor(): s0 = HemiSphere(xyz=verts) s1 = HemiSphere(theta=theta, phi=phi) s2 = HemiSphere(*verts.T) uniq_verts = verts[::2].T rU, thetaU, phiU = cart2sphere(*uniq_verts) nt.assert_array_almost_equal(s0.theta, s1.theta) nt.assert_array_almost_equal(s0.theta, s2.theta) nt.assert_array_almost_equal(s0.theta, thetaU) nt.assert_array_almost_equal(s0.phi, s1.phi) nt.assert_array_almost_equal(s0.phi, s2.phi) nt.assert_array_almost_equal(s0.phi, phiU) @needs_delaunay def test_mirror(): verts = [[0, 0, 1], [0, 1, 0], [1, 0, 0], [-1, -1, -1]] verts = np.array(verts, 'float') verts = verts / np.sqrt((verts * verts).sum(-1)[:, None]) faces = [[0, 1, 3], [0, 2, 3], [1, 2, 3]] h = 
HemiSphere(xyz=verts, faces=faces) s = h.mirror() nt.assert_equal(len(s.vertices), 8) nt.assert_equal(len(s.faces), 6) verts = s.vertices def _angle(a, b): return np.arccos(np.dot(a, b)) for triangle in s.faces: a, b, c = triangle nt.assert_(_angle(verts[a], verts[b]) <= np.pi/2) nt.assert_(_angle(verts[a], verts[c]) <= np.pi/2) nt.assert_(_angle(verts[b], verts[c]) <= np.pi/2) @needs_delaunay def test_hemisphere_faces(): t = (1 + np.sqrt(5)) / 2 vertices = np.array( [[ -t, -1, 0], [ -t, 1, 0], [ 1, 0, t], [ -1, 0, t], [ 0, t, 1], [ 0, -t, 1], ]) vertices /= vector_norm(vertices, keepdims=True) faces = np.array( [[0, 1, 2], [0, 1, 3], [0, 2, 4], [1, 3, 4], [2, 3, 4], [1, 2, 5], [0, 3, 5], [2, 3, 5], [0, 4, 5], [1, 4, 5], ]) edges = np.array( [(0, 1), (0, 2), (0, 3), (0, 4), (0, 5), (1, 2), (1, 3), (1, 4), (1, 5), (2, 3), (2, 4), (2, 5), (3, 4), (3, 5), (4, 5), ]) h = HemiSphere(xyz=vertices) nt.assert_equal(len(h.edges), len(edges)) nt.assert_equal(array_to_set(h.edges), array_to_set(edges)) nt.assert_equal(len(h.faces), len(faces)) nt.assert_equal(array_to_set(h.faces), array_to_set(faces)) def test_get_force(): charges = np.array([[1., 0, 0], [0, 1., 0], [0, 0, 1.]]) force, pot = _get_forces(charges) nt.assert_array_almost_equal(force, 0) charges = np.array([[1, -.1, 0], [1, 0, 0]]) force, pot = _get_forces(charges) nt.assert_array_almost_equal(force[1, [0, 2]], 0) nt.assert_(force[1, 1] > 0) def test_disperse_charges(): charges = np.array([[1., 0, 0], [0, 1., 0], [0, 0, 1.]]) d_sphere, pot = disperse_charges(HemiSphere(xyz=charges), 10) nt.assert_array_almost_equal(charges, d_sphere.vertices) a = np.sqrt(3)/2 charges = np.array([[3./5, 4./5, 0], [4./5, 3./5, 0]]) expected_charges = np.array([[0, 1., 0], [1., 0, 0]]) d_sphere, pot = disperse_charges(HemiSphere(xyz=charges), 1000, .2) nt.assert_array_almost_equal(expected_charges, d_sphere.vertices) for ii in xrange(1, len(pot)): # check that the potential of the system is going down nt.assert_(pot[ii] - pot[ii-1] <= 0) # Check that the disperse_charges does not blow up with a large constant d_sphere, pot = disperse_charges(HemiSphere(xyz=charges), 1000, 20.) 
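    # --- Added illustrative sketch (not part of the original dipy tests) ---
    # disperse_charges treats the points of a HemiSphere as charged particles
    # and iteratively pushes them apart to lower their electrostatic
    # potential; this is the usual way evenly spread gradient directions are
    # built. The 32 directions and 50 iterations below are arbitrary values
    # chosen only for illustration.
    rng_points = np.random.rand(32, 3) * 2 - 1
    rng_points /= np.sqrt((rng_points * rng_points).sum(-1))[:, None]
    dispersed, _potential = disperse_charges(HemiSphere(xyz=rng_points), 50)
    # the dispersed directions still lie on the unit (hemi)sphere
    nt.assert_array_almost_equal(
        np.sqrt((dispersed.vertices * dispersed.vertices).sum(-1)),
        np.ones(len(rng_points)))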
    nt.assert_array_almost_equal(expected_charges, d_sphere.vertices)
    for ii in xrange(1, len(pot)):
        # check that the potential of the system is going down
        nt.assert_(pot[ii] - pot[ii-1] <= 0)

    # check that the function seems to work with a larger number of charges
    charges = np.arange(21).reshape(7, 3)
    norms = np.sqrt((charges*charges).sum(-1))
    charges = charges / norms[:, None]
    d_sphere, pot = disperse_charges(HemiSphere(xyz=charges), 1000, .05)
    for ii in xrange(1, len(pot)):
        # check that the potential of the system is going down
        nt.assert_(pot[ii] - pot[ii-1] <= 0)
    # check that the resulting charges all lie on the unit sphere
    d_charges = d_sphere.vertices
    norms = np.sqrt((d_charges*d_charges).sum(-1))
    nt.assert_array_almost_equal(norms, 1)


def test_interp_rbf():
    def data_func(s, a, b):
        return a * np.cos(s.theta) + b * np.sin(s.phi)

    from dipy.core.sphere import Sphere, interp_rbf
    import numpy as np

    s0 = create_unit_sphere(3)
    s1 = create_unit_sphere(4)
    for a, b in zip([1, 2, 0.5], [1, 0.5, 2]):
        data = data_func(s0, a, b)
        expected = data_func(s1, a, b)
        interp_data_a = interp_rbf(data, s0, s1, norm="angle")
        nt.assert_(np.mean(np.abs(interp_data_a - expected)) < 0.1)

    # Test that using the euclidean norm raises a warning
    # (following
    # https://docs.python.org/2/library/warnings.html#testing-warnings)
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        interp_data_en = interp_rbf(data, s0, s1, norm="euclidean_norm")
        nt.assert_(len(w) == 1)
        nt.assert_(issubclass(w[-1].category, DeprecationWarning))
        nt.assert_("deprecated" in str(w[-1].message))


if __name__ == "__main__":
    nt.run_module_suite()


dipy-0.13.0/dipy/core/tests/test_subdivide_octahedron.py

import numpy as np
from numpy.testing import assert_array_almost_equal
from dipy.core.subdivide_octahedron import create_unit_sphere


def test_create_unit_sphere():
    sphere = create_unit_sphere(7)
    v, e, f = sphere.vertices, sphere.edges, sphere.faces
    assert_array_almost_equal((v*v).sum(1), 1)


def create_half_unit_sphere():
    sphere = create_half_unit_sphere(7)
    v, e, f = sphere.vertices, sphere.edges, sphere.faces
    assert_array_almost_equal((v*v).sum(1), 1)


dipy-0.13.0/dipy/core/wavelet.py

import numpy as np

from dipy.denoise import nlmeans_block

"""
Functions for Wavelet Transforms in 3D domain

Code adapted from
WAVELET SOFTWARE AT POLYTECHNIC UNIVERSITY, BROOKLYN, NY
http://taco.poly.edu/WaveletSoftware/
"""


def cshift3D(x, m, d):
    """3D Circular Shift

    Parameters
    ----------
    x : 3D ndarray
        N1 by N2 by N3 array
    m : int
        amount of shift
    d : int
        dimension of shift (d = 0, 1 or 2)

    Returns
    -------
    y : 3D ndarray
        array x shifted by m samples down along dimension d
    """
    s = x.shape
    idx = (np.array(range(s[d])) + (s[d] - m % s[d])) % s[d]
    idx = np.array(idx, dtype=np.int64)
    if d == 0:
        return x[idx, :, :]
    elif d == 1:
        return x[:, idx, :]
    else:
        return x[:, :, idx]


def permutationinverse(perm):
    """
    Function generating inverse of the permutation

    Parameters
    ----------
    perm : 1D array

    Returns
    -------
    inverse : 1D array
        permutation inverse of the input
    """
    inverse = [0] * len(perm)
    for i, p in enumerate(perm):
        inverse[p] = i
    return inverse


def afb3D_A(x, af, d):
    """3D Analysis Filter Bank (along one dimension only)

    Parameters
    ----------
    x : 3D ndarray
        N1xN2xN3 matrix, where
        min(N1,N2,N3) > 2*length(filter)
        (Ni are even)
    af : 2D ndarray
        analysis filter for the columns
        af[:, 1] - lowpass filter
af[:, 2] - highpass filter d : int dimension of filtering (d = 1, 2 or 3) Returns ------- lo : 1D array lowpass subbands hi : 1D array highpass subbands """ lpf = af[:, 0] hpf = af[:, 1] # permute dimensions of x so that dimension d is first. p = [(i + d) % 3 for i in range(3)] x = x.transpose(p) # filter along dimension 0 (N1, N2, N3) = x.shape L = af.shape[0] // 2 x = cshift3D(x, -L, 0) n1Half = N1 // 2 lo = np.zeros((L + n1Half, N2, N3)) hi = np.zeros((L + n1Half, N2, N3)) for k in range(N3): lo[:, :, k] = nlmeans_block.firdn(x[:, :, k], lpf) lo[:L] = lo[:L] + lo[n1Half:n1Half + L, :, :] lo = lo[:n1Half, :, :] for k in range(N3): hi[:, :, k] = nlmeans_block.firdn(x[:, :, k], hpf) hi[:L] = hi[:L] + hi[n1Half:n1Half + L, :, :] hi = hi[:n1Half, :, :] # permute dimensions of x (inverse permutation) q = permutationinverse(p) lo = lo.transpose(q) hi = hi.transpose(q) return lo, hi def sfb3D_A(lo, hi, sf, d): """3D Synthesis Filter Bank (along single dimension only) Parameters ---------- lo : 1D array lowpass subbands hi : 1D array highpass subbands sf : 2D ndarray synthesis filters d : int dimension of filtering Returns ------- y : 3D ndarray the N1xN2xN3 matrix """ lpf = sf[:, 0] hpf = sf[:, 1] # permute dimensions of lo and hi so that dimension d is first. p = [(i + d) % 3 for i in range(3)] lo = lo.transpose(p) hi = hi.transpose(p) (N1, N2, N3) = lo.shape N = 2 * N1 L = sf.shape[0] y = np.zeros((N + L - 2, N2, N3)) for k in range(N3): y[:, :, k] = (np.array(nlmeans_block.upfir(lo[:, :, k], lpf)) + np.array(nlmeans_block.upfir(hi[:, :, k], hpf))) y[:(L - 2), :, :] = y[:(L - 2), :, :] + y[N:(N + L - 2), :, :] y = y[:N, :, :] y = cshift3D(y, 1 - L / 2, 0) # permute dimensions of y (inverse permutation) q = permutationinverse(p) y = y.transpose(q) return y def sfb3D(lo, hi, sf1, sf2=None, sf3=None): """3D Synthesis Filter Bank Parameters ---------- lo : 1D array lowpass subbands hi : 1D array highpass subbands sfi : 2D ndarray synthesis filters for dimension i Returns ------- y : 3D ndarray output array """ if sf2 is None: sf2 = sf1 if sf3 is None: sf3 = sf1 LLL = lo LLH = hi[0] LHL = hi[1] LHH = hi[2] HLL = hi[3] HLH = hi[4] HHL = hi[5] HHH = hi[6] # filter along dimension 2 LL = sfb3D_A(LLL, LLH, sf3, 2) LH = sfb3D_A(LHL, LHH, sf3, 2) HL = sfb3D_A(HLL, HLH, sf3, 2) HH = sfb3D_A(HHL, HHH, sf3, 2) # filter along dimension 1 L = sfb3D_A(LL, LH, sf2, 1) H = sfb3D_A(HL, HH, sf2, 1) # filter along dimension 0 y = sfb3D_A(L, H, sf1, 0) return y def afb3D(x, af1, af2=None, af3=None): """3D Analysis Filter Bank Parameters ---------- x : 3D ndarray N1 by N2 by N3 array matrix, where 1) N1, N2, N3 all even 2) N1 >= 2*len(af1) 3) N2 >= 2*len(af2) 4) N3 >= 2*len(af3) afi : 2D ndarray analysis filters for dimension i afi[:, 1] - lowpass filter afi[:, 2] - highpass filter Returns ------- lo : 1D array lowpass subband hi : 1D array highpass subbands, h[d]- d = 1..7 """ if af2 is None: af2 = af1 if af3 is None: af3 = af1 # filter along dimension 0 L, H = afb3D_A(x, af1, 0) # filter along dimension 1 LL, LH = afb3D_A(L, af2, 1) HL, HH = afb3D_A(H, af2, 1) # filter along dimension 3 LLL, LLH = afb3D_A(LL, af3, 2) LHL, LHH = afb3D_A(LH, af3, 2) HLL, HLH = afb3D_A(HL, af3, 2) HHL, HHH = afb3D_A(HH, af3, 2) return LLL, [LLH, LHL, LHH, HLL, HLH, HHL, HHH] def dwt3D(x, J, af): """3-D Discrete Wavelet Transform Parameters ---------- x : 3D ndarray N1 x N2 x N3 matrix 1) Ni all even 2) min(Ni) >= 2^(J-1)*length(af) J : int number of stages af : 2D ndarray analysis filters Returns ------- w : cell array wavelet 
coefficients """ w = [None] * (J + 1) for k in range(J): x, w[k] = afb3D(x, af, af, af) w[J] = x return w def idwt3D(w, J, sf): """ Inverse 3-D Discrete Wavelet Transform Parameters ---------- w : cell array wavelet coefficient J : int number of stages sf : 2D ndarray synthesis filters Returns ------- y : 3D ndarray output array """ y = w[J] for k in range(J)[::-1]: y = sfb3D(y, w[k], sf, sf, sf) return y dipy-0.13.0/dipy/data/000077500000000000000000000000001317371701200144205ustar00rootroot00000000000000dipy-0.13.0/dipy/data/3shells-1000-2000-3500-N193.bval000066400000000000000000000113311317371701200206140ustar00rootroot000000000000000.000000000000000000e+00 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 1.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 
2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 2.000000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 3.500000000000000000e+03 dipy-0.13.0/dipy/data/3shells-1000-2000-3500-N193.bvec000066400000000000000000000347031317371701200206170ustar00rootroot000000000000000.000000000000000000e+00 9.999789999999999512e-01 0.000000000000000000e+00 -2.570549999999999918e-02 5.895179999999999865e-01 -2.357849999999999946e-01 -8.935779999999999834e-01 7.978399999999999936e-01 2.329370000000000052e-01 9.367199999999999971e-01 5.041299999999999670e-01 3.451989999999999781e-01 4.567649999999999766e-01 -4.874809999999999977e-01 -6.170330000000000537e-01 -5.785120000000000262e-01 -8.253639999999999866e-01 8.950759999999999827e-01 2.899920000000000275e-01 1.150140000000000051e-01 -7.999340000000000339e-01 5.124940000000000051e-01 -7.900049999999999573e-01 9.492810000000000414e-01 2.323179999999999967e-01 -1.967069999999999930e-02 2.159689999999999943e-01 7.726450000000000262e-01 -1.601529999999999898e-01 -1.461669999999999914e-01 8.873699999999999921e-01 -5.629889999999999617e-01 -3.813130000000000130e-01 -3.059540000000000037e-01 -3.326819999999999777e-01 -9.622389999999999555e-01 -9.595320000000000515e-01 4.509639999999999760e-01 -7.711919999999999886e-01 7.098160000000000025e-01 -6.945430000000000215e-01 6.815489999999999604e-01 -1.416890000000000094e-01 -7.403509999999999813e-01 -1.027560000000000001e-01 5.839130000000000154e-01 -8.775499999999999967e-02 -5.505060000000000509e-01 8.374430000000000485e-01 3.629290000000000016e-01 -1.836109999999999964e-01 -7.183230000000000448e-01 4.327820000000000000e-01 5.018369999999999775e-01 -1.705180000000000029e-01 4.631950000000000234e-01 3.837130000000000263e-01 -7.141659999999999675e-01 
2.592050000000000187e-01 0.000000000000000000e+00 3.636330000000000118e-02 5.708539999999999726e-01 -2.822049999999999836e-01 7.203509999999999636e-01 2.658909999999999885e-01 9.999789999999999512e-01 0.000000000000000000e+00 -2.570549999999999918e-02 5.895179999999999865e-01 -2.357849999999999946e-01 -8.935779999999999834e-01 7.978399999999999936e-01 2.329370000000000052e-01 9.367199999999999971e-01 5.041299999999999670e-01 3.451989999999999781e-01 4.567649999999999766e-01 -4.874809999999999977e-01 -6.170330000000000537e-01 -5.785120000000000262e-01 -8.253639999999999866e-01 8.950759999999999827e-01 2.899920000000000275e-01 1.150140000000000051e-01 -7.999340000000000339e-01 5.124940000000000051e-01 -7.900049999999999573e-01 9.492810000000000414e-01 2.323179999999999967e-01 -1.967069999999999930e-02 2.159689999999999943e-01 7.726450000000000262e-01 -1.601529999999999898e-01 -1.461669999999999914e-01 8.873699999999999921e-01 -5.629889999999999617e-01 -3.813130000000000130e-01 -3.059540000000000037e-01 -3.326819999999999777e-01 -9.622389999999999555e-01 -9.595320000000000515e-01 4.509639999999999760e-01 -7.711919999999999886e-01 7.098160000000000025e-01 -6.945430000000000215e-01 6.815489999999999604e-01 -1.416890000000000094e-01 -7.403509999999999813e-01 -1.027560000000000001e-01 5.839130000000000154e-01 -8.775499999999999967e-02 -5.505060000000000509e-01 8.374430000000000485e-01 3.629290000000000016e-01 -1.836109999999999964e-01 -7.183230000000000448e-01 4.327820000000000000e-01 5.018369999999999775e-01 -1.705180000000000029e-01 4.631950000000000234e-01 3.837130000000000263e-01 -7.141659999999999675e-01 2.592050000000000187e-01 0.000000000000000000e+00 3.636330000000000118e-02 5.708539999999999726e-01 -2.822049999999999836e-01 7.203509999999999636e-01 2.658909999999999885e-01 9.999789999999999512e-01 0.000000000000000000e+00 -2.570549999999999918e-02 5.895179999999999865e-01 -2.357849999999999946e-01 -8.935779999999999834e-01 7.978399999999999936e-01 2.329370000000000052e-01 9.367199999999999971e-01 5.041299999999999670e-01 3.451989999999999781e-01 4.567649999999999766e-01 -4.874809999999999977e-01 -6.170330000000000537e-01 -5.785120000000000262e-01 -8.253639999999999866e-01 8.950759999999999827e-01 2.899920000000000275e-01 1.150140000000000051e-01 -7.999340000000000339e-01 5.124940000000000051e-01 -7.900049999999999573e-01 9.492810000000000414e-01 2.323179999999999967e-01 -1.967069999999999930e-02 2.159689999999999943e-01 7.726450000000000262e-01 -1.601529999999999898e-01 -1.461669999999999914e-01 8.873699999999999921e-01 -5.629889999999999617e-01 -3.813130000000000130e-01 -3.059540000000000037e-01 -3.326819999999999777e-01 -9.622389999999999555e-01 -9.595320000000000515e-01 4.509639999999999760e-01 -7.711919999999999886e-01 7.098160000000000025e-01 -6.945430000000000215e-01 6.815489999999999604e-01 -1.416890000000000094e-01 -7.403509999999999813e-01 -1.027560000000000001e-01 5.839130000000000154e-01 -8.775499999999999967e-02 -5.505060000000000509e-01 8.374430000000000485e-01 3.629290000000000016e-01 -1.836109999999999964e-01 -7.183230000000000448e-01 4.327820000000000000e-01 5.018369999999999775e-01 -1.705180000000000029e-01 4.631950000000000234e-01 3.837130000000000263e-01 -7.141659999999999675e-01 2.592050000000000187e-01 0.000000000000000000e+00 3.636330000000000118e-02 5.708539999999999726e-01 -2.822049999999999836e-01 7.203509999999999636e-01 2.658909999999999885e-01 0.000000000000000000e+00 -5.040010000000000150e-03 9.999919999999999920e-01 6.538610000000000255e-01 
-7.692360000000000309e-01 -5.290949999999999820e-01 -2.635589999999999877e-01 1.337260000000000115e-01 9.318840000000000456e-01 1.441389999999999894e-01 -8.466939999999999467e-01 -8.503110000000000390e-01 -6.356720000000000148e-01 -3.939079999999999804e-01 6.768490000000000339e-01 -1.093469999999999998e-01 -5.250340000000000007e-01 -4.482420000000000154e-02 -5.454729999999999857e-01 -9.640499999999999625e-01 4.077669999999999906e-01 8.421389999999999709e-01 1.579929999999999946e-01 -2.376949999999999896e-01 7.870509999999999451e-01 -1.920310000000000072e-01 -9.571229999999999460e-01 -6.075340000000000185e-01 3.604129999999999834e-01 7.352739999999999831e-01 4.211110000000000131e-01 2.364819999999999978e-01 1.470370000000000010e-01 -2.037930000000000019e-01 -1.341130000000000100e-01 -2.694639999999999813e-01 2.097700000000000120e-01 -8.903370000000000450e-01 6.311750000000000416e-01 4.131589999999999985e-01 2.793949999999999906e-02 5.331010000000000471e-01 -7.292410000000000281e-01 3.932229999999999892e-01 8.253669999999999618e-01 -6.007820000000000382e-01 -3.396509999999999807e-01 -7.954839999999999689e-01 -4.622020000000000017e-01 -5.659300000000000441e-01 3.970810000000000173e-01 -6.957010000000000138e-01 6.863609999999999989e-01 6.943369999999999820e-01 -5.137690000000000312e-01 4.280519999999999881e-01 -8.125719999999999610e-01 -2.514669999999999961e-01 8.872579999999999911e-01 8.131860000000000477e-02 -9.046159999999999757e-01 -3.085970000000000102e-01 1.497950000000000115e-01 6.119139999999999580e-01 9.606829999999999536e-01 -5.040010000000000150e-03 9.999919999999999920e-01 6.538610000000000255e-01 -7.692360000000000309e-01 -5.290949999999999820e-01 -2.635589999999999877e-01 1.337260000000000115e-01 9.318840000000000456e-01 1.441389999999999894e-01 -8.466939999999999467e-01 -8.503110000000000390e-01 -6.356720000000000148e-01 -3.939079999999999804e-01 6.768490000000000339e-01 -1.093469999999999998e-01 -5.250340000000000007e-01 -4.482420000000000154e-02 -5.454729999999999857e-01 -9.640499999999999625e-01 4.077669999999999906e-01 8.421389999999999709e-01 1.579929999999999946e-01 -2.376949999999999896e-01 7.870509999999999451e-01 -1.920310000000000072e-01 -9.571229999999999460e-01 -6.075340000000000185e-01 3.604129999999999834e-01 7.352739999999999831e-01 4.211110000000000131e-01 2.364819999999999978e-01 1.470370000000000010e-01 -2.037930000000000019e-01 -1.341130000000000100e-01 -2.694639999999999813e-01 2.097700000000000120e-01 -8.903370000000000450e-01 6.311750000000000416e-01 4.131589999999999985e-01 2.793949999999999906e-02 5.331010000000000471e-01 -7.292410000000000281e-01 3.932229999999999892e-01 8.253669999999999618e-01 -6.007820000000000382e-01 -3.396509999999999807e-01 -7.954839999999999689e-01 -4.622020000000000017e-01 -5.659300000000000441e-01 3.970810000000000173e-01 -6.957010000000000138e-01 6.863609999999999989e-01 6.943369999999999820e-01 -5.137690000000000312e-01 4.280519999999999881e-01 -8.125719999999999610e-01 -2.514669999999999961e-01 8.872579999999999911e-01 8.131860000000000477e-02 -9.046159999999999757e-01 -3.085970000000000102e-01 1.497950000000000115e-01 6.119139999999999580e-01 9.606829999999999536e-01 -5.040010000000000150e-03 9.999919999999999920e-01 6.538610000000000255e-01 -7.692360000000000309e-01 -5.290949999999999820e-01 -2.635589999999999877e-01 1.337260000000000115e-01 9.318840000000000456e-01 1.441389999999999894e-01 -8.466939999999999467e-01 -8.503110000000000390e-01 -6.356720000000000148e-01 -3.939079999999999804e-01 6.768490000000000339e-01 
-1.093469999999999998e-01 -5.250340000000000007e-01 -4.482420000000000154e-02 -5.454729999999999857e-01 -9.640499999999999625e-01 4.077669999999999906e-01 8.421389999999999709e-01 1.579929999999999946e-01 -2.376949999999999896e-01 7.870509999999999451e-01 -1.920310000000000072e-01 -9.571229999999999460e-01 -6.075340000000000185e-01 3.604129999999999834e-01 7.352739999999999831e-01 4.211110000000000131e-01 2.364819999999999978e-01 1.470370000000000010e-01 -2.037930000000000019e-01 -1.341130000000000100e-01 -2.694639999999999813e-01 2.097700000000000120e-01 -8.903370000000000450e-01 6.311750000000000416e-01 4.131589999999999985e-01 2.793949999999999906e-02 5.331010000000000471e-01 -7.292410000000000281e-01 3.932229999999999892e-01 8.253669999999999618e-01 -6.007820000000000382e-01 -3.396509999999999807e-01 -7.954839999999999689e-01 -4.622020000000000017e-01 -5.659300000000000441e-01 3.970810000000000173e-01 -6.957010000000000138e-01 6.863609999999999989e-01 6.943369999999999820e-01 -5.137690000000000312e-01 4.280519999999999881e-01 -8.125719999999999610e-01 -2.514669999999999961e-01 8.872579999999999911e-01 8.131860000000000477e-02 -9.046159999999999757e-01 -3.085970000000000102e-01 1.497950000000000115e-01 6.119139999999999580e-01 9.606829999999999536e-01 0.000000000000000000e+00 -4.027949999999999760e-03 -3.987939999999999714e-03 -7.561780000000000168e-01 -2.464619999999999866e-01 -8.151469999999999549e-01 -3.633939999999999948e-01 -5.878510000000000124e-01 -2.780869999999999731e-01 -3.190299999999999803e-01 1.701830000000000009e-01 3.972519999999999940e-01 6.223229999999999595e-01 -7.792289999999999495e-01 -4.014300000000000090e-01 8.083110000000000017e-01 -2.076359999999999872e-01 4.436550000000000216e-01 7.863609999999999767e-01 2.395410000000000039e-01 4.402639999999999887e-01 -1.677849999999999897e-01 5.923939999999999761e-01 -2.058300000000000129e-01 -5.714719999999999800e-01 9.811919999999999531e-01 -1.930610000000000104e-01 -1.841800000000000104e-01 -9.189410000000000078e-01 6.618209999999999926e-01 -1.877240000000000020e-01 7.919089999999999741e-01 -9.126779999999999893e-01 9.299790000000000001e-01 -9.334540000000000060e-01 -3.853909999999999975e-02 -1.878710000000000102e-01 -6.270149999999999335e-02 -8.295329999999999371e-02 -5.704919999999999991e-01 -7.189079999999999915e-01 5.012929999999999886e-01 -6.694269999999999943e-01 -5.452120000000000299e-01 -5.551669999999999661e-01 -5.459920000000000329e-01 -9.364489999999999759e-01 -2.532760000000000011e-01 2.916480000000000183e-01 -7.402739999999999876e-01 8.992299999999999738e-01 -3.548969999999999816e-03 5.844730000000000203e-01 -5.158049999999999580e-01 8.408120000000000038e-01 -7.760289999999999688e-01 -4.387380000000000169e-01 -6.532470000000000221e-01 3.815569999999999795e-01 9.966880000000000184e-01 -4.246750000000000247e-01 7.608510000000000550e-01 9.475879999999999859e-01 -3.265830000000000122e-01 7.993519999999999792e-02 -4.027949999999999760e-03 -3.987939999999999714e-03 -7.561780000000000168e-01 -2.464619999999999866e-01 -8.151469999999999549e-01 -3.633939999999999948e-01 -5.878510000000000124e-01 -2.780869999999999731e-01 -3.190299999999999803e-01 1.701830000000000009e-01 3.972519999999999940e-01 6.223229999999999595e-01 -7.792289999999999495e-01 -4.014300000000000090e-01 8.083110000000000017e-01 -2.076359999999999872e-01 4.436550000000000216e-01 7.863609999999999767e-01 2.395410000000000039e-01 4.402639999999999887e-01 -1.677849999999999897e-01 5.923939999999999761e-01 -2.058300000000000129e-01 
-5.714719999999999800e-01 9.811919999999999531e-01 -1.930610000000000104e-01 -1.841800000000000104e-01 -9.189410000000000078e-01 6.618209999999999926e-01 -1.877240000000000020e-01 7.919089999999999741e-01 -9.126779999999999893e-01 9.299790000000000001e-01 -9.334540000000000060e-01 -3.853909999999999975e-02 -1.878710000000000102e-01 -6.270149999999999335e-02 -8.295329999999999371e-02 -5.704919999999999991e-01 -7.189079999999999915e-01 5.012929999999999886e-01 -6.694269999999999943e-01 -5.452120000000000299e-01 -5.551669999999999661e-01 -5.459920000000000329e-01 -9.364489999999999759e-01 -2.532760000000000011e-01 2.916480000000000183e-01 -7.402739999999999876e-01 8.992299999999999738e-01 -3.548969999999999816e-03 5.844730000000000203e-01 -5.158049999999999580e-01 8.408120000000000038e-01 -7.760289999999999688e-01 -4.387380000000000169e-01 -6.532470000000000221e-01 3.815569999999999795e-01 9.966880000000000184e-01 -4.246750000000000247e-01 7.608510000000000550e-01 9.475879999999999859e-01 -3.265830000000000122e-01 7.993519999999999792e-02 -4.027949999999999760e-03 -3.987939999999999714e-03 -7.561780000000000168e-01 -2.464619999999999866e-01 -8.151469999999999549e-01 -3.633939999999999948e-01 -5.878510000000000124e-01 -2.780869999999999731e-01 -3.190299999999999803e-01 1.701830000000000009e-01 3.972519999999999940e-01 6.223229999999999595e-01 -7.792289999999999495e-01 -4.014300000000000090e-01 8.083110000000000017e-01 -2.076359999999999872e-01 4.436550000000000216e-01 7.863609999999999767e-01 2.395410000000000039e-01 4.402639999999999887e-01 -1.677849999999999897e-01 5.923939999999999761e-01 -2.058300000000000129e-01 -5.714719999999999800e-01 9.811919999999999531e-01 -1.930610000000000104e-01 -1.841800000000000104e-01 -9.189410000000000078e-01 6.618209999999999926e-01 -1.877240000000000020e-01 7.919089999999999741e-01 -9.126779999999999893e-01 9.299790000000000001e-01 -9.334540000000000060e-01 -3.853909999999999975e-02 -1.878710000000000102e-01 -6.270149999999999335e-02 -8.295329999999999371e-02 -5.704919999999999991e-01 -7.189079999999999915e-01 5.012929999999999886e-01 -6.694269999999999943e-01 -5.452120000000000299e-01 -5.551669999999999661e-01 -5.459920000000000329e-01 -9.364489999999999759e-01 -2.532760000000000011e-01 2.916480000000000183e-01 -7.402739999999999876e-01 8.992299999999999738e-01 -3.548969999999999816e-03 5.844730000000000203e-01 -5.158049999999999580e-01 8.408120000000000038e-01 -7.760289999999999688e-01 -4.387380000000000169e-01 -6.532470000000000221e-01 3.815569999999999795e-01 9.966880000000000184e-01 -4.246750000000000247e-01 7.608510000000000550e-01 9.475879999999999859e-01 -3.265830000000000122e-01 7.993519999999999792e-02 dipy-0.13.0/dipy/data/__init__.py000066400000000000000000000323641317371701200165410ustar00rootroot00000000000000""" Read test or example data """ from __future__ import division, print_function, absolute_import import sys import json import warnings from nibabel import load from os.path import join as pjoin, dirname import gzip import numpy as np from dipy.core.gradients import GradientTable, gradient_table from dipy.core.sphere import Sphere, HemiSphere from dipy.sims.voxel import SticksAndBall from dipy.data.fetcher import (fetch_scil_b0, read_scil_b0, fetch_stanford_hardi, read_stanford_hardi, fetch_taiwan_ntu_dsi, read_taiwan_ntu_dsi, fetch_sherbrooke_3shell, read_sherbrooke_3shell, fetch_isbi2013_2shell, read_isbi2013_2shell, read_stanford_labels, fetch_syn_data, read_syn_data, fetch_stanford_t1, read_stanford_t1, fetch_stanford_pve_maps, 
read_stanford_pve_maps, fetch_viz_icons, read_viz_icons, fetch_bundles_2_subjects, read_bundles_2_subjects, fetch_cenir_multib, read_cenir_multib, fetch_mni_template, read_mni_template, fetch_ivim, read_ivim, fetch_tissue_data, read_tissue_data, fetch_cfin_multib, read_cfin_dwi, read_cfin_t1) from ..utils.arrfuncs import as_native_array from dipy.tracking.streamline import relist_streamlines if sys.version_info[0] < 3: import cPickle def loads_compat(bytes): return cPickle.loads(bytes) else: # Python 3 import pickle # Need to load pickles saved in Python 2 def loads_compat(bytes): return pickle.loads(bytes, encoding='latin1') DATA_DIR = pjoin(dirname(__file__), 'files') SPHERE_FILES = { 'symmetric362': pjoin(DATA_DIR, 'evenly_distributed_sphere_362.npz'), 'symmetric642': pjoin(DATA_DIR, 'evenly_distributed_sphere_642.npz'), 'symmetric724': pjoin(DATA_DIR, 'evenly_distributed_sphere_724.npz'), 'repulsion724': pjoin(DATA_DIR, 'repulsion724.npz'), 'repulsion100': pjoin(DATA_DIR, 'repulsion100.npz'), 'repulsion200': pjoin(DATA_DIR, 'repulsion200.npz') } class DataError(Exception): pass def get_sim_voxels(name='fib1'): """ provide some simulated voxel data Parameters ------------ name : str, which file? 'fib0', 'fib1' or 'fib2' Returns --------- dix : dictionary, where dix['data'] returns a 2d array where every row is a simulated voxel with different orientation Examples ---------- >>> from dipy.data import get_sim_voxels >>> sv=get_sim_voxels('fib1') >>> sv['data'].shape == (100, 102) True >>> sv['fibres'] '1' >>> sv['gradients'].shape == (102, 3) True >>> sv['bvals'].shape == (102,) True >>> sv['snr'] '60' >>> sv2=get_sim_voxels('fib2') >>> sv2['fibres'] '2' >>> sv2['snr'] '80' Notes ------- These sim voxels were provided by M.M. Correia using Rician noise. """ if name == 'fib0': fname = pjoin(DATA_DIR, 'fib0.pkl.gz') if name == 'fib1': fname = pjoin(DATA_DIR, 'fib1.pkl.gz') if name == 'fib2': fname = pjoin(DATA_DIR, 'fib2.pkl.gz') return loads_compat(gzip.open(fname, 'rb').read()) def get_skeleton(name='C1'): """ provide skeletons generated from Local Skeleton Clustering (LSC) Parameters ----------- name : str, 'C1' or 'C3' Returns ------- dix : dictionary Examples --------- >>> from dipy.data import get_skeleton >>> C=get_skeleton('C1') >>> len(C.keys()) 117 >>> for c in C: break >>> sorted(C[c].keys()) ['N', 'hidden', 'indices', 'most'] """ if name == 'C1': fname = pjoin(DATA_DIR, 'C1.pkl.gz') if name == 'C3': fname = pjoin(DATA_DIR, 'C3.pkl.gz') return loads_compat(gzip.open(fname, 'rb').read()) def get_sphere(name='symmetric362'): ''' provide triangulated spheres Parameters ------------ name : str which sphere - one of: * 'symmetric362' * 'symmetric642' * 'symmetric724' * 'repulsion724' * 'repulsion100' * 'repulsion200' Returns ------- sphere : a dipy.core.sphere.Sphere class instance Examples -------- >>> import numpy as np >>> from dipy.data import get_sphere >>> sphere = get_sphere('symmetric362') >>> verts, faces = sphere.vertices, sphere.faces >>> verts.shape == (362, 3) True >>> faces.shape == (720, 3) True >>> verts, faces = get_sphere('not a sphere name') #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ... DataError: No sphere called "not a sphere name" ''' fname = SPHERE_FILES.get(name) if fname is None: raise DataError('No sphere called "%s"' % name) res = np.load(fname) # Set to native byte order to avoid errors in compiled routines for # big-endian platforms, when using these spheres. 
return Sphere(xyz=as_native_array(res['vertices']), faces=as_native_array(res['faces'])) default_sphere = HemiSphere.from_sphere(get_sphere('symmetric724')) small_sphere = HemiSphere.from_sphere(get_sphere('symmetric362')) def get_data(name='small_64D'): """ provides filenames of some test datasets or other useful parametrisations Parameters ---------- name : str the filename/s of which dataset to return, one of: 'small_64D' small region of interest nifti,bvecs,bvals 64 directions 'small_101D' small region of interest nifti,bvecs,bvals 101 directions 'aniso_vox' volume with anisotropic voxel size as Nifti 'fornix' 300 tracks in Trackvis format (from Pittsburgh Brain Competition) 'gqi_vectors' the scanner wave vectors needed for a GQI acquisitions of 101 directions tested on Siemens 3T Trio 'small_25' small ROI (10x8x2) DTI data (b value 2000, 25 directions) 'test_piesno' slice of N=8, K=14 diffusion data 'reg_c' small 2D image used for validating registration 'reg_o' small 2D image used for validation registration 'cb_2' two vectorized cingulum bundles Returns ------- fnames : tuple filenames for dataset Examples ---------- >>> import numpy as np >>> from dipy.data import get_data >>> fimg,fbvals,fbvecs=get_data('small_101D') >>> bvals=np.loadtxt(fbvals) >>> bvecs=np.loadtxt(fbvecs).T >>> import nibabel as nib >>> img=nib.load(fimg) >>> data=img.get_data() >>> data.shape == (6, 10, 10, 102) True >>> bvals.shape == (102,) True >>> bvecs.shape == (102, 3) True """ if name == 'small_64D': fbvals = pjoin(DATA_DIR, 'small_64D.bvals.npy') fbvecs = pjoin(DATA_DIR, 'small_64D.gradients.npy') fimg = pjoin(DATA_DIR, 'small_64D.nii') return fimg, fbvals, fbvecs if name == '55dir_grad.bvec': return pjoin(DATA_DIR, '55dir_grad.bvec') if name == 'small_101D': fbvals = pjoin(DATA_DIR, 'small_101D.bval') fbvecs = pjoin(DATA_DIR, 'small_101D.bvec') fimg = pjoin(DATA_DIR, 'small_101D.nii.gz') return fimg, fbvals, fbvecs if name == 'aniso_vox': return pjoin(DATA_DIR, 'aniso_vox.nii.gz') if name == 'ascm_test': return pjoin(DATA_DIR, 'ascm_out_test.nii.gz') if name == 'fornix': return pjoin(DATA_DIR, 'tracks300.trk') if name == 'gqi_vectors': return pjoin(DATA_DIR, 'ScannerVectors_GQI101.txt') if name == 'dsi515btable': return pjoin(DATA_DIR, 'dsi515_b_table.txt') if name == 'dsi4169btable': return pjoin(DATA_DIR, 'dsi4169_b_table.txt') if name == 'grad514': return pjoin(DATA_DIR, 'grad_514.txt') if name == "small_25": fbvals = pjoin(DATA_DIR, 'small_25.bval') fbvecs = pjoin(DATA_DIR, 'small_25.bvec') fimg = pjoin(DATA_DIR, 'small_25.nii.gz') return fimg, fbvals, fbvecs if name == "S0_10": fimg = pjoin(DATA_DIR, 'S0_10slices.nii.gz') return fimg if name == "test_piesno": fimg = pjoin(DATA_DIR, 'test_piesno.nii.gz') return fimg if name == "reg_c": return pjoin(DATA_DIR, 'C.npy') if name == "reg_o": return pjoin(DATA_DIR, 'circle.npy') if name == 'cb_2': return pjoin(DATA_DIR, 'cb_2.npz') if name == "t1_coronal_slice": return pjoin(DATA_DIR, 't1_coronal_slice.npy') def _gradient_from_file(filename): """Reads a gradient file saved as a text file compatible with np.loadtxt and saved in the dipy data directory""" def gtab_getter(): gradfile = pjoin(DATA_DIR, filename) grad = np.loadtxt(gradfile, delimiter=',') gtab = GradientTable(grad) return gtab return gtab_getter get_3shell_gtab = _gradient_from_file("gtab_3shell.txt") get_isbi2013_2shell_gtab = _gradient_from_file("gtab_isbi2013_2shell.txt") get_gtab_taiwan_dsi = _gradient_from_file("gtab_taiwan_dsi.txt") def dsi_voxels(): fimg, fbvals, fbvecs = 
get_data('small_101D') bvals = np.loadtxt(fbvals) bvecs = np.loadtxt(fbvecs).T img = load(fimg) data = img.get_data() gtab = gradient_table(bvals, bvecs) return data, gtab def dsi_deconv_voxels(): gtab = gradient_table(np.loadtxt(get_data('dsi515btable'))) data = np.zeros((2, 2, 2, 515)) for ix in range(2): for iy in range(2): for iz in range(2): data[ix, iy, iz], dirs = SticksAndBall(gtab, d=0.0015, S0=1., angles=[(0, 0), (90, 0)], fractions=[50, 50], snr=None) return data, gtab def mrtrix_spherical_functions(): """Spherical functions represented by spherical harmonic coefficients and evaluated on a discrete sphere. Returns ------- func_coef : array (2, 3, 4, 45) Functions represented by the coefficients associated with the mxtrix spherical harmonic basis of order 8. func_discrete : array (2, 3, 4, 81) Functions evaluated on `sphere`. sphere : Sphere The discrete sphere, points on the surface of a unit sphere, used to evaluate the functions. Notes ----- These coefficients were obtained by using the dwi2SH command of mrtrix. """ func_discrete = load(pjoin(DATA_DIR, "func_discrete.nii.gz")).get_data() func_coef = load(pjoin(DATA_DIR, "func_coef.nii.gz")).get_data() gradients = np.loadtxt(pjoin(DATA_DIR, "sphere_grad.txt")) # gradients[0] and the first volume of func_discrete, # func_discrete[..., 0], are associated with the b=0 signal. # gradients[:, 3] are the b-values for each gradient/volume. sphere = Sphere(xyz=gradients[1:, :3]) return func_coef, func_discrete[..., 1:], sphere dipy_cmaps = None def get_cmap(name): """Makes a callable, similar to maptlotlib.pyplot.get_cmap""" if name.lower() == "accent": warnings.warn("The `Accent` colormap is deprecated as of version" + " 0.12 of Dipy and will be removed in a future " + "version. Please use another colormap", DeprecationWarning) global dipy_cmaps if dipy_cmaps is None: filename = pjoin(DATA_DIR, "dipy_colormaps.json") with open(filename) as f: dipy_cmaps = json.load(f) desc = dipy_cmaps.get(name) if desc is None: return None def simple_cmap(v): """Emulates matplotlib colormap callable""" rgba = np.ones((len(v), 4)) for i, color in enumerate(('red', 'green', 'blue')): x, y0, y1 = zip(*desc[color]) # Matplotlib allows more complex colormaps, but for users who do # not have Matplotlib dipy makes a few simple colormaps available. # These colormaps are simple because y0 == y1, and therefor we # ignore y1 here. 
rgba[:, i] = np.interp(v, x, y0) return rgba return simple_cmap def two_cingulum_bundles(): fname = get_data('cb_2') res = np.load(fname) cb1 = relist_streamlines(res['points'], res['offsets']) cb2 = relist_streamlines(res['points2'], res['offsets2']) return cb1, cb2 def matlab_life_results(): matlab_rmse = np.load(pjoin(DATA_DIR, 'life_matlab_rmse.npy')) matlab_weights = np.load(pjoin(DATA_DIR, 'life_matlab_weights.npy')) return matlab_rmse, matlab_weights dipy-0.13.0/dipy/data/fetcher.py000066400000000000000000001021101317371701200164050ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import import os import sys import contextlib from os.path import join as pjoin from hashlib import md5 from shutil import copyfileobj import numpy as np import nibabel as nib import tarfile import zipfile from dipy.core.gradients import gradient_table from dipy.io.gradients import read_bvals_bvecs if sys.version_info[0] < 3: from urllib2 import urlopen else: from urllib.request import urlopen # Set a user-writeable file-system location to put files: if 'DIPY_HOME' in os.environ: dipy_home = os.environ['DIPY_HOME'] else: dipy_home = pjoin(os.path.expanduser('~'), '.dipy') # The URL to the University of Washington Researchworks repository: UW_RW_URL = \ "https://digital.lib.washington.edu/researchworks/bitstream/handle/" class FetcherError(Exception): pass def _log(msg): """Helper function to keep track of things. For now, just prints the message """ print(msg) def update_progressbar(progress, total_length): """Show progressbar Takes a number between 0 and 1 to indicate progress from 0 to 100%. """ # Try to set the bar_length according to the console size try: columns = os.popen('tput cols', 'r').read() bar_length = int(columns) - 46 if(not (bar_length > 1)): bar_length = 20 except: # Default value if determination of console size fails bar_length = 20 block = int(round(bar_length * progress)) size_string = "{0:.2f} MB".format(float(total_length) / (1024 * 1024)) text = "\rDownload Progress: [{0}] {1:.2f}% of {2}".format( "#" * block + "-" * (bar_length - block), progress * 100, size_string) sys.stdout.write(text) sys.stdout.flush() def copyfileobj_withprogress(fsrc, fdst, total_length, length=16 * 1024): copied = 0 while True: buf = fsrc.read(length) if not buf: break fdst.write(buf) copied += len(buf) progress = float(copied) / float(total_length) update_progressbar(progress, total_length) def _already_there_msg(folder): """ Prints a message indicating that a certain data-set is already in place """ msg = 'Dataset is already in place. If you want to fetch it again ' msg += 'please first remove the folder %s ' % folder _log(msg) def _get_file_md5(filename): """Compute the md5 checksum of a file""" md5_data = md5() with open(filename, 'rb') as f: for chunk in iter(lambda: f.read(128 * md5_data.block_size), b''): md5_data.update(chunk) return md5_data.hexdigest() def check_md5(filename, stored_md5=None): """ Computes the md5 of filename and check if it matches with the supplied string md5 Input ----- filename : string Path to a file. md5 : string Known md5 of filename to check against. If None (default), checking is skipped """ if stored_md5 is not None: computed_md5 = _get_file_md5(filename) if stored_md5 != computed_md5: msg = """The downloaded file, %s, does not have the expected md5 checksum of "%s". Instead, the md5 checksum was: "%s". This could mean that something is wrong with the file or that the upstream file has been updated. 
You can try downloading the file again or updating to the newest version of dipy.""" % (filename, stored_md5, computed_md5) raise FetcherError(msg) def _get_file_data(fname, url): with contextlib.closing(urlopen(url)) as opener: if sys.version_info[0] < 3: try: response_size = opener.headers['content-length'] except KeyError: response_size = None else: # python3.x # returns none if header not found response_size = opener.getheader("Content-Length") with open(fname, 'wb') as data: if(response_size is None): copyfileobj(opener, data) else: copyfileobj_withprogress(opener, data, response_size) def fetch_data(files, folder, data_size=None): """Downloads files to folder and checks their md5 checksums Parameters ---------- files : dictionary For each file in `files` the value should be (url, md5). The file will be downloaded from url if the file does not already exist or if the file exists but the md5 checksum does not match. folder : str The directory where to save the file, the directory will be created if it does not already exist. data_size : str, optional A string describing the size of the data (e.g. "91 MB") to be logged to the screen. Default does not produce any information about data size. Raises ------ FetcherError Raises if the md5 checksum of the file does not match the expected value. The downloaded file is not deleted when this error is raised. """ if not os.path.exists(folder): _log("Creating new folder %s" % (folder)) os.makedirs(folder) if data_size is not None: _log('Data size is approximately %s' % data_size) all_skip = True for f in files: url, md5 = files[f] fullpath = pjoin(folder, f) if os.path.exists(fullpath) and (_get_file_md5(fullpath) == md5): continue all_skip = False _log('Downloading "%s" to %s' % (f, folder)) _get_file_data(fullpath, url) check_md5(fullpath, md5) if all_skip: _already_there_msg(folder) else: _log("Files successfully downloaded to %s" % (folder)) def _make_fetcher(name, folder, baseurl, remote_fnames, local_fnames, md5_list=None, doc="", data_size=None, msg=None, unzip=False): """ Create a new fetcher Parameters ---------- name : str The name of the fetcher function. folder : str The full path to the folder in which the files would be placed locally. Typically, this is something like 'pjoin(dipy_home, 'foo')' baseurl : str The URL from which this fetcher reads files remote_fnames : list of strings The names of the files in the baseurl location local_fnames : list of strings The names of the files to be saved on the local filesystem md5_list : list of strings, optional The md5 checksums of the files. Used to verify the content of the files. Default: None, skipping checking md5. doc : str, optional. Documentation of the fetcher. data_size : str, optional. If provided, is sent as a message to the user before downloading starts. msg : str, optional. A message to print to screen when fetching takes place. Default (None) is to print nothing unzip : bool, optional Whether to unzip the file(s) after downloading them. Supports zip, gz, and tar.gz files. 
returns ------- fetcher : function A function that, when called, fetches data according to the designated inputs """ def fetcher(): files = {} for i, (f, n), in enumerate(zip(remote_fnames, local_fnames)): files[n] = (baseurl + f, md5_list[i] if md5_list is not None else None) fetch_data(files, folder, data_size) if msg is not None: print(msg) if unzip: for f in local_fnames: split_ext = os.path.splitext(f) if split_ext[-1] == '.gz' or split_ext[-1] == '.bz2': if os.path.splitext(split_ext[0])[-1] == '.tar': ar = tarfile.open(pjoin(folder, f)) ar.extractall(path=folder) ar.close() else: raise ValueError('File extension is not recognized') elif split_ext[-1] == '.zip': z = zipfile.ZipFile(pjoin(folder, f), 'r') z.extractall(folder) z.close() else: raise ValueError('File extension is not recognized') return files, folder fetcher.__name__ = name fetcher.__doc__ = doc return fetcher fetch_isbi2013_2shell = _make_fetcher( "fetch_isbi2013_2shell", pjoin(dipy_home, 'isbi2013'), UW_RW_URL + '1773/38465/', ['phantom64.nii.gz', 'phantom64.bval', 'phantom64.bvec'], ['phantom64.nii.gz', 'phantom64.bval', 'phantom64.bvec'], ['42911a70f232321cf246315192d69c42', '90e8cf66e0f4d9737a3b3c0da24df5ea', '4b7aa2757a1ccab140667b76e8075cb1'], doc="Download a 2-shell software phantom dataset", data_size="") fetch_stanford_labels = _make_fetcher( "fetch_stanford_labels", pjoin(dipy_home, 'stanford_hardi'), 'https://stacks.stanford.edu/file/druid:yx282xq2090/', ["aparc-reduced.nii.gz", "label_info.txt"], ["aparc-reduced.nii.gz", "label_info.txt"], ['742de90090d06e687ce486f680f6d71a', '39db9f0f5e173d7a2c2e51b07d5d711b'], doc="Download reduced freesurfer aparc image from stanford web site") fetch_sherbrooke_3shell = _make_fetcher( "fetch_sherbrooke_3shell", pjoin(dipy_home, 'sherbrooke_3shell'), UW_RW_URL + "1773/38475/", ['HARDI193.nii.gz', 'HARDI193.bval', 'HARDI193.bvec'], ['HARDI193.nii.gz', 'HARDI193.bval', 'HARDI193.bvec'], ['0b735e8f16695a37bfbd66aab136eb66', 'e9b9bb56252503ea49d31fb30a0ac637', '0c83f7e8b917cd677ad58a078658ebb7'], doc="Download a 3shell HARDI dataset with 192 gradient direction") fetch_stanford_hardi = _make_fetcher( "fetch_stanford_hardi", pjoin(dipy_home, 'stanford_hardi'), 'https://stacks.stanford.edu/file/druid:yx282xq2090/', ['dwi.nii.gz', 'dwi.bvals', 'dwi.bvecs'], ['HARDI150.nii.gz', 'HARDI150.bval', 'HARDI150.bvec'], ['0b18513b46132b4d1051ed3364f2acbc', '4e08ee9e2b1d2ec3fddb68c70ae23c36', '4c63a586f29afc6a48a5809524a76cb4'], doc="Download a HARDI dataset with 160 gradient directions") fetch_stanford_t1 = _make_fetcher( "fetch_stanford_t1", pjoin(dipy_home, 'stanford_hardi'), 'https://stacks.stanford.edu/file/druid:yx282xq2090/', ['t1.nii.gz'], ['t1.nii.gz'], ['a6a140da6a947d4131b2368752951b0a']) fetch_stanford_pve_maps = _make_fetcher( "fetch_stanford_pve_maps", pjoin(dipy_home, 'stanford_hardi'), 'https://stacks.stanford.edu/file/druid:yx282xq2090/', ['pve_csf.nii.gz', 'pve_gm.nii.gz', 'pve_wm.nii.gz'], ['pve_csf.nii.gz', 'pve_gm.nii.gz', 'pve_wm.nii.gz'], ['2c498e4fed32bca7f726e28aa86e9c18', '1654b20aeb35fc2734a0d7928b713874', '2e244983cf92aaf9f9d37bc7716b37d5']) fetch_taiwan_ntu_dsi = _make_fetcher( "fetch_taiwan_ntu_dsi", pjoin(dipy_home, 'taiwan_ntu_dsi'), UW_RW_URL + "1773/38480/", ['DSI203.nii.gz', 'DSI203.bval', 'DSI203.bvec', 'DSI203_license.txt'], ['DSI203.nii.gz', 'DSI203.bval', 'DSI203.bvec', 'DSI203_license.txt'], ['950408c0980a7154cb188666a885a91f', '602e5cb5fad2e7163e8025011d8a6755', 'a95eb1be44748c20214dc7aa654f9e6b', '7fa1d5e272533e832cc7453eeba23f44'], doc="Download a 
DSI dataset with 203 gradient directions", msg="See DSI203_license.txt for LICENSE. For the complete datasets please visit : \ http://dsi-studio.labsolver.org", data_size="91MB") fetch_syn_data = _make_fetcher( "fetch_syn_data", pjoin(dipy_home, 'syn_test'), UW_RW_URL + "1773/38476/", ['t1.nii.gz', 'b0.nii.gz'], ['t1.nii.gz', 'b0.nii.gz'], ['701bda02bb769655c7d4a9b1df2b73a6', 'e4b741f0c77b6039e67abb2885c97a78'], data_size="12MB", doc="Download t1 and b0 volumes from the same session") fetch_mni_template = _make_fetcher( "fetch_mni_template", pjoin(dipy_home, 'mni_template'), 'https://ndownloader.figshare.com/files/', ['5572676?private_link=4b8666116a0128560fb5', '5572673?private_link=93216e750d5a7e568bda', '5572670?private_link=33c92d54d1afb9aa7ed2', '5572661?private_link=584319b23e7343fed707'], ['mni_icbm152_t2_tal_nlin_asym_09a.nii', 'mni_icbm152_t1_tal_nlin_asym_09a.nii', 'mni_icbm152_t1_tal_nlin_asym_09c_mask.nii', 'mni_icbm152_t1_tal_nlin_asym_09c.nii'], ['f41f2e1516d880547fbf7d6a83884f0d', '1ea8f4f1e41bc17a94602e48141fdbc8', 'a243e249cd01a23dc30f033b9656a786', '3d5dd9b0cd727a17ceec610b782f66c1'], doc="fetch the MNI 2009a T1 and T2, and 2009c T1 and T1 mask files", data_size="70MB") fetch_scil_b0 = _make_fetcher( "fetch_scil_b0", dipy_home, UW_RW_URL + "1773/38479/", ['datasets_multi-site_all_companies.zip'], ['datasets_multi-site_all_companies.zip'], None, doc="Download b=0 datasets from multiple MR systems (GE, Philips, Siemens) \ and different magnetic fields (1.5T and 3T)", data_size="9.2MB", unzip=True) fetch_viz_icons = _make_fetcher("fetch_viz_icons", pjoin(dipy_home, "icons"), UW_RW_URL + "1773/38478/", ['icomoon.tar.gz'], ['icomoon.tar.gz'], ['94a07cba06b4136b6687396426f1e380'], data_size="12KB", doc="Download icons for dipy.viz", unzip=True) fetch_bundles_2_subjects = _make_fetcher( "fetch_bundles_2_subjects", pjoin(dipy_home, 'exp_bundles_and_maps'), UW_RW_URL + '1773/38477/', ['bundles_2_subjects.tar.gz'], ['bundles_2_subjects.tar.gz'], ['97756fbef11ce2df31f1bedf1fc7aac7'], data_size="234MB", doc="Download 2 subjects from the SNAIL dataset with their bundles", unzip=True) fetch_ivim = _make_fetcher( "fetch_ivim", pjoin(dipy_home, 'ivim'), 'https://ndownloader.figshare.com/files/', ['5305243', '5305246', '5305249'], ['ivim.nii.gz', 'ivim.bval', 'ivim.bvec'], ['cda596f89dc2676af7d9bf1cabccf600', 'f03d89f84aa9a9397103a400e43af43a', 'fb633a06b02807355e49ccd85cb92565'], doc="Download IVIM dataset") fetch_cfin_multib = _make_fetcher( "fetch_cfin_multib", pjoin(dipy_home, 'cfin_multib'), UW_RW_URL + '/1773/38488/', ['T1.nii', '__DTI_AX_ep2d_2_5_iso_33d_20141015095334_4.nii', '__DTI_AX_ep2d_2_5_iso_33d_20141015095334_4.bval', '__DTI_AX_ep2d_2_5_iso_33d_20141015095334_4.bvec'], ['T1.nii', '__DTI_AX_ep2d_2_5_iso_33d_20141015095334_4.nii', '__DTI_AX_ep2d_2_5_iso_33d_20141015095334_4.bval', '__DTI_AX_ep2d_2_5_iso_33d_20141015095334_4.bvec'], ['889883b5e7d93a6e372bc760ea887e7c', '9daea1d01d68fd0055a3b34f5ffd5f6e', '3ee44135fde7ea5c9b8c801414bdde2c', '948373391de950e7cc1201ba9f696bf0'], doc="Download CFIN multi b-value diffusion data", msg=("This data was provided by Brian Hansen and Sune Jespersen" + " More details about the data are available in their paper: " + " https://www.nature.com/articles/sdata201672")) def read_scil_b0(): """ Load GE 3T b0 image form the scil b0 dataset. 
Returns ------- img : obj, Nifti1Image """ file = pjoin(dipy_home, 'datasets_multi-site_all_companies', '3T', 'GE', 'b0.nii.gz') return nib.load(file) def read_siemens_scil_b0(): """ Load Siemens 1.5T b0 image form the scil b0 dataset. Returns ------- img : obj, Nifti1Image """ file = pjoin(dipy_home, 'datasets_multi-site_all_companies', '1.5T', 'Siemens', 'b0.nii.gz') return nib.load(file) def read_isbi2013_2shell(): """ Load ISBI 2013 2-shell synthetic dataset Returns ------- img : obj, Nifti1Image gtab : obj, GradientTable """ files, folder = fetch_isbi2013_2shell() fraw = pjoin(folder, 'phantom64.nii.gz') fbval = pjoin(folder, 'phantom64.bval') fbvec = pjoin(folder, 'phantom64.bvec') bvals, bvecs = read_bvals_bvecs(fbval, fbvec) gtab = gradient_table(bvals, bvecs) img = nib.load(fraw) return img, gtab def read_sherbrooke_3shell(): """ Load Sherbrooke 3-shell HARDI dataset Returns ------- img : obj, Nifti1Image gtab : obj, GradientTable """ files, folder = fetch_sherbrooke_3shell() fraw = pjoin(folder, 'HARDI193.nii.gz') fbval = pjoin(folder, 'HARDI193.bval') fbvec = pjoin(folder, 'HARDI193.bvec') bvals, bvecs = read_bvals_bvecs(fbval, fbvec) gtab = gradient_table(bvals, bvecs) img = nib.load(fraw) return img, gtab def read_stanford_labels(): """Read stanford hardi data and label map""" # First get the hardi data fetch_stanford_hardi() hard_img, gtab = read_stanford_hardi() # Fetch and load files, folder = fetch_stanford_labels() labels_file = pjoin(folder, "aparc-reduced.nii.gz") labels_img = nib.load(labels_file) return hard_img, gtab, labels_img def read_stanford_hardi(): """ Load Stanford HARDI dataset Returns ------- img : obj, Nifti1Image gtab : obj, GradientTable """ files, folder = fetch_stanford_hardi() fraw = pjoin(folder, 'HARDI150.nii.gz') fbval = pjoin(folder, 'HARDI150.bval') fbvec = pjoin(folder, 'HARDI150.bvec') bvals, bvecs = read_bvals_bvecs(fbval, fbvec) gtab = gradient_table(bvals, bvecs) img = nib.load(fraw) return img, gtab def read_stanford_t1(): files, folder = fetch_stanford_t1() f_t1 = pjoin(folder, 't1.nii.gz') img = nib.load(f_t1) return img def read_stanford_pve_maps(): files, folder = fetch_stanford_pve_maps() f_pve_csf = pjoin(folder, 'pve_csf.nii.gz') f_pve_gm = pjoin(folder, 'pve_gm.nii.gz') f_pve_wm = pjoin(folder, 'pve_wm.nii.gz') img_pve_csf = nib.load(f_pve_csf) img_pve_gm = nib.load(f_pve_gm) img_pve_wm = nib.load(f_pve_wm) return (img_pve_csf, img_pve_gm, img_pve_wm) def read_taiwan_ntu_dsi(): """ Load Taiwan NTU dataset Returns ------- img : obj, Nifti1Image gtab : obj, GradientTable """ files, folder = fetch_taiwan_ntu_dsi() fraw = pjoin(folder, 'DSI203.nii.gz') fbval = pjoin(folder, 'DSI203.bval') fbvec = pjoin(folder, 'DSI203.bvec') bvals, bvecs = read_bvals_bvecs(fbval, fbvec) bvecs[1:] = (bvecs[1:] / np.sqrt(np.sum(bvecs[1:] * bvecs[1:], axis=1))[:, None]) gtab = gradient_table(bvals, bvecs) img = nib.load(fraw) return img, gtab def read_syn_data(): """ Load t1 and b0 volumes from the same session Returns ------- t1 : obj, Nifti1Image b0 : obj, Nifti1Image """ files, folder = fetch_syn_data() t1_name = pjoin(folder, 't1.nii.gz') b0_name = pjoin(folder, 'b0.nii.gz') t1 = nib.load(t1_name) b0 = nib.load(b0_name) return t1, b0 def fetch_tissue_data(): """ Download images to be used for tissue classification """ t1 = 'https://ndownloader.figshare.com/files/6965969' t1d = 'https://ndownloader.figshare.com/files/6965981' ap = 'https://ndownloader.figshare.com/files/6965984' folder = pjoin(dipy_home, 'tissue_data') md5_list = 
['99c4b77267a6855cbfd96716d5d65b70', # t1 '4b87e1b02b19994fbd462490cc784fa3', # t1d 'c0ea00ed7f2ff8b28740f18aa74bff6a'] # ap url_list = [t1, t1d, ap] fname_list = ['t1_brain.nii.gz', 't1_brain_denoised.nii.gz', 'power_map.nii.gz'] if not os.path.exists(folder): print('Creating new directory %s' % folder) os.makedirs(folder) msg = 'Downloading 3 Nifti1 images (9.3MB)...' print(msg) for i in range(len(md5_list)): _get_file_data(pjoin(folder, fname_list[i]), url_list[i]) check_md5(pjoin(folder, fname_list[i]), md5_list[i]) print('Done.') print('Files copied in folder %s' % folder) else: _already_there_msg(folder) def read_tissue_data(contrast='T1'): """ Load images to be used for tissue classification Parameters ---------- constrast : str 'T1', 'T1 denoised' or 'Anisotropic Power' Returns ------- image : obj, Nifti1Image """ folder = pjoin(dipy_home, 'tissue_data') t1_name = pjoin(folder, 't1_brain.nii.gz') t1d_name = pjoin(folder, 't1_brain_denoised.nii.gz') ap_name = pjoin(folder, 'power_map.nii.gz') md5_dict = {'t1': '99c4b77267a6855cbfd96716d5d65b70', 't1d': '4b87e1b02b19994fbd462490cc784fa3', 'ap': 'c0ea00ed7f2ff8b28740f18aa74bff6a'} check_md5(t1_name, md5_dict['t1']) check_md5(t1d_name, md5_dict['t1d']) check_md5(ap_name, md5_dict['ap']) if contrast == 'T1 denoised': return nib.load(t1d_name) elif contrast == 'Anisotropic Power': return nib.load(ap_name) else: return nib.load(t1_name) mni_notes = \ """ Notes ----- The templates were downloaded from the MNI (McGill University) `website `_ in July 2015. The following publications should be referenced when using these templates: .. [1] VS Fonov, AC Evans, K Botteron, CR Almli, RC McKinstry, DL Collins and BDCG, Unbiased average age-appropriate atlases for pediatric studies, NeuroImage, 54:1053-8119, DOI: 10.1016/j.neuroimage.2010.07.033 .. [2] VS Fonov, AC Evans, RC McKinstry, CR Almli and DL Collins, Unbiased nonlinear average age-appropriate brain templates from birth to adulthood, NeuroImage, 47:S102 Organization for Human Brain Mapping 2009 Annual Meeting, DOI: http://dx.doi.org/10.1016/S1053-8119(09)70884-5 License for the MNI templates: ----------------------------- Copyright (C) 1993-2004, Louis Collins McConnell Brain Imaging Centre, Montreal Neurological Institute, McGill University. Permission to use, copy, modify, and distribute this software and its documentation for any purpose and without fee is hereby granted, provided that the above copyright notice appear in all copies. The authors and McGill University make no representations about the suitability of this software for any purpose. It is provided "as is" without express or implied warranty. The authors are not responsible for any data loss, equipment damage, property loss, or injury to subjects or patients resulting from the use or misuse of this software package. """ def read_mni_template(version="a", contrast="T2"): """ Read the MNI template from disk Parameters ---------- version: string There are two MNI templates 2009a and 2009c, so options available are: "a" and "c". contrast : list or string, optional Which of the contrast templates to read. For version "a" two contrasts are available: "T1" and "T2". Similarly for version "c" there are two options, "T1" and "mask". You can input contrast as a string or a list Returns ------- list : contains the nibabel.Nifti1Image objects requested, according to the order they were requested in the input. 
Examples -------- Get only the T1 file for version c: >>> T1_nifti = read_mni_template("c", contrast = "T1") # doctest: +SKIP Get both files in this order for version a: >>> T1_nifti, T2_nifti = read_mni_template(contrast = ["T1", "T2"]) # doctest: +SKIP """ files, folder = fetch_mni_template() file_dict_a = {"T1": pjoin(folder, 'mni_icbm152_t1_tal_nlin_asym_09a.nii'), "T2": pjoin(folder, 'mni_icbm152_t2_tal_nlin_asym_09a.nii')} file_dict_c = { "T1": pjoin( folder, 'mni_icbm152_t1_tal_nlin_asym_09c.nii'), "mask": pjoin( folder, 'mni_icbm152_t1_tal_nlin_asym_09c_mask.nii')} if contrast == "T2" and version == "c": raise ValueError("No T2 image for MNI template 2009c") if contrast == "mask" and version == "a": raise ValueError("No template mask available for MNI 2009a") if not(isinstance(contrast, str)) and version == "c": for k in contrast: if k == "T2": raise ValueError("No T2 image for MNI template 2009c") if version == "a": if isinstance(contrast, str): return nib.load(file_dict_a[contrast]) else: out_list = [] for k in contrast: out_list.append(nib.load(file_dict_a[k])) elif version == "c": if isinstance(contrast, str): return nib.load(file_dict_c[contrast]) else: out_list = [] for k in contrast: out_list.append(nib.load(file_dict_c[k])) else: raise ValueError("Only 2009a and 2009c versions are available") return out_list # Add the references to both MNI-related functions: read_mni_template.__doc__ += mni_notes fetch_mni_template.__doc__ += mni_notes def fetch_cenir_multib(with_raw=False): """ Fetch 'HCP-like' data, collected at multiple b-values Parameters ---------- with_raw : bool Whether to fetch the raw data. Per default, this is False, which means that only eddy-current/motion corrected data is fetched """ folder = pjoin(dipy_home, 'cenir_multib') fname_list = ['4D_dwi_eddycor_B200.nii.gz', 'dwi_bvals_B200', 'dwi_bvecs_B200', '4D_dwieddycor_B400.nii.gz', 'bvals_B400', 'bvecs_B400', '4D_dwieddycor_B1000.nii.gz', 'bvals_B1000', 'bvecs_B1000', '4D_dwieddycor_B2000.nii.gz', 'bvals_B2000', 'bvecs_B2000', '4D_dwieddycor_B3000.nii.gz', 'bvals_B3000', 'bvecs_B3000'] md5_list = ['fd704aa3deb83c1c7229202cb3db8c48', '80ae5df76a575fe5bf9f1164bb0d4cfb', '18e90f8a3e6a4db2457e5b1ba1cc98a9', '3d0f2b8ef7b6a4a3aa5c4f7a90c9cfec', 'c38056c40c9cc42372232d6e75c47f54', '810d79b4c30cb7dff3b2000017d5f72a', 'dde8037601a14436b2173f4345b5fd17', '97de6a492ae304f39e0b418b6ebac64c', 'f28a0faa701bdfc66e31bde471a5b992', 'c5e4b96e3afdee99c0e994eff3b2331a', '9c83b8d5caf9c3def240f320f2d2f56c', '05446bd261d57193d8dbc097e06db5ff', 'f0d70456ce424fda2cecd48e64f3a151', '336accdb56acbbeff8dac1748d15ceb8', '27089f3baaf881d96f6a9da202e3d69b'] if with_raw: fname_list.extend(['4D_dwi_B200.nii.gz', '4D_dwi_B400.nii.gz', '4D_dwi_B1000.nii.gz', '4D_dwi_B2000.nii.gz', '4D_dwi_B3000.nii.gz']) md5_list.extend(['a8c36e76101f2da2ca8119474ded21d5', 'a0e7939f6d977458afbb2f4659062a79', '87fc307bdc2e56e105dffc81b711a808', '7c23e8a5198624aa29455f0578025d4f', '4e4324c676f5a97b3ded8bbb100bf6e5']) files = {} baseurl = UW_RW_URL + '1773/33311/' for f, m in zip(fname_list, md5_list): files[f] = (baseurl + f, m) fetch_data(files, folder) return files, folder def read_cenir_multib(bvals=None): """ Read CENIR multi b-value data Parameters ---------- bvals : list or int The b-values to read from file (200, 400, 1000, 2000, 3000). 
Returns ------- gtab : a GradientTable class instance img : nibabel.Nifti1Image """ files, folder = fetch_cenir_multib(with_raw=False) if bvals is None: bvals = [200, 400, 1000, 2000, 3000] if isinstance(bvals, int): bvals = [bvals] file_dict = {200: {'DWI': pjoin(folder, '4D_dwi_eddycor_B200.nii.gz'), 'bvals': pjoin(folder, 'dwi_bvals_B200'), 'bvecs': pjoin(folder, 'dwi_bvecs_B200')}, 400: {'DWI': pjoin(folder, '4D_dwieddycor_B400.nii.gz'), 'bvals': pjoin(folder, 'bvals_B400'), 'bvecs': pjoin(folder, 'bvecs_B400')}, 1000: {'DWI': pjoin(folder, '4D_dwieddycor_B1000.nii.gz'), 'bvals': pjoin(folder, 'bvals_B1000'), 'bvecs': pjoin(folder, 'bvecs_B1000')}, 2000: {'DWI': pjoin(folder, '4D_dwieddycor_B2000.nii.gz'), 'bvals': pjoin(folder, 'bvals_B2000'), 'bvecs': pjoin(folder, 'bvecs_B2000')}, 3000: {'DWI': pjoin(folder, '4D_dwieddycor_B3000.nii.gz'), 'bvals': pjoin(folder, 'bvals_B3000'), 'bvecs': pjoin(folder, 'bvecs_B3000')}} data = [] bval_list = [] bvec_list = [] for bval in bvals: data.append(nib.load(file_dict[bval]['DWI']).get_data()) bval_list.extend(np.loadtxt(file_dict[bval]['bvals'])) bvec_list.append(np.loadtxt(file_dict[bval]['bvecs'])) # All affines are the same, so grab the last one: aff = nib.load(file_dict[bval]['DWI']).affine return (nib.Nifti1Image(np.concatenate(data, -1), aff), gradient_table(bval_list, np.concatenate(bvec_list, -1))) CENIR_notes = \ """ Notes ----- Details of the acquisition and processing, and additional meta-data are available through `UW researchworks `_ """ fetch_cenir_multib.__doc__ += CENIR_notes read_cenir_multib.__doc__ += CENIR_notes def read_viz_icons(style='icomoon', fname='infinity.png'): """ Read specific icon from specific style Parameters ---------- style : str Current icon style. Default is icomoon. fname : str Filename of icon. This should be found in folder HOME/.dipy/style/. Default is infinity.png. Returns -------- path : str Complete path of icon. """ folder = pjoin(dipy_home, 'icons', style) return pjoin(folder, fname) def read_bundles_2_subjects(subj_id='subj_1', metrics=['fa'], bundles=['af.left', 'cst.right', 'cc_1']): r""" Read images and streamlines from 2 subjects of the SNAIL dataset Parameters ---------- subj_id : string Either ``subj_1`` or ``subj_2``. metrics : list Either ['fa'] or ['t1'] or ['fa', 't1'] bundles : list Example ['af.left', 'cst.right', 'cc_1']. See all the available bundles in the ``exp_bundles_maps/bundles_2_subjects`` directory of your ``$HOME/.dipy`` folder. Returns ------- dix : dict Dictionary with data of the metrics and the bundles as keys. Notes ----- If you are using these datasets please cite the following publications. References ---------- .. [1] Renauld, E., M. Descoteaux, M. Bernier, E. Garyfallidis, K. Whittingstall, "Morphology of thalamus, LGN and optic radiation do not influence EEG alpha waves", Plos One (under submission), 2015. .. [2] Garyfallidis, E., O. Ocegueda, D. Wassermann, M. Descoteaux. Robust and efficient linear registration of fascicles in the space of streamlines , Neuroimage, 117:124-140, 2015. 
""" dname = pjoin(dipy_home, 'exp_bundles_and_maps', 'bundles_2_subjects') from nibabel import trackvis as tv res = {} if 't1' in metrics: img = nib.load(pjoin(dname, subj_id, 't1_warped.nii.gz')) data = img.get_data() affine = img.affine res['t1'] = data if 'fa' in metrics: img_fa = nib.load(pjoin(dname, subj_id, 'fa_1x1x1.nii.gz')) fa = img_fa.get_data() affine = img_fa.affine res['fa'] = fa res['affine'] = affine for bun in bundles: streams, hdr = tv.read(pjoin(dname, subj_id, 'bundles', 'bundles_' + bun + '.trk'), points_space="rasmm") streamlines = [s[0] for s in streams] res[bun] = streamlines return res def read_ivim(): """ Load IVIM dataset Returns ------- img : obj, Nifti1Image gtab : obj, GradientTable """ files, folder = fetch_ivim() fraw = pjoin(folder, 'ivim.nii.gz') fbval = pjoin(folder, 'ivim.bval') fbvec = pjoin(folder, 'ivim.bvec') bvals, bvecs = read_bvals_bvecs(fbval, fbvec) gtab = gradient_table(bvals, bvecs) img = nib.load(fraw) return img, gtab def read_cfin_dwi(): """Load CFIN multi b-value DWI data Returns ------- img : obj, Nifti1Image gtab : obj, GradientTable """ files, folder = fetch_cfin_multib() fraw = pjoin(folder, '__DTI_AX_ep2d_2_5_iso_33d_20141015095334_4.nii') fbval = pjoin(folder, '__DTI_AX_ep2d_2_5_iso_33d_20141015095334_4.bval') fbvec = pjoin(folder, '__DTI_AX_ep2d_2_5_iso_33d_20141015095334_4.bvec') bvals, bvecs = read_bvals_bvecs(fbval, fbvec) gtab = gradient_table(bvals, bvecs) img = nib.load(fraw) return img, gtab def read_cfin_t1(): """Load CFIN T1-weighted data. Returns ------- img : obj, Nifti1Image """ files, folder = fetch_cfin_multib() img = nib.load(pjoin(folder, 'T1.nii')) return img, gtab dipy-0.13.0/dipy/data/files/000077500000000000000000000000001317371701200155225ustar00rootroot00000000000000dipy-0.13.0/dipy/data/files/55dir_grad.bval000066400000000000000000000004251317371701200203160ustar00rootroot000000000000000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 dipy-0.13.0/dipy/data/files/55dir_grad.bvec000066400000000000000000000047631317371701200203220ustar00rootroot000000000000000 0.387747134121 0.946163995722 -0.922518214794 0.444901340191 -0.088648379736 0.398858764491 0.707421617532 0.441719326768 -0.984543274106 0.816864993569 0.973222292055 -0.365922889995 0.697928959906 0.194286490773 -0.421934676133 -0.918294577668 0.951258678509 -0.482260349695 0.017747824887 0.008439275026 0.022003496707 -0.302920122261 -0.605795821812 0.801088034306 0.197614702014 0.822315620723 0.531572028134 -0.900257095184 -0.043757932112 0.654348201408 -0.380464261169 -0.819307000804 -0.721142532083 -0.549296621645 -0.696135340591 0.364655265727 -0.090729077048 0.511611491293 -0.693341730595 0.751543758 -0.780508167624 -0.100583708102 0.219939025696 -0.780300794447 0.755047655881 -0.478715333951 0.152870187393 -0.388991446276 -0.066544469261 -0.306611784146 0.0724455254 0.456711407833 0.392354846453 -0.550316062949 -0.25272017908 0 -0.296393661931 -0.081321931027 -0.384882209717 -0.161185017106 -0.92988659956 0.828672486485 0.031786637059 0.343437931108 0.070601203125 0.449497285481 -0.16040248204 0.925434114895 -0.475145821454 0.979839225871 -0.626891396224 -0.058522138943 0.302296624642 -0.174024786406 0.566474680917 -0.843382140828 0.06386048901 -0.640449793751 0.7510264358 0.039519118666 -0.693865338491 -0.565236505598 
-0.807120477633 0.382874752951 -0.416578339326 0.755830304425 0.645279718315 -0.266629899103 -0.480787014528 -0.805702982944 -0.669944149585 -0.606149719157 -0.273405988981 0.471016167011 0.289001415041 0.486149073628 0.586409922872 0.932608599035 0.15823728798 0.22399249773 -0.439220995009 0.665543534564 -0.986482345681 0.292599882564 0.886002068699 -0.934865622332 0.640718077698 -0.838931327882 0.776467536851 -0.160226448479 0.232824243654 0 0.872813242996 0.313305660232 0.028737223536 -0.880955270009 0.357004729282 0.392700389778 0.706076670591 -0.828815072158 -0.16028103921 0.36150210598 -0.164649366846 -0.098347026224 0.535846634103 -0.046560186303 -0.654964355074 0.391548500034 -0.061021941069 -0.858568767676 -0.823888008525 -0.537247934542 -0.997716234245 0.705736113015 0.262622761778 -0.597240488038 0.692458895234 0.065610309056 -0.256880737876 0.207229549349 -0.908046105978 -0.023430369769 0.662465871652 -0.507586973175 0.498795845093 0.221621128787 -0.258012449344 -0.70683028737 0.957610254627 0.718607996331 0.660117737014 -0.445915976414 -0.216634260053 0.346589265082 0.962594299624 -0.583916116532 -0.486814086579 -0.572611065768 -0.059019382124 -0.873539331368 -0.45888143117 -0.178928706485 0.764350698803 0.295988035322 -0.493108343755 0.819438659125 0.939108823648
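The fetcher module above builds every public fetch_* function from the same two pieces: fetch_data, which downloads a {local_name: (url, md5)} dictionary into a folder and verifies checksums, and the _make_fetcher factory that wraps it per dataset. The following is a minimal usage sketch of that API, not part of the dipy source itself; the URL and md5 are copied from the fetch_stanford_t1 definition above, and read_stanford_hardi is one of the readers defined in the same module.

from os.path import expanduser, join as pjoin

from dipy.data.fetcher import fetch_data
from dipy.data import read_stanford_hardi

# Download a single file the same way the built-in fetchers do:
# {local_name: (url, md5)}. URL and checksum are taken from the
# fetch_stanford_t1 call in fetcher.py; passing None as md5 would
# skip checksum verification.
files = {'t1.nii.gz':
         ('https://stacks.stanford.edu/file/druid:yx282xq2090/t1.nii.gz',
          'a6a140da6a947d4131b2368752951b0a')}
fetch_data(files, pjoin(expanduser('~'), '.dipy', 'stanford_hardi'))

# The ready-made readers fetch on first use and then load the image and
# gradient table from the $HOME/.dipy cache:
img, gtab = read_stanford_hardi()
print(img.shape, gtab.bvals.shape)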

cV3+\|uKck ʚ1Cz89\cK%8 rOQ|FY4?Javzgo %!8ĐّbYen)lLL!9l2,Jo)tz"ryyr3s\ 6=HVۤ"YㄍV&񭅦\9$ݭ72 #0x1O=#98usNպ@J )hstFYmя[P<??6bJBvtnS!6oɧt\473z.Ҳڱ> 2cQ=vW96| qeQ:ʿO1,Nu2T•(A!/&S_S903 yaP(Lf]nL] QQ\,akpjqBt'O6Ot[SH֔8uIwǃz/c<"339k[L(-[$݉ja,$K4BA [mR;^hOeTt38]3 U2 -n>'[i7#ˣm>Q}:Tfc&,)O[Z\{CDEax[SpT+c WyE/^U)uΦl'ݓ5X=#eEYkHIH7ꨂcJJ~G{RƂ'3tDr\Vt~6“ǿ$W^yoFsۇ@{[o 2M',sSp_'b4L4ͣ4) ]VlFM!䥊)k椡oQcLD `3/O-d>R!Ҫ"-meS{U%=8yM瘮6wڎm+Qɚ8cnJl:W 1b􊭊D &*\+'\sӋ;dkS=jvqi$^:pKD E 뼼hU/MD wxzdd(*-Z!!8ETJ i'cWEuL~=|NW߰Sz)\C:8}/PR!W28!:2NY <~œ-Ynj)A0\^9MeB89zMLޟF/љA\嬲*ezҭ[DbԹeNp.7U{,Իݳ.E'h^Q!짆H~N{Hnn+˄XM@W&YN9$x`E%:j~`+O+QWl28{`MO Ü ˜FjccimQMbMO̰ʫj)/[DhOO/sN&Vɽr4+W5;4^l@ZkDkob54$R ieĪ sw87J].YDIo8t~Z5f*d(v $M4]sY'je^-ߧTۓ넄< Ml3yl n61([Bqp4?*}tܫds{.}n>>虼(~#SQu'n{Ҩ9+6Ul/tn8?0B41Ė-=;_=NFǞ|͑Rݻ<̧,L0τR5Yb%,Kss#󑹌96o^Bn6Ua8esm3\2#Uu8{ RfkJDRM+LrIrl΋={lM ͟V #'&(>.纬؃)gqJ&ZΈH-tfS;13fvvƑ߃vk`1^Йf^nit!^9#YruL0/e~ w0B @+%{w/wGQ̨Uf& 2KAq$bѶ-QVKY?>&s]ic##: ##Zյ#C=$6󌄰b/2>vmbˠj|̛s33Y[cSG`:r~R] ya,B2te`q&֤N=OBcz&巓{oTŻ^~=sƼ8"8k׉"9es#žD%Sm[MK$iaR8i%s-Ln{A4 oh ^TrfN*H?]Wr9m kGyH 2>.dO [hnx\Q:+yxsVZ}o:ltKNʩ'1W.[}[3F寕s,CmNm`jiD`v>(>1 eȄp*z[?No媻ǽ)- $z#R+ülPH%a;& cJ'ŶEݾ ckS[95/)#ĩGvaTsё@?N%5m>sm/OD;VupՖ:SzwJ[ϞO v4&$xal[G(4 FWϕ:#ט|,Ea9i#vɖ'(WJ;3T0P ozq@)6&vnj.Kg-1}ZTZw7|HcUw˯xf f#=㧏o1Uio#xtۨe@aSiᚚyvB4;U~˲s.hVBY4II,. O9\QD)MȻ5]6߯@ 2A9߆ e1i&/YQ,Z{B ,^/6 ^?D(n/oqRT ģzs6֝~k wxAؗdIa'0NokˊmĢ1!}namO n~dCqg_}OF`& -H=Fsk:yN'B nV땆]>O8c̞WbTf dXS{VC'4\|ަPZ#;'$X|MzzȽ- yE2 ._fOG &|\;ʃoܚDܥ[QlT x![OQ:Nf%H$hZ=DA atJ5袹#8߿^Z/W!A$E\oQUj5dn$b87e}~iB;Wl6jT/KnIoCώ,9w ݙ?:VEĹLʨSsǹ5C*8smDP1reЀ\Ĭ'1RX~d~P0f>[_,b[JkUtmQzV|pKW|髫>VcJV844~YO7ςvndaH utΫC;?݆rN)Mv/%О.*9rh_B,2>qU+/w!$5m~.dVƕ?Ow\ 'a2yeit1gR_ˎaz-D\3czHQ$5zF+4%ԸɎmd5"{qCB4Vۀ krw@΂320|k.xFMJ2i;uQGlLeFxXTѮ!/F ;Gqk[_Q\ 6#gB]Ɠš seY9Fv,4#Gb0R9&|DG}O|2x0}r_{ƠGvŢ7j&–_Υ*):WvDAX6{ ̶}#-fx~%B?.KOacr]|Za-#vz[hl!Lɻy%J-뵀fYL+_IV9c4ČUwHQR8 BR]]V 7q ßg߰Ct̐dPu닇[p&tihx\c^U;yEk {_Q6*s$S}˻X5b{6LTvDy4~IL)1J4a 3'y3, }!-+"[͈`OՏZ\8΄ɾW7{F0skJ#mlR9X<žA5/-E;●`w#xgK񴍫QX Y i*h'.DYջ6[ݎ,6~}u5LEq)/:`\Wz<= @PPc}6$ gV6lVGd/Ube}AXqO͡ v3 /~ /,ek\+ !DsJ0 +:gq5OKu)YTFk?BڻĦܫA8,/#rts+i_Ip:{Ys >*  4hzGl.ƢFNB4l =WXY2" +dM d9/8vTϿS\gT coB˜&s"\24xAs\X}i)s1/ Yb0ϖD9P9Qg+qi2ǫOpV.= #yjTyOTef#MwvaW3*Aܴ+|t<,*.졦3V^xc'`-lہ1-l,`W'qd.jj, iJ'ldzCT|9S>v(dDuդWϘM鸐"QqW{V${Cp;}o{@OtXk #W됩q5njDD>K\ edsoS=Q9HR,5iEsG1\z:}obq;T}=`0(%gW`YvV^MG=yW]g[=Te rغ\{_4UpQfp^ţQ~LANDi!=@(6 PřX=N%75ċKNBforvPS]QiQ9:WlO3|8r@sVs<3'N5 lMrK;!H 3e᭔Й3݊>pQϤ[f=DkGPy"/ni^HIT{4nH^[aS0vA75g2)s]UY5hPG|ErٟaQJnz!>U&+yMQw&HCոHDiWbty偋GȼĔt/QDy}E͠08Tţlx Zi,%*JNniڧP`c jɎR2e ,'%kOR cK+?BV594( e>lSc c8GYm\vS@>moP)xuk6M1Wpe_d娆q)6Xeߎ3cC 7;%nD`b,TQnN,GO- )>z[xV<6`YބupSeRXJX!- ]WnTi`)Ȝޑ +/Ns'<t<_* QG#oRqm?XqqC[-E*{^r_ۨEz#7\\ &"݄c:9+;%%b¿6:Kyh/lm\NYur~G1B^j@[뢡%XӞD'yM5a(|F4ĢL9e 90uCc֢RDb~q<4wts\! 6v9Zz3 QC1UZlwm"mLJok+Dnr&DlBPz _Gc>=Z~;*28b״YilJv||ӞĬB3/g*LӟaFtK-:9AYSS](P+\s ۺÌ]ii¤hog˱HQ< mFZN{+3Y|cE֗gN.8?Buw:$jS0a S; ]RX>] w9DpFyЦi& `?ǾƯȑc Vr8Pp"{/χA{aS4ój8Ӝ/Rcוd' }ЗNOr 4BzgfhK0mi7 vE^z㓀@5Ş`-zfP.u(,So3ELSk3[φ?պlb9T쯕)(I37=ދ;ikE4z+çXN Ƿh4ҭRޡ˾<.uAHCδL}1 flz#*Xhg iF5W)|>3;-i!hgq\bFZS4vn;.(t[G7c}()ޓ "~Q]d,Њ Vv-pVCSQTm(5,d oVk^Tz+]CO V7#AR`,ʺL$2|ޅ>\Dete`m0*fkLse|X$\`b'cB )@VH7S+qҽSs5)kJYΫb]{Tv "Ttcr}XPp^]h0,Ys[^U?e. ӵHHlWסu9bpislo`V0𺾧HI!͎zf6phQKd0M~!V 1gBFZi_q"&SɚX~ъY!AsI>.kJLzܾqtR{OM;:f<_zMD:{2NJdd2g ,,w٧Wd:aqH$r@Ec0Z5 q&%9qnv p.[u8(]}٧fB"]4(!>C3IK4w 07KFyô ]ai/vMߡ^g^Eu͠55be  A@:JQ/)TFc67#E]"iGYhrC./@u)-<p^J9vjz &>e/0yw,%x? 
[sM+&xJ6}F9[&yokPpL"؞=k\YV"s\ĞCP*f^3TOEl*;$S/a L>9@JZ\^$/Hej\hLCGl{%dPJV¹[mmqd^vb3DѽZoH/璭F#X)CH)AMV3nX]BNbWq V{P֑o7D?t2ߝҊ-33i>T>3;iMo[˼S枺 6v,M)|J=d䙍A|1|m#i`mR*Vbpiũ Pv<b />J>lܳM@K.3\M; unXZC `޴V!T֤|GAlPZb1:p/JֲG8H0= AtC $ 8FC|O - BZ^kjE}F=6Rc#y+Y$϶_Dư>Omxgoo􏠐m(h Z~2dA)cǨr%^ 22K9CTDZm҄ދkL,[t_|Zeӆ3Q p+MƟXTg,ޱ]-#G0 ctXaQ*߅34jo1 tsIy>,`<}P/UxoV36tsva" FY3rqذ!оw>/ei+ͰWc"htꈵ{ڴ2 ^G 9by޴O.XbHtYcm#߫WL~4 .h_3fo|Hg ȖECq|bCHt%NV0 C^9 >8!ՓÓY/)DmX~LK^=wFスi0]$V[g8WP5#~ 5[LM9E =|L5Qk6tF{]5Wtb_GgPU25]V)=6u&;R&[{5bGaדCy[R%.y9Ji0._CϰƆkS޺_иa:I֧.(Gg+NMŏ__Ve5 aкK;qDv;9Ȋn}\#FPV-3W&Lڙ?Y(!sV}N4?5 kW<{=_3nc' *h2% J1SqхQ2``Mv0уզ\\0[fҮaᶘP:G?`ChXcYiOHc(^9A#'3v\>,v6I4'կ(C?]{qQ T²exl}z]m1ӽ3YpFIӞ]Ʋ`t ]8&K-2KHɊLG}ۥLB<wjQF1} >?*$ҘxoU`wDbSv.f, m] C5Y,x*G!ʍc]4ےRId-Nڋd Q&ko[~ # w&X"\i0j˲d2MEE-4'A M^AnHl]k(?X,7 ՅV!%NjZS.+wC*YV0R{\z, *^l!&yu)o+G_hlNщ::[Y+{Ꮌ:ARXtjwE^7)ƣAՊ߼➹ ;C$m 嵾k"]E+W,۷n8Z%6izWu&y>Esf"29> o࡫=#8;eY{ kQ}ܖؑI\(CUOrHм㶜-E-R c.[~?FJ8ۀ= 55 6IL!5>q vl>VMT6 A4HmׂUu,]u#d6dVQˁ%^ 8pMsԡ)JAͮtޒˈv@S--yYYPqFi^Yj4= eIY,uPQQϋYߴƨ}+}e9j㢸(S+bϥ.yQ3̓"_򂲀>7B:LFGVI_'yߥ),?NƑ1ә+7?XZadqJr)L%m&͇d86=ӚKUF^t>{:aom$uQuu||+^dZH0 f⢷ty/_6aOc^EP코fX0Ue mƣi4Om5ѴS…jVnN #K`!_D5Ȣ(;uWN=`ElrY6*M8Y6z^TqT)4*ćqdC 0Ih&|1mlF OӍj-i]PlHTGalGY%J^҄DWL%ZXpCr9޽H74q+v)U2BBy1n'> ) ͢Aō?֒asG^H8"(2%T?Fd${YSA`ܙɛˇLmto^5mLZU}-vDO |FԽ/ wh ]vʕ at@S?mBk(#3&j7@";|Β8(f+$ͼKw=Y79̅*BiyL90o.qqb4gq<aC,p3պrċO7ĉMnWapT~C+c?s#$G F k&M7XadbPbƊIg=# 6ˆ AXM-?`tvFYaCI.cAy5B ]fZltqn2sXȹt@MW2[pË #M]207?_v3DL`w7]0mR]2ǬDa+n!.~DVPY1n>6cuUߥ 0Gi'QPA&CטjDg5י/sd1't<,=ֆ Pc7IZnа+AE1'ǎn 5{O?7ǃCܓ1-3Gyu˃DnR9 k{KJOz<iO's[X-Ogag6,Z뷕P03羳Yy'PMd1?1{-~u<'3Lܣf ˡb[Ww{0IW [;,yr!f Y&ghqLsqmvdfYKF?#gkׇK'5Ɲ5LgVbDDdg}yֶP\. h }V3;q͂ȴ_(8{8!1g%Qn>2bkQ7cs\h˟?39cZHcz+fO=(/(>lY3ykOVh,0|4q9ȅX$-ہ9P_@:vUH?^'mwY/mqk|*Șrf>xm)QX0ڮ݂^7׫ymmƿ_uԢH>A&L`Oy:rŭ$T_d#^0ҵETfUbZ? u98%8*l.K@;D!Ts@2e~\)7sFοF$-m$|Zq(j'Coʥfx2ٹo}U&o+H8<8ފ`[(Jl$f&&enj Χ0FRJ<C% &ÈmW;KML8-"Dx^ToL6 F([0w "zWʫ hQ]_m1L7h33iۖDPΉvƝ=ēנR +wc7œYjV{8@{|(c(heo%R痗'שdrLO}Kce^]N锈)dgJZokKoK_g槯/UBP G:g*NRN WsB-\-jmilG0H`·EDrhIF"MN?_nf JkȴgD}csK1gqʯ1j+h0uϔb[|N};>M Dڵ2-r\ G\U7PZ= SWRnUO?8 SBJ!dB#d9|XK߽U:8,9r7K^8#C2` oٯvxZ[IB̂}x tAsL>;9_}gJV&IJ\o U`,]}k[ W 3NG{vJh˩0|By).fFjvFθ3ڠ}:GsF,rom0mvJCʶM56Z2w| }j0RI `g,<5l k R1ݡIto_ZZۼ>ezZ?q?ca{ aEmShJ!q'fȅkAoMQK>:cV+ ,ކ:9ݻ2\05 "|a{@3&* \(*V e?Rms._3"z5X*`kS!\r85S syf x:+[?+O&^ν9Cf`jzrȑ==/_Yۖ2j ڏ\`ͳJi5 Thv- 2Y)O8W:2OJ^ jAZKx/K{b+xA(XhXힺ9t5uuG|~ g㱤 1S.ٛ~B,*NLٵcY$ӚPtaRcXpcS&@_,Hڳn-<(T̯`=\ hƼHOOY|=t/Ņ ~KI?oAUJ\p?MV1MRy3#^Z3;%I*; 5kOaÁv]/xS806i~-3#[!<:YcX`]ĬoOԞ %J9>}X,5ċ)3l/3:׹0Lgzs~S4*v4-smQŠ&$v`Ϯ}ԒhKmIY5F!T[X!ykyV=> pꂭqthvx^qD^Fc"c_ D;_[/ä>s^]stN4Q{bW5g?[Bs˨SVRÔfI(h|Җ <爴FܧU0x%muMo&WsBGzho cC37P53 Qgduˎu/n_ >|M AҬLޗ2lزYtjۇ(LvG6 p" ̶I{hQ/(}əᾂR-X::D{&֌V*Eq]R LTi {ֆu&!PVw0D{:U|` "{ݭ `sHe;ةd`51X2 3R9J'cRX<̡Bcr@ LftBzmͬɵ}u (u\+><=k0 I4lAjjlv3Ζ_Tnj:Ku[o <ڙ{E-'0gttč>(u);7&˕\=XZMq7?'EC7mnSn =erN $R(밸ė`4M: Ŗ06 C[S.oqb/{$*p?:e۷K$Tjʘ.;"6ct߶8f2t7dn+E#^-U8AFzl)򒩭.`S`8yb1pkl׺k\Gzch1G4f:c "z uZZcpCVJPCyRQ)&0MKr`¨muS9pGNl &k,O"|'75~\zVRxd[0r(a}\IׇM0Vmq mSyY3֞US4CȚ?w݁3\̓J,/jI2Rha"8.r;"p6+C8؉Rmy?j>C19Na6jb:DU]W'&Ecgkt\xe>EJA96p#́N۰^%[޶ N+XRœ[1?QxB11ۘb5tP7RgCDq g$jfܶΨ\Yꡦ٨ ftwؽe?~Qf;ZoCI]9ènCp:GNJk$y\)mX*,lr34. 
T}7.V!);l&E{N)fmNTeݡ0+mkn8\:ۛ`kOGP;/:fw̿pwZdIG(# #pۿe]pphu?h]J?w:|G?Xcz15i`,Op풌y٣>^&֬NoOEi=7eD}CFjOLO;ϯݼiVPLY+8ȃަ^Gl( ֖pWs$3=.^y1-ލ+o/MyMl 3zHI caݾͥq˜ѽ*ʎs3輹nShIW~a[5D-cHl/t=Oc;I 9\?4pRx&D"#qnBI #y޽({?]5'Ml3;cKHѣÀqs{)bsa?c5/*3o\m102Y2 L%¿t:qڹA>fokl(xVC׼2o3_g0x2ZL?$Vzێ^??@C)eecl{n Iu)S/+>ũ2N,|,|arGw0Գ05*BҴݢ^C*)N 4YyEpֲzX -_ L}J9#I/2\JE)-X,՗yk{_;9>LϋcCP̓(5P,]YXp1C ѵb,ؔOffM}83KiiCIxd:3LsZw_3fqƚg-g#u.%żH5ut~ MO<6K57":Q?XSn 8NjS.c>s(ʹc -Hutdcs|%=ZDgŬb'3^tD٪]ГwOtt#H/7 ), -}S ̇ŏa`7成Лa#!GGdza4QǾkPAoe-nnDR rm)a^̼A`hGp.>u?ō"hnweJhu5!!eE϶L_TT?,0cgD] x(KLM$D0%0Kr/Oڏ6OEO u?Q?&$gBc(KU}EׇȴEgX欇sw>J?ffp2ԙlan8gʮlNLs Be͔lm7.ow;fK Hxkyfb~W HvC&?O4Y7;?^s1ǹBvW}ȗ=m.4w[I]`\WOaMOnu)Jwrn8#rbjc[I<.!Tm>5ZUq~(-x ̱3 tỶ C7%tжvȾ;R#;U#YX鹟(T?B-Ma Q ÷d5**=Z b3T1ntN6&g< W|p*c| !7[F |:R&U=XTv8>x+zώ6aJ2fGY`gQw0j*ݧJz3gM7n?g\+vq<+fwegb u,ZL qqS#2m/{* ,AZL}S9T39oiǜGVivєĝTZ`ʘ lp̑C`,7So=7q.-PP< Vd04@TP:J #$ nKq<-]G4R7 iCFtW@͸BاEVY(+Rk<8F15NK[΂jH+fS%AqyL ޶46sn#10u?DjOiF柧Tj-K![ěb!v Wl/*v-Ϻ0[I˖'VSTOnk9@["0$gӠ1阾ix*;wꪹO   Z؛ous"Cy&f+͸Oژkm~Vh#!MM[ZPb׊@=a/[d73@؅Ffϥ]#rUR* &K7"Q0>ӽ0roPpB"Ȟ $6? Nf〟ceFơN`P?.-7&ɮ;Je{{ztG&e#b猡;E+JzyX瀲uijOk؇ؗjh:D؅2VCz6 \.g6tV1l|FXPhϰ\RzZ9Jn>VjswƟ ,&rkV,VI$ESXfYԩhHۇ=Uuުdn/s|ʺy:_j!_=W\ Q; q{†iPfh \ᒺй ށneH0ΨOo951j^8vf{T͡*Ctm_b`eVPEF5 춢FT Cuu<g/Zkugd?$_oeD{D_3fˊアOląƁ?&:|^ hV ۢrMɴ$ZyauO7Dce@¦U ؒ<۶Dodu{ VsuPz]-]&wUw8?$ 3nID%E~v"}i820s"#3ϮޔB̓?9z֙Էr'l]O䅬Wzh< ViN!pBKqShXK*w``nՋb'izGb}#eJma[^o޾If ZnM nW m'UcoH,%aXeyEې)B YMd٪-6Di$YFK't1Whs:BO! Ψl\I>Oc\<4he'3>9`~MH _O&I!ލ0ƍZpbtpwtAsESQ+1tH$TY ]0U^.Q/ ~ȱ6 c^Fwv<)iUb_ q{CAu5 n)`6\Z]^14ukydA7vO_`u{0Ux&>IQНhU/ "#M1SkE~I1!k!dC `k–skuё+h,n]_V!fkxCnNa5. e)~X \D=\{L6%޼Vb(M+a)F/EO 6_yV,/<F%X^9V|rE\-JaCu^ )?U`_8TٿPg#ubCЅCJ,X7Uo*yS4ՓTNj{mZ}wɺz06ҔL\CGHpY"<&MkFLGjc5$c`VY!B\,E8EoQ4  $b>{TWoe^-փG-_=:zKm'uu~. /DwL l"XY?[.5|W_߄B iaR{U) ri97^ehWa7kK[Ms:ϯOoPsrj^Ѷ@CV؉=+樅(r[w%Dj"sz3, MQ9F7(sܦ!?RGuNHG@Ǡܳr_.|rPRX^77}4$ybCulS^4a% 78_\,f6%~`s(R-vǢ&D/9tG Wq_d HOWگ2C P#'FNGG+<8 p*m>H@1Y9O,\3{[3Ԇa@{];.jrki7Ϊ+7)a'I/3!|"'[XfW"#<35E.sw@:rp0e"^nSR( z e F48;ITW)#x6|ۖ޶&}u$}fCB*CL`,s !Œ _Q8ouT"5BsC;o-2uf'|*LQivcv8lUze3Gݨ`\ [s7ީ|,l 8;6*Uv2.x?SDVNAؠ 'p*(T?%I#dsr;PazNh]R ImKngDȿ- [dwX"h yD,6Vj3r(=^zT\aJq:ֳ>E߯ EIPw,rٴYx a8G0C'DW wɗ6%1frZx$2Wmy=і k4) d# "Ս~UdXf脧?8~o!V*jS־8]Yy{^sO![_Nұ8G+zSSg,_vvJ}]Ж‹5|,NʆP 'ug&w;1H 2ᅣ!O3D2g}3{Fv=?tFCnybq"L%G=ߋS?7TJ$N]O޳J!$@+ 9ólIlIʖ$Qz8NW_eEY}$f>%3}2$?uTQg?=tkwu[(ł,=&^XEL(Cs#1xu{e?4k6/w낕gځ +XcW&N0}:wܰݚ.[+Dj®ވB:7$wvӌM*4޿Hiu)&4/,]RfЊZhg{ sMֈG %iy[vz}[E5`feqQNjs]s(MuQ? O "*t`(!GӾÆ9 g V mZe׻6 %1SFuޢ΢ֽ6"˱Fq>[I(By8TS9Q&̧\0İWQ£kYMX.`Pkvأ"j9{̟<u -tN\l5YFGs'v Asr൭-oPvm, o T̠xr+:s#\̓MWf&ri-Bsa&?G;גɘz߃].oݞ$k6d 5HfɁ!te!"6ܑU6X Hn6o$)O Ra|m݆ Y. dpCxˣL`BY@a.`M&0 "y}LQXGukQ,ܲw5+!ȁg|T‚')4d:12u~,$x4;QR f K7FN11ZRGSPAL@94-0?usE`012vp:S"k ;{|ī1QĐ[)ڏ" ŷH%1 nXQMO){4 gk ۭݮT6/`FN~.*sdՀ5hA:punQᷯ 0M[͌uoNe? 
H#+Nan a;q]ecxu׉t;#cSU)qO-Y~kU;`̃r`&ÇsG l #&#ƸafWMaA|=GYemtcWوUP>!Nlq0?SNwޭ7&.Mtx1Em$Lnk>-${%ơrtY}~.f7iPî؃|#?ܳA8i؏q9WZ1p,3J4 $)J,֬ԵbA0)%`=T_&[<-;v{IH#="b>`_n9Nxvb)9L;VGL2&.MFbV ?K[ΪY!ZDQg39Y2/bg4P[k Pf8vW0FȲ`Mst3Mωޥwށi Lli:J274?G>TⷽΊfN X<{~>98#$T.)j8=V2Ň0'mk[޵F!9Mw;[e.cJkw]56G͎z01D^-V; (sȄ LIʹwL@,iJdJr#I*lgˏX[t|B$ ڡ fP`e_Yh7:?/|U2*]h;a"%FK|W~c~=9BTxSur/.^֕yO!W"j)Ɂc3ƱJsmtܵLR(^ؒ_@~.r!׏WU6B~Q@&M%UCaE+XӑDʬ\ I(D<0P{*5ꙝDjԪO6yFr&H3؝B$}CuCYwycNTƣxxO ,W(Ty#.{5!=Z&;:rzE#؋ŕGsXsS*㢃-pby fE F>n%myevntzO~} }ڭՍkfp+̋k+u uw;7:8&aS4[810}'1v7Oanq >nbB030lѮ˷hSgݨ}ÄVw @^4+˞,Q\کff=YM~yu2R 2 d F#_m c.<~~ɤ1Krps2ͽ3z.&@K%G毰k-~.(Wet줂g~OB]|UQvM*oaȎJ-RC]KZ':igTFM;~Gscos=mvq ϑKW_i>OyNqÎ6< aULxs3}x*1;akd|mH9ۃ5/couf} 0eUiuJӸW GhnH5XN bRKbƒj`7"~(AR/Y`{qrYőJ@|J8,>#t(:=\KHԙ 3IT:OJDt=^VJDӁw:X;^C1:?Q ˂90JX{7L ".E͗#@p2PAr -Ĉv(3yie6n6Kǟ钬C )t5"ĦGfmdAciA^f$AIaiWS;? cpW[Qy ;e(X fWښ8bQyʇշp:z޺%cɬ~pl~A,Ltiu~&WX 5m fY;LFls@5q[!NCR-"\zX&Q~gvv#\@)fpb}hpPN?h*,u-+K~DkFzrճxy{?h5Q FTe*SΗlY5K1V*Q!^YoZRw)) #;HiQ+7kIPe؆ltWXdl)0L@ "rCbfݝzTxjXq'.i'@~W_ТձY@ꌂL 673}MxVvvG337e뤌'`j7̹Y/x|auk}inT Sm0$p$*vGKvi9{\wt]QXٸ]mئ~~Eg}?{|XB7eAܾ~{TUPmw)nD>.͡KӅ*1pч]Չ~~]Ld(ިv?1pyB!Cְ,$S-Xc?jQwK|AGYorfqjܽٙg1>8Y~,YBObn{\T.SccJϏ)fh^U{檌dzRUC:j|ͣ\A<]Dlc+ֵ'[- *,Ж\&h)3,6t#9"2"h 3+p vm6m@?31Ƀx5:H0XHihH_D:BP;GjJQ, 9?m=pT԰,zŝςw0112s^ 8eO(99d23_2/yr6Ə䉗Qx[ !!)!v[堶 "Vy$$ E@%6h'WGhqnޡ;e˼v8b>76v*6e]G\ GN r #QSqtI3|y6=CzKezdg]ӕ-]IԵAc'.$3gx·ҔZ:q* ֱ9 vaR=,u+\@#7$E%L;%ȚQ[Ҭ,W+6HTƄS*b@k$תnT_(!6s\ݫ}0#NYKJ=L; 7VoMoG5ytgE۾ g5&abw_auflfKv[|@2k=cȩ@e\7_Kvg.=%Lvf<[3 8z&#.ujXu^w Cd9. W#)jy Aal{zvz=ItA_B\(sUx\/ү+Uͅ*TN Z:;%ެ(MZZbjyų{[b~M ;u~6՛-B܀O5ּ5oX}HC( Y3,;n>()Bur{0xG> #17nfu0Vg{\/* m#ӥͯ ٣o(,#$+9(SlP -o iJPٹg;ckʆjT_$ӌnڄBog^Y\oy1 ,Kvga9ZQ-.*cYmάhnOzņP>8h7 9 Fb]Yݺnpe]xNB|D9ՒPUe p$㱘pOw ӔBX)B+-PʪD:Tֆn j&L^(d' 1.YNC*؏Z| !2@dQ8<׻qn-?uۖ d.WZTc/Tʽgh\>PpA8)bRY2:s/I/ c>2@HB10@:wYol(N x`̫{f”S)keP}ӜBCbQk6'{Inh҈NVv[MenQS6ɩSM~tl~{-6M7)9J+j gWBѳ܍z@P Ck;WvBۅU >GİAhcRe-*\q 9'mtQ e^&qw~BʌZ#py)Q46KTAJ\L<^kDw9>)oO!;5\싐F,-$;kBFN,vi@ FLm+Wߙ #RM=zKpOnYB #CA0=E اgOH.i͵@ T(u9?u_%"~X4O,)ݷ'H|kJJwMx()d>Sf-:@ 2#<:ax<4,Z O: G^~j0<4[ 1j5*,ҳ~ǷW>ɔNqՒp}I !hw>lFz*ډkh &Wk»Nf)y ~ON |300Q*;a >qgN,tnUX7SPMi,R+w<"BCZ*̀v lcw2۹*9N >0mwE]tֽ{1 CR\ ?0OTKBQ6=Ŋ%E;tyRԝrL g l cZlpYjͳJt~`rxH5E_l4+ǧ-uEPۗފ'HVq٦;Nؔݻj*"}}Fj}`h84듣alim`G∟ܡKEk%0Z8;f0^9B^+ˑ=`ti+kfA)yNXt"Vw~4䨐 zל)XCx c#hWj㒆0<~//VLC&euxb[.ۊ^#^_]/$zƆFpAbe;u-;yaOwlilB|Y@GwJ_NO []4Lhm ~[#:n ײ:6$> 4M۽||9x*|~F܇QMA gvI'0HqMmItT-0Z7i=[syj$ިHEDQH{S{P_[&Ruoc1Th{ <'5thL_`je Sh6s;l|37ϱdQcd]w=븲_RlM;kl繳rze.ULHdsI+abL:9'ιu-^'2`/(NbNT=" sT'ͳl/Ab还yaס&qy)(èwaDVXՉmm_m:M67I&edRB+ s:$ad 6+jT/ѣh߁ K`P!`_#?SU ֧`5V+fV,Tս>V%&y[Oa{4x}iԯeF_GȓJdZ ig=ꘊ]^75Sd^ee kK#W}@\8 cwBe܋yu}i׊betOܽ;Ӷ}?E[bQH1 ^[aHMĸ!?)2xxf:jf~HQGE5$9_X0cڿZZJ6Nxb+tiҠoxݠƧw=[<n#KXd%rcA~Xm|2%m3ѺtE|ޕggf՛hHܥlʐ, xHЎ(;,E#;O R#iy̵QkhKx(0xF@/N}ǂ˱G8w/\ Xţ0N(AJ1ץqS6LH<8ORJFeYXF}fB7N}mS+I*bC b|fVsx {β)0]C-#vkN)lBgc?v_!LzxIٶWVZ>KwfA'6N ]q_452G{yД cR!~~>WVw[~ kK@v; p ^t~-ڀ$k,=ė TД3OY`Xa˖w([P]4$?Aw{z=Sx*7<:'B BH,W_2UgWdz|#vaM^q{cۥ"g&By.a7Kyϖf'9/E5A3s~~k:؃Un% Ex;4wf A:(fWw}]=}xiy{-l]{Nl=?E{UVliEKJJbQ:ܤ #gF^ ;S).ʋ:p^J?.-'H/qm&omQ~oԀ2Vmcb-2y2r=^_OҖƼF "5eNE>ѵ d^o#k\&̒Ujv;`۳KliVk $ K`UmG ͶepԔ>93 x/z;B3I/ȏ20鋳M>y)tp}FƬo.|7/ Ǻ!H{ؤ4 7 +*Q:BB:auv}]<^K#bn_7{Ǖs8Đ{z4HZo%zs;|]¨d= tQ{KQRuQm cPeP3D4wu>]V60ruЊ~ Vp!T^Q@)U7Ksl(=$f?-԰k;Ih1 1G? 
VZ˪ P&eb&H#pr1qbrmDRVj9 .6a]&˪ȢAVD4msZPY ]u` QUYw*Y)1[OUqzzQh*D|DzTBm}@3hG v4n0,_޴5;>G*k⢻e&Xvq8~IJJ '%Kp Q9sf\ #O.7jN]XBFM\F@Ѐ?LT)jU^< 3J;|j+TA5\FFm|ٽtdÃؼus H9SJr^?G1>2%;~B\rW'7)l!7<06Ee6 .ڥbG>3E^-kt^`[9#&:hD~qf`Z&\GKO}xS?\O[ %˨D=z2S]8]J|g;4Je/Ìj/Lu¨PbΕTBY--ycH, ofGb$fu|y鴸^@Vt)1 @Qܗ3[j^*?CwGuge~ t K[gFcï>_phKbƇkHK kXDYc) -8A༌L8Sںͻ2kuMpix2 au:s{~Y|wlah4?[ KO0?èj㞚}~aʈR) uh2>I_򊒵v (MbcV43]6cuXcn]^}V(geX#V%ΖmoKǮy ']< V'|Oi3f7/٦*c- mfna>0x̂HƴҫyxAIT_YlI|L*I)=w1 ߅5Z/EAQ"wmI*#tjqIvQlWCH!FxӥFp P(FeKr(q4Qq7H6+&Z\0<^>p:xMN1-/m/TSODrVj.rc0^vܞPx"ڍh(+ UwM0 Dh:nN@Ktc112R ?VG?oSOc}Lẜ*<甍)dʓAsƶP*"Y|H/r6l5`B.V^g9_ihf .Z7ce6t k8,nkJ,ph-Ug؝)2#'88{y`4ߢ #Kǚ 6$:Xn=x.PI]CC]] PChf‚OX`p8}DF1GAn^#3h).+3mDڙb ڎ% td%\IBSfvԌTx jXY/o2qk\ǕgKK?5Ҭ,gM$~V677$6l~_|׊!6 tFrח4"0iB o2M43*5dȁ5 Ů.Gs$}OʈΙB8"c58ml>{,XX✧?mgHG'b?}}fO=\7`8Fr;f[Q^:HuuRX{ kl92} ;1F9ZAQi V{ToJwfaK%AuR1{p Y [Od ;z,kj[0<&tud>*YCm#.caCX@>!ʋq6KqE#{]Vz9_AI[YyowF{irÍ{Mla=")a]WiyA~02-qi(K6z*x(*Bt q Ǫ?wZpJPc97ctX%XD{^nGWlkzeaȁsoNa=Oa0j}uzUt-aT|XM>LYxĺRsJY-;`1{L%W|N \6N6-!A_j$rPM ukj,},.ba!XpP_F$b-wMxf(4=?~[H\K Eu"?τB6vPޞo=_ƌ)ZUX"lO8)cE` pLmM ǼGF&ĖQ,e7|NZ"Y;eօ9fs01ʟ!zۙXzbDcğ[61/v  $ ceLDNC z&>ڴe+uj{'娰1 # st{i57vY)Nuc{wk={痥P{9hcSA5 .t0LG=޷0rnYE)I8# ~GƏi,Bz2[E~BVX9i/i`v8R>g~As)OݧVtcThZܰ%S jͮ|eZ)V."1hcc#)bcgi7Fh(5fc}NF~ZVܥ7H*^˻<=2xmUm~-;s<ں޶Cē]qj6>i[! wN8ӑSvtdak!~@ZNïlOe'}J; ҝGi%7P\`\}.jJʯhQ?+n?u2 a9T섗-T3.bUVS pstCS̓*7(C ֊,mUW| 'Yk8)m,~xyuד+S59.5P|zjPH 6Pm;O^>(3n&eu*ZeaˀȈ$@6K{1t1Kf2?A39s&zy!D$o<{PGX4p>K˜F0Vݶ[9$#HډQG0ƀ \lPաv޷mZr)8 v^(3ב2ۍ[4KF0 w؅MTR5:&WExj?1Yv?!p7X(A<Hh =+aUx"FRq˰1FI-?b? :!É: 4 ,Svo9-HqZa Ʋc6Ht_;6d,>jjT\EݟG;Dsl/c!:Nx99nك-c._$0 X IbƯУ/eOH6&e2tKz8H-UlPQG-hMqlW` k,{j< հ9|/OXGOA->1*mDtt/ {Lzlǒ?֜O6Ɲ'J@ jbV.l?m>NzZCCRygj Pl*s!?1Z@zeWȆy? 42-;G M^=! F'~[:dSj<5+Ay08`Tմm#^3FmB.,RW] p5 q+A륍ySV=VN«Qs` q1Vwk[rFCBY.vj$ZepyTGFb}fZ%Uj`l{L /.L,A|$bֹ5貫za?= ,~o'tF/oA &XVI h5N\-]BmZks`J: 6EAGReUpҧZ-Ms chRn=]EX[$}XX*Qq–]c suSg+@bsD紶 v[3~V4}d~$Xٗ' HlzJ˫ LaINZ)He| Q{$F$. tר"m [si BEevOρW<LuՈ12an06Z3N#X SІzV}<p^Y\:Ggfwrs+X!tW ѵiM3 "cetQU$ZLfü;<;諙E??msw*'w{3CsЙPr'J^=~w΄hpbC{ep136MjͰWʤx HD,:fOV5ŮюTʼՉKH|iiju00؄{Ո\-Mz[b.㐿P@ (:{eZ%!<6\’yΙoNGU_doj#$3yE곓!Рӻ[:~(e(Iʮ[m{wKrVxɿ`4^갲]gӞޯNɵ1srMv!wu7S)ZM#{8tw,@d<10gX#zRC2Y3p/6_#&ʀ=.4&ٻR1[pL=֊VC&N(Xb?򟿹>jA7MX,wPduJ<Hu'9Zɗ1)줫MDH{uRev˹68M+;_JAΛΛR1RMIUv^6$4KJRnk[~QAzka'pI0n0 ZiF]":zkkN {TzznJ^vrK肷XF'r ?Miu#q2`2 cZwk䔕3dV(*JMko^+ytk,kWxFT(v6"5D m/G\Kd9 k3))A.n8G]5"nVh =9ʥ9a,$ՙ0b~)0a2LjQ'o3eaR, 'Z'7x;g4J ;٭!޽ӫA- 6j[AN~wQ!Q<_A [,^m p^:ZUQuQ9gs_KV*28Tp>]{IYE:~V3o*-6BLdEvĭ+ aQ|3c`"RzUOHH׆KQ[63E>>\?v2 2E; bҲ_"%Gaic M}ڼc{`O~^vl˟ByޒPb+U,§=>4۶9,0\䀁?xY> HwJTb긑 oѲ~uͶ[6)oxQ_n됳k8 J^(9zU-obrĒFPXZ;a;,ɝIl$YZ/Y3Keg(Qi'y R%efFVNrVwa|\ULCD JmhGO+#.m! 
۶UK.kX 玕"?*n;1LI̽ѯpvL[LWͺU">L=!2{޶lRhZ>çB}|pAâyiUL]3zso˜c+`UJfR;Vw̩pWN1<^cdIX UMe^ݗ~*Ld-Ei*+PgP&b_;Ѐ:D+`.ʧط/ldU vv}ogW}ZBC'#/[C%XlND4_+iǦY^Á]zItY]\u~\Xv /Bp,6-(f2D$O-ͽF6Jt½v/kPr^BswrbokhbW2k&x^ܨQkx0AaT[ɦ5QBگY11yKOuK!,J[8Yjʪi㱾Pv  cy3RK2k} a/eegGɡK(1q#ZgCc`H {6(2$p_<̝C?8V 3, Q#36DG&wd>&A XS/,0O(H^HzI4o xm.`7$;7ѸoȘˮ$_ ɲcYQcȂCL8-*au%ap9n6=rι@"IxV aTi iuk'Ev ; #[s!ޠŸf'=aTn=3a =P]mh~)So3CFB4T^ӢχӖVk;|zR38E}zuX>޸dޯa,^LVj-ZGxb#-*٪Kx`v/_"DߎF2~=߁Xk(Li]?V*'ᱛawh7m#J8ѕm͜& 3`!B_]bTy7M'&c 4/lm#o%AmSṐTQE/cüVE$Ey|AgJ*gyS6ŇZT*oA{6s}sdnBalFʹv3X AՅVtivdBw,'ƗLI-Q8*9n`Ek0V@jRQCiQEjҜ>tG {e4akN-}xaj8yCqvď^ck3a#Tф| ݘ\Դ)ub's9x 1ž_sSяmBvrf1g>&}VOR 4ͧ5WPd `7'X #;nUm*\`b2-r/= XB@U~nжs\'/!}m?綮H}kذ$+۱mKܷY2|,;>Ao=:G̃-iY<5ԟ,&ɮ_nyiƔ^;eNvG!| /k.*tE  /GLz` 6̓ yi," &;LwQ07ôƩ+pUnd5L/nW̪*y30MyI7\1ꟆB5{᫴mSW4) `x(8kTno1_:tGqY#,Bgy%Рo9XG6>%R`%J@2ݬAPqr4WXebn%x͙;g;eAA4yDda+NS<{Kl‘e'D9~kC6_2{FB|=P§xٰAu& @nUyx'k{Z=W˳x_rʎ؂>> %*ؗ:iNƠ׀Af<0צ R~N Dl ]b΂K`%Ӡw'vjѹH35yr!(Ly%oQ^oU_t"?4dV/\G2U a;lsgOCmT Xv~UApw̐FzɃ]Ʌ&_o'!{{}4k5@x$#g, 3riXEyXiٱԲcgfgE:Y5@'ai/(Y6կi [jJqxf_>-?CxHAZ3;yV5wdWg_^2/P]yrKq_t\rcu{/^`=؟]bL%OL nqW؃w;PAq#C:,3$MIzک/Ɉ a_n;w t.Azĭ]fAGL0{2FP}R;KaB02]0h:joZ;Re6T>ufs eVTBOfS1a2?Dg;v`uV8yy+mŰD 7h(;=s#uVe_;|V->5Zaj2frːб@v9]DSVe[Jg|3jY) Tè$Dwj@oڤ\3i?D#\ KiOWad4OzQT7KFo4t; U!$藙g*),&@CcO_ƾ@d>emݦsFF7EGm]R>8ҟ1pA{RVE-΃$}mslpo=\(mؿtֳ)&[sNł )C=xrb9'(<ZT[8 gu`zt9~`r޹X3=Q\=Ծ }nt<^; ,`t"Q<亷|cUzƇ;|i!5H|dk!{& %6~pZmmviq}-Aspf_X]@ }'z,oja 42 \ó`fv/oP:6@ߖ=OMla%kQ;1|q3l'+-OEnEOUŵ.c+#|j74 i?S_>\a,e9f-2g?8h`w٨ ͒k;2i7]O&O eim ui.SS6JSccIeew4,n1΁6pߍ:% Mt{aVV-:}Z8}٨f𓁸soI0f&PA\iDcpkj~+3*ȍo&'[uFiJeӞdbE.:zQժS޶*" AC7.@+A!J858H0L+xM,.7ΈzGoo<2CbZVۼVlӋ5`Kxʸ9et=<@F6'NWc3sqZjTe9wOfXZE^f ,4[l޹2sOso.)ىSp}" Uwkd'`=r%y>q ʪDdF'MfSUpתCh>[v[s\t}0%$T~arjF;W1X&[.1%]Z^<`\~{[/V!şk~ʄ9t&O>۵Ki:;t9!ϳoRJP}uX&R;i.j#}WV^@blr2[eT0pӷS;;}g^`#[{Ξv;7 ΍E;K[Z!g8.f )ƣJ(PNJ-)֌*uTz1QU[! &`E~7I)wʖP,~qcR{ %)~<#jkU!c)1GMk_x B7lSҒ;,UYdu^gf|zs 'rU f_&@E\+Fp9-FH61$X5Ld2<="~ЬԢq2c@:)0c~j.W5Uqs݌j`!vNa6 Ul+T"UgvEc0 MB:ƒ!BiĂ8rHg5\LC6:Ѥ݀eA.ÆME/ǍY)/էm lwGFCL^]!Eދ dΫa~~^0_P}xV9.;?OJ b-/{e}m`$LY$!CļYj~˷RҾ/6q,pǙTW$[LƼ&!WC3e;QG˨w)$e3 #iP|RA,u9qEf#C)b+,g /T$.AD/\Eŷ`?cB 6/uR|`fΚ-8ED|+]͟k﫛r]AaUeHZ;! -Rέ o-†;*antq^`978)vt;k}HС~}z v?@Ξ|9Oف B0~? ĒdƊR@THF0n]Ί`LlYgm6F[:W>]EASQ3]^c3xS̃6F4wH` ="Xg|G I,Eد&a}ZV.ٱ#qt11g-(5i\Zrq0#/'}/6K8pN#NK|i mȆ}⣭m˫0JXV! 
Kx0Yٓ:Sɴ ڊè@|Y,a 0{A%޴b"ֈ(W߶ͣGps{\^L fL0QFO5+o`]N@j</\}0ζ Pl_D'x,Rg|n3**WSjvqF.ڕ&ɏ}w M }RjmSx X u\xgN۹wa[a+$O!.L -S{ens|a@ s{dmZ[V]fq|F|Ƚ@"IPHyJAwD?>G]8s{.7ek5JXC+W /S;X8+#EpR֐3{ʔwسV[ W>Z<ܿ~ GMkY..cx@:=7v<h{6P2l+u?;+3nоaDONG=uj#=0B2J̕W D>D΢yQa`ix /ȯW_yP)s-&ٸ pBqkV1fq(=mW7+1-lYTdu5%w08i*ަg{ĉHagcd5b py[;ϐG"aܙ,H9z (&Fsugɿm!՛T&@EWA@;9iHӝޠ%{d/KQ_Sgٞ[0zJܤy!,֚KƐbi;M/U٦<pptZ1,hIDui'(E3)(&hУaȖ?p)ٖ>cGcFOwcU&ނ y$ƂNF݃#-yLh#:޿FVs(yĄK72z DHpP^zk`Y`.j[Xst/O`_cn,lTR,Bp Aa@Hv'Q@ |wK|.A_̸,YN$*3f Q qsW8 ƭqNz0LjNmlf:^" 6Q߶}fon SS7*gfʚ+QkSuVER)c / y\¯tj2uњI*v՝k{cxZ|O;̼O$ֵe31z/QD7OaLn#B#)vRIƛmK56.[-bZTSMBUY̜]#CǗOծjN '!XQ^&0 g[ܶW`jMmܺ-%bpIM>X<6)=Rh ʖ؜VEE䥮9Kb1l뵫U;iё[21DVqJE|[?g~N4 yWl&ˏ Q+Jw9'L/R N޲vK4t9NčY9YjȠ]+a4X%% 70 Ȝ?1 z"5@bI<3?w=#჊nV0qEgULN+NJq2:}%1 yP #[C;@ú|;Èk}BțJ,vlCOROSRJ,(Ej,6X3Gr(]7p$sx]tIoQJGXYܛkƨaIYf ,޹J2e٩ #153d lVؕ뜎d]1jԔN¨P;;]*lCqЮ!0!`o>V6n%9:.}Z4|ߧ zxx<.3\Ė˙%mqASz5^Zbte+0D0S<'p$`5_v]8 .5emX, ti 4%C\F Ai;G3cst@*=L6 O%oN6ՏuCJ&Ϭ]2C>{_;" 2JY/:14#i<1ԆM&?9ʯ r u̫ebelܚE-lcsqs>XV%ef>v =vbλ;|N(u[?ס!~s9/ed;ci^Ŏ)\vM[i 3Z~&`@*)Vf0Yl$-ՐcA7{u]Q{s۷|sX ?\%W5x\EwF:b6 #ɕlσJn*+CG6LT) XT_J+hX9.[xV)i(lEqHx_"Oè=_TkPr܎RNtV>S,t8Q+i c6t&1eKޜ^pvlLg@{ĶˊzC?rLMu1޸:8œ6}`{@X3-w$q9VRbBGҜ?2aF_=[_u ̞ V$*5]mH ^X`0<^vЈV"%'~X \wL1ը) C&]n#+~4 vDv[:E~|nF0׺'࢙k&5̖:$ (avSN5cߙԖi2`x7ADt{$Eq1xw#M 3+ZLQX`K|G8I>sZ?-q<*O6zEuEgL5s8UIX۠ߡzB~k-atab5 Dhܼظ{Ĉ<2ICHR/cǞE9R7g$ȪGZF;S7 po3M)We^1Z:y3q&tH3}vb]B˸. FuzP r~jAB,ήʖGc虈TWfxl( "Z\_V/ u ˇru:()8M(jLZpc66wu/!^b2$?]֥5ڈG_,?(^ԝ>|]2MOs0φىw,iGlp%\X9EYsϞe^5I>5 QNqPm0P{nKFuCͺn1E9u~RoE~N%0wP5*+ޚ@֝6c]cpĝNbssu5IX -bajn5It/ل[ QhlLͮFӗ8%(ݲXm!y{cZ+#@}`&:.C>y_ 0ԎלDCȋuT|Q7l{tg6N6\.E1jvpZY*=TXmzV.6)8$WS-JZ6uw9OEfI*\ۤ8aoopuIm[>S' 0vm@E!) ,WllVR~rs{Q(VWJ%,ƞ'8 w~P--i3(ܦwsQsltwoLa=@%DG@6ioӛ*[8NT Hٿ~-b v~+0)˧aM:0-~,3~ڳ蒗%/,&,a \/.N@0k~F =Ϩli Z OǦo/>_L M۸ק֚z-kx;󚠘=s+lTX8xQ>,u*[?HiA?j?.XΆ6*U44|FCwvpaad᥍i6~%[0ΕH0 1&|uvД2Nwe_clr^CnKbdpPtiGc0LdŁ>N,r"Z1朂v [o}'̫:wh-ycIr^].fG!/؏ $CXPA)T5V~- D%yb"F,:.3qDC 40hq6DڄX4v9;XoՅmd(1Sg3}Wl>ȃ?9}u\O2,W9T n7d﮾LkK"Ju(K>]ō}AyX_D`398+K |o8ijcOe(R㤡 %t=Ld΀1,ƟW`Gm 3Ǣ/Wheҫr鷢xؓckr#BSQ)!S:$G'md?cg-lFy7lN6>D>Z'dY73$kφ5n`\_.7JBupMv< R^\ݳ8O{!˅fZX沌}6ʋٍuRsdmؔ9ah)&g>JKֳ&:LVć,o6j~Y3ѓl;{J%iǡ,Iš­~sEZ8p5$^ NͿ Q6? &xޚa¿H%Xǁ{2"ۨ E!\A f6jvcȮ`l\>p٧}:a]Ib#OF^ @y3ܹ 0pw-G̒Rɰ]sLi\ծa$LՑưfC{뷵ǖ\#n+b:Dn4#WZܙ3+0q-2 \2n$<1%ȑZ;EgwH;=mKE_g o f7_eP</t-Ŕe~(ƛsal50|8h!NM;Ooc'|w: l^Ěqq2&V^olL$OkS A9fof`z$c8S:豟qb9C{|2k8(JE‰u&Gߠ2~$W08 0|'fa\)@"/فh!]1ݪ{b5'#Skagb2* YҠfV+҆rr/2 9h&2uJxFun:0o5l W[a\PNY.ff Ww<#3R. niژK5C BOJM}RA'U__Gk}L0W-F2&Ve5 g>ug/kl9X&SbճgZMmMUi9A1Y9Օ=Zu_5~9b\old-bY0qB,/|ɽ2oiڬG p'd4\0}6uT\]Grd2ՑXSR7Mݎ:©?Ms'M9zSZE&jjuŜ,UTI}^Iyll>pJ{%mab''cgbCdE!`%E<5־m*b{g*It0Vc,P0?U@̌Yiن-aՁW_οaJ&XՑ2yAVxlїBCJl1Z8:~6}zm;se{6XyzCkUp+X7hjj8Oi>bP Ǘ }XNo=%V#ZlQ= dm58r.2e'Dr輔ᒀez Fm)ܣ!-f8@ՙ~힋~GbJ'U1-]/C4$1<gvhWyc0賅-ru/]u {'ͫQ$x͂US\6{ 'dUF$#DȪen3)@?'Ɩ/aY$5Y3_ bCcZ/A[ s1l\K}Mo!8ΝDQ`Y?=+BfU+Ϋ'86_L($m}%|h/Ϝyo9XȼJ>1:c oNt.f$:c':ϓwi%yK2WyQG|[i޵Sa %䔄Q4x;U:G֭rV WTc^?%| <Jv1'| :lx\\l,}E2`S]J K.jkvoɢe^0rhy nO|c4z CT~)_"_(] jCn+SIIҼFb|TyB\ źH_FA سlGů~r)mZL.ݯԝƻŭfFęYYNN T1{P>9'[bXV:5̜phxhxM,rIxfg1۶w8ikh#]y@mpT%=yA3 X&{A-HQ:X :h꫏Y<1U5YYjנD1[ @q(=(ϕ[鴊q@;cq |fjxл Ri҈Q >wZ>?w>9+%1,)¨K1zH'Ȓ2P\8S8H~oB.gTF9&ye^rar ŎGc908G*Z<"LE0\aUɜW"/#oW%keYTCRVPw\o,.«BLK3=X/:4P錙MP3!=AGV=S#pgs7)k"M$ :4/èL![/堄3iL ^ T[q,ߴ@}n Z]}uHG\APx9oXbU.A`KJk"2PK"炰qhT['k;j{>̏8!L5bw0ԋ?*쇥g*O4YJ:yVV T%B(iMev3o{Öl&3o'=¿Gd}b-J?O&Z\+)i+oKXay:Y2\!(!, W8=p2q= 0ܠosFqv]|h j=vNÂб.`b vq9d5^^U3'#7}9ctY=$S'w8,<䊏3DpRb[(wzH3S bCw{yDv¡_ݯ,=$M/Dk&#$i*y]nG/%8[8UkaH:qUh'nR/ xn8vOunS%AHy^UF 䟈W,B oIEyg v٭7P>kh5vfZ3` QyK|/QЫ*2lԴs0 vN \+r#H5q~:H'cv!&Zle 0"C)ZKM.0. 
ݧUYݨ`*pqGRIrRR)s"'GI'0 R/ҭ+50&]KHL0x INWϳOX YZWԖNp* \颭rm! h=5ʹzY8YRQ+#L1‹DE\1Sw.߃"uDvWׅ1EXӟ4ޤ^M6%W#6OH<ŏ]e6Wrút# qZbSLpLJ:]Vj۞%bH q,;= 4Վ~{T2DE8c|E.{Ev sWŸIˏr49x"xot.{_(נW)&o7 e5j )׎W^6*O\3HX^ѷ w*i$ՑYJu_ЃS-o=`ğK:;9hYyRyp*v qg#a.#֬o'L[C`Rg+ZSNvMyTubm?&ġjmڡц*0'!b[ѹ7t-4b .bќ? md%tYFZn\>@Q }/3 7L1̗268,1-Z d{ߜK7y ~\hq!r7;v(?>}jtʃ7q9c!0O2qhŔaȘyڼdz lzwW3!I oؠT!4kg1 rS\:?/vbH7GSKq\>;=LjF~PfJP-.[·:u\y4uPwV"Jp%;"h "[ˣi`e{؄/`jn?ԣyff]@nݾmQd dwnxo(,x) Ԝ59YR\ӻK΅|Ăx~Ρ"2yi W<˸l[w*5_\&TT7賞 K~o+0HhWY^+-1kBOh)%6팟*{bO5bl ^U@g//ns{ܙX`@ʙ[T}S 9`G:c2 "j[9+>LU(=}V=puP@9S"(gPl PN>},G> ~o[^ߚղHXfX^KqZOO&6u <#eKObiJ/,b55Syp˳ Ef*8a.gjw({*/dLJc(ܮVۓPҧ_pg/O <Y(&ޚOŊAo<l Ӏ!li-!̙FO!'/:#-'}]2 q',Jҙ_v\N9%k|(&R'!DjQv'Z9.ǽ]p{R zL|?kDQf&"$IkaeoDJg=34=^Ndjnx#2%$14r7_v9\=&g١P2r9(8WL氎aIjaJYWvalfߑ$p8iC,jLL0'SpYkW2ˤ}2H\gʖ9_Œg,ZgOdMaĽ?4 5r&DpsV#0#>FvIJ %xX 6 cxvby>kO,FR#ӏzGkxsxOՖ&NޗgĐ~)4@:nO}~-!oݝo){7&'w;5)s/< 'tO ,L?rj1FxM'wuNͼߌ=1þn"FE/M_Ex sΒo&ǀ\kX@W7*ԄP3'-+e{PIdҢ=KXC[`;l|7t cz,b"*㓿ȲM)7~+͐{1@6"G[YyFA dK=҆qCGoS\7oy 3ި 6 Ç3V߸V:bs&'Gfcz׌@'OcKn̍t[`;:ޝym98rjT]kgdDڌV;Kg&@u?_;l)FsSd:m׮1rhwz hO ;AjK*NB!N?@ݞq:+S6Q G&H8DCFN}@_i/uN^-;O #]TV ^h_TR x4%%d?5F{{aSDC9-ǔ%^p4><۪W2|ƚ^Vsza4.4ehV5we8G3jgx^xMϮh>JO\q_ '_J=Ow62պ)߯xt '܉]ȷU^RM?Xu7=\^BgTfEIXsi\@Lޝ_qj巊˦u f{B.@;xWjN@J )}M5l@ Dkޤi}C_ SAL}/08aC6г.UplèFkz1՝Ƣ{i r3p/D Ogc=1VTуEG$ԉBKҊT}OiF܁9y+/M !s(B({:OYEvF̃A<cC:bT Iz5ޯ +w"V8r&4`m!9,2N.Oyivc~I=*\^ǯ' nRiBOYRڜupW].~`;/p5Υ?14GjKV v'&_ҿ姁%y*xy0•9 ,GȨ<\ (A ka6ol=$;Z繽LW_r,Q>C[kLF`sUSXQ>|\~lk 17!`:!><b(3&@Ϟ͇/^8p,<آ 5Untlqkm[?݉v#4vyoI,S nըY~vMv\FJɘ[ڨD./K0(p^ȁA<XN%HA\_aL̦/:ӝaP6cnDl|{$3-i|lK7,N=1Up@8k[W8ӕR_/KtJ6V&'ʱxS{fxn͔p[n]aRRhxBVg&S~ ֋~ʀucoޜY0Npk^ 6}YKX1$t *՚H T`I7;h ?R}hqcaznVS(}_Mk3=ɰlC3?Ӑf#ҩx|^ȗh''B';4R3[ѳ to DM PӓGΎmG*er<͟n+(/]hw;)BAC@'o=~'};fs<>|_N8fBw4)'ّ!'Q3HJ]}Mr`$u$L} rɃ$SiFBVgeSE$JDaIs>h^~nX"M p 1;W;3M1MC[;)ə uqA!mky {Oc(NJ,)J2D39.pWx'[ 4XleWkQ[E29Edx1Sܺ1Mk3?qp `13EYE1Iv2$%'iZ2a2%0i?Ӥ$3|ʘʘ<;/tÔ6L+2h.NWi*'k!bL]VܒKņ4+?bS^ҍCb(,|f'c:Q`~M*"c#"'#G6F ػ >4E_Vsz0IW9qDF[aTYH-c^<^Ac*Km:25(Jh7q=0ێ#F箇CYc+Ru:FQpX#x4cCD9Nϰwp =mW|-N9e~ ]W'?_|t~|`gѮ|18EV^V63!sД`dr{ص4['_sn=I"_b00+|_vÁUjןh>SنR0,u;Yxnw"D/K)+m{@PwM%Gٚdzwn5cNy|Nbc'cAȱs-SPӬUd!J߳pqH6ѐS#4Tx|Lqd'lǒ"paRHB`9Ulثinz onsN1Ԕ/ݮ )^6:-(R ^vlq,"j-6oFIӟH3}]nf]wdb @J ؃4a- RdkYRڰ1 E׸Y oe˼_$M3Z,#T>h,զ{#0Dqy dWꏟpfXp_Oq]ugrI&%9 D [Yp=fI'|plU(Vt{|h䘷1z=KAkc0SȽc!I<#O<~Ae~F?TڗgN5_x|tr &Sa_LA?>;.~ sMd'ʈj`18O4Kҭ0]{OiuaVj%HP _]xy&WtJKсC@NЕG\zNӃ.ѐ>t[==M,p07&|l9( 2q9!Uu>-<-޼(\x~f_9D+x_84$%q|7݉_BSvdΟfkYn'G8 T<ľ坽AدL[O;ˤX ?'fМ(:`.ź)F=dIVɰv$I'L<8NnY-kg<Ґ`X?ݲ3HoA|H,LosSnmmm+$IAiYg 2k'ö\T,lUx#!aYZ SUrCRe7EK~>$YO_ El&rǦ|=)+*KP?re~R:7-ky#)O/yח!YmFc8<:WD%=rzq;[b~%2Pۑam]j  i0k|uGpa*,agMtN`l@\讛r.vcwlR@IZ^BNf 8V` 8d&YQ:;P?*v_Ft YY pRi:8ML璘~@k`+Cw`_ AØ\#i_O5;RƟ+ @9pn"g? a'cWRyqL }ISW.5wV0Ul !RpBSA9)@ yޒ [p ΛyOvYBTwOI]8qjJ*Rf2b?=Cξ? 
:B҃Ts y1| begtɥ|ՂF] mйwo~˵ʢO,.+ңO?}]_7=hFzEl:PCZ䴧j;}>1/+yP=W&e3As`Ll|0pX=pdbN=!Mˮێt4OR nByU{iw,n!kz9n4ݖvuc0xK?[ZNQ9}giP4ul.)@jYyT-5 \~oew靥j)M;Cgwȇ_ O^DTݐ݁#i/tIŴU\?cB Rs%{VS^8~n69Tj(d,;ѥ}\`PA*C9_Cy@ٯS4+TBvDqMu|z܍] ?~o[cVܜwK&$+'g3D#@&?AAc_')<Q{Pv^gCTb.(!hD'%]EGI Lm)7=,5*R7}JSz'c1^P%YL˘ZdQ0C 3 (aXꊉ" {"U[IIBt p8eUκ2Wo9TAYHcofz>3dd.K Zj𹽕AmZd .IIUumKnH!MF_M%Jz^w̟bv%S 2wLE&"V 0u]&?J_d6y;DVBNb8LμI@DU;< )!&C׹/Ӫ05-vV?;{(Ac+qMfHepJ GK#-ui;9j۾ҿuy@FAݱʮ][)K"X cE +ߓWnܴqZYxdۣ*k W8l\[#3Deޅ]sv[Hs4DLO3D7NW+4VмJ]'I,16L+{멅54DZ0_70o11+{o%YFD^9SWH4.FDp`Hf&hļB{!k\S|ρ^KFeP _I_xݹO/9VB3}lL-C%,Ox9'A#pK(@vRV8 ?BZ3~F+9_6Ew!yk L mwIFӕ۶AGĮ>ϏϠJ2:]^s$auJhHa}bуeN{6%(nZ Kix TED9 \I~xu؝I $ccJd"3ABA[ef%lȊ4RnPx5fI展DV_B-F 4䊙Dx(&'~g)iX\0[KP/9 n1V3?}5Pϒ[dwJL"uÓdqze`qBh1>&4 [:a\8YN 'q4 :'H&)7cPV=N&]@cb@9@^qL ,$giλإ& +DUKcjr4&n$;d-[$ʌ+F)oЪz`^.oGN#owB49/O0A|s$au6;b.*MO弞Xb6b(JOcgՅj/_Ӭ3P~x7A4KSG\q %j6G!-H8Mؠ]m7դZBйf][G,-iiqPV`JA ӹ @v*{o#S+sa8+5\ ?2YQ.kor~m.m->[%NY W #>ܨ^V&>ƆřFDLSx.: C}z9^aʾޱlWe1=\q]Ӂ{w39aCO^TbScl)=*}!es^XGyܼM*I鮕÷S!' J>8N jNŷ2V0G?0 }6*~rstC( ,jomcHVXVg oJ?ڏE=Y?Wn* \ k?D*ƺSfOa :Ф93%,ۍ;MDW'_V\uYR^nަ*>?Bd)Xh3 syk7LI} USV {|+hO=}07w;eM z)wX2|N;OE(56ݹo;se?z%E|.H]rqwO]ILg5&Iw`d~%̢uj4yuNr"wW'Ƽ#$tvk>*OS?1%=9?rd3Ql tSd)&)X{?[s֔Fk]F6!vW-}v/ 'st?e<4XeyI`$x$m;e'tՋe6@gM%=:1&ByI]E{:Ȧ]P#)`3?eI-ҰڢT>x=j"r]~AzES WKtە;2e~=Nb'%E;Ob-%oRS3{'SY~ԫ gP_|K kM~G >Cn>_NU&3$50o<-TcfF({k͗u-]?3|]熏bNz|-gZJ@E+P|n*7_H/tNvZ!fr~Ms(Z_yyE0xc;dDIM鑦z~|˄1>]s.< e?ƙ7S/ m-+joɈ|+&ܴ/EȉN:\=W|R~pm [C `=Ť`,ENN]ӷL8Cs2E'l7;'d0'W f,^2 wn=ǻӣڿ49Z[טiBǙ骽lHden 7pv30sI&#RNIla6-RIh'Oʑi1٤f _I#0)JAEɄYf'vu]xDgF2n7j1ixNqnaJ`ޒLQ[٦[kܦF!H-.s5̫{\߂qr>[pꡆuP>yzH~^i0,XRqe|"dS_Y&,$^n?6'K>YXA%12ji3: ማrިIe܁G6ʙMٶ$}`4$^Ui]bO-zZor)Ujie:!&lC$Ko8}s+^v5/ld%$ nq,]FXDey}+lzR eUjm\bsC?R 7lНvCƷdgg!]<98q,)6,Bm^mf&2{v8-҅kᗓ1-0 \\$eZH?8Rr)76$b^/KTN88cb6mfNПtI1ه7;^3mENjFYPW_l9|3\2/ 9_[f_S{}@׾u'}E_ .=Zƈ\T+g79D~achډ1{4P ]!֧/" ^%\VKt87WWנ<U`c2#%s>i⾋L%i$a q=ǠfP,^Ʒ y阬HґBy\c:6z?V-G6=E>8mgj3r_.vcs!b%\cm<̺x%_c4 jpPZdivFs' ;yX o,Cg-o1C,f96nt v(rgj=_.5%}beod0tAHtq;ks[씂pOద:z/_WHCFu>g^ <Ҽ$ה:搪x`//Ҭ_9s"x Nk$ؼ]w@1 X943u 2PFGAfqw]yd4E”kJ"5IE[9UcЛwBQ$o !6"tS)"`)%:Ԗչ"8D R"C82Lفf%ʄTZˎx/?vZbN:dH|6*rz@)r`8qLT.:V`Z% -׮ockN~oV>֨) *#H> Z'[fK&v,Ig6^'cQRm\ hZā kpG VOXq6Úb_v606pDlzp}Cjߡ1n5 V ꂓ8*]^ciC>'lE`hҩ!ǰ魥4EzK{Tcۉp9*^Ŷ-A/9o.aLhxtC'Όd֬<'U.;> +WNY-ص>QuJ 5L7Di"OAJ!4~@n!l9\yK-uto, 7ݰU<5 ].;:S֦>⑍]3J^[H9@$nkz7A^mOk2«2OŎCTCz(Ls[&k_L6gdX \,>^QA\rۦW6y,XP؇.nN9ad/ [,O˂l(]Yc2SˣULˑ ΗP4nc08U:Y޴Q2+m+z-kPrnmwNnݠyp %٤3Ր\%Ⱦܶ _iMwȡV eeo0!Ě g~Jف+Ler=AM1j28:+eTDk`KFz빾>!Ǝj"Զ~"o1.ޯ?wU zL%pM܌20mܜD!#|G_ U>aCn @ݕ 6eŪj!YSFƜkM5Ieo;v1 r{ N|`1߿5`Ӯ_ċ4~=l#Bom\.cBºIiwխvcģ̗iАc)զ<B[S9uC|[JcyeDgѺS![gƨkɥTI._9س-c(|1>Pʟ^7c`0--ڇ=[:!$jXw_Ş0*!$|X+ys?n w|s7eaPA눖:cvJ/!N'@Wuf6;}{tN=y ?:*yS(Fff}6;LGjDR'\YoOddRbNҰL.@h | nFKt %[8OҘ6*׉,sښfC_ݾ'uafvEI*AU}5e*@tAe_'rj6 /}YF+uF4eěv6#Ӗn8|U0-LZ9 n̔~}d, k|YIɘҸ{t~)YSa(,&_X5zy !LKu%XnayM(ò4o{qԥ*4A SdtM~sgߡ-;g_'e {;I>8^% kk1K2b-c-4O& k1{x!$\&:y;v ( s0 \mn e20\P70{=SLl|c4"lT76M/ 0wl-)sa88zsyukP{LMH(Lpfmkޟ i_I}uLfc&𖙆&JgJb/H2CI/&_ch;|{W}#׼i.{?^ ~};B9y[sXJҞ YlrvjZГۣ ~!ܑk1ϑ#k9;(Qux)-Jc_•o7*#CJK @eC#d1\Τ`b)G[mJ].[/U<ixGKCC%Kٱ25Qt@3KZ߈]e+&ӡ& er3/Vv@O_~([JW}Ltף(Ui|&- % K9/e9H*L lZܗ5 {H*3EOd1{Ϩ0| ᡝd?db?^j"(kj!HyuW?DŽl̅Ίq)eٺm~~f[ʄLA(GN}T'/>)Ǖ"z=7ZNv33)nZm*5sdr9XwW?N)}>yNoR.K@53 a=dJpޓ.z{ $,ev.叡dJ =?x7~i{)jguE9i. { A(PtgF-S%e׮DL^QK_I<0A8.peV"-֪g&ۦ. 
Ŏ$(%B 9OlyU75YMǰ%CxgK*Z&wkk>aSV )qƂӷQ37i2 xoW:3*+"詳SSXȪw]e4aȍƫJ@T[E~~ڇbUtAc!æh^}*8toqA8ZxC[sLuk=REpnrOD`';@.D77y vb+Buʎ NǃԼ,& 4a&f3ih(ENϷhJPC2n(O$:,+7Cb,N5xbV&:irePl۬.Ch, $Iz"5A^Ynv%G uQs~|& gS켨Kӗg6`~7 /z&UFX*gYl 1愈ti!@kq孇`D0xbQ^WG>cvޢ@@f!iڣ$ě #$!y9!,sd.*;g:mAXIF>Z)Us]Ts'ٶ!Ǚ)W{`~EDg>Jo)W }<nSKT=;QG^Lxiw D3;|$yA LNrF=CTVtr_2^nf;] GSgջ66H9rݽܚn{ϩAߘ8:l^)Nl_"e~v|,phcɘm'v>>G+ܴs;5*Z |VOV HZIXR LSck77 0i >IW:hB.CN>oShKT{T/@7B{LH^({=ZO0]sw/*e-ʒ xZ.xR?@ˣY &b>}>>l(St&.`͸M]|xT?kUfaKYbcU)]mh_}Ӓ6^ &9$љKT{>Ue괚ml 6~TivI4=H!PLщKMxS`{,zL9FS0/c9*:fh~VgA`7MÈD!)g˻76޷_* ,-0΋(hZA2zv Ÿ)kN(R@MΪ2%Ĥeu= [ Kd$ov4ԬIgKo| 41o ~zEpMƀSWX[_ZMsIMs㩥-9}<h{%t"Cx_".4 XT7pPhp,YӅ U*Ϥ90\֦֗l# +Txǭʩ9]%DžL'!R7kZV?u-x,$3]䕽 !rsΆ>ϵښ[]sʬW2x5|29h/e5I~~h3;k[N󩒇9z$Dj%bwj)) ݚFCo?v_"k&ɢŮTaP-X8=*n!sG?3n ukuu)E{[[?'8ruAkծf`îz,>&OBZs.V&l51!;@VOvI OCJ*yL00I`9.",ްP3sge?>B{,)nR^xŵ!MOE$3bzbW%ɤ{:OM@ah0e l:هنLpS&ÍkMH sTR {nW1+i^s!Uo[8Azu!o|e[f<-ruWSIDh=TKBk'su0š֨/٦)z=!;RҘM;JLYsV3 <ϟ>%u0VȚ(Ĥc?elo9f;C:ƕ2ľwxJB Na1O95.-^U檹K>t ^ĊA7 MS-Gf4HBUUMݗP`K-ȈSvk('4 _p 0.t =CYq:o\tUaNXxYm)}0DWEgQ\^<탣6t:9\_|i:- To)Wt="qߘudacfe{Jl(-9wv+զw$0,Sm󬝡c^O{a{_,]P醝I;&;h=yDtQnYڇXT-G!ZEDv;%4D1lEٗk~qfRN\s$[[ֿ8zeIY[ #<˸ŹaSM+6o&d^ }%LO'2-7A:ɭb93eo=ˡb"9[|jsq6_Aɴ=[4WXi< >4uYq'@HR~"1.&W{FDA&;r2T)WwbO <e'wHWδ_ȁZLP M!]CҽC,̮);s5887EyPsz1tH؅V_ԂV7_)lD)taJb$ c K;SKt:9S׎x//MSn4-y񟺛rgѧ5dxB3J/U*(iH?#UJ?}ХIhlO&OṮ?*@QQ1|tM¾ "c_N-0 `w,.0]idRlď641;^9\™Tubb+@Zʅ}_=Ā|=8ږuy_X:,skC>"QZ :4\~gOo+|@Jh ܆ϵ>+LL6bCHo`Xjb~FL>k>fyL ʼl&^%o]|yǜŢe$l&}|F܆R|'.c!|].̡u}#ל"̃?slX ͌yM\> +t}P2mP5βJ͵[e{~}-E|;63z&xgrWl8I51˳Zw[d̟鱆fuLMusI=bL<=fN.2X͇$E0:mA&%L;%c`[ݗ`؅ң $ja41ܻX\x4IB`25AAafArL%vK@1oп񭤔ZƔkmNL&g'W{Y©g7=2iy.v1$5I_G ;_@*Kq賷\Xzͷӷö5Ɉ 3A;%ʱ= WU0pj,Ɋ!b( @7F0-Pd}L}P{^6د$[xPAg)*wͫ<6н#ҥvYYvoI=}V~tώ @ڌ *jܭXk[|Qѽ N=8a/(x[K`6J>䶿4#5|b/[bcgawW] CS4,/`g_ƕ$%4Yb[tl=?N}t yСe=xލ)J=Lk"غjW4 هC}w4_]rr{zۚӆ-F,??\|LCs8msa./I>VZ\N#\`FWӲ8#l/5ޞ3adMwe1}9[PYq/ }5#vȶ1&$hhSBZaX|FZW^$6l+Zcl vpAzv_aҰhS4wWR]څHvϏTPQ:woGZO=C vmgzBrs3XLlT%A=:ĒH͏So2 BNd^.0ni}YYLj1 `X}䍓D8TR Y?f'9֯m0J.$uS]Yԭ| 3P]LjU6/7SQ$~UE,_:.љwaW޻.gSsɩ 6U2}./{(7n Y$ ~*0 J%aIukF챥U?=q5aE[GM7D2V((!D/#rGһNiunbA(,!ඵWrk߾Ц -Γ7՗. $Rdxk:}p33v.ZW)yU[6 *OUkeS΄ɉiV?\9 $湬cڝxc"[TA[ck; erdm &GF)ðz/F_ `D^xXC?Ecn"#ILmtbp:=lr%oK`w?#(\S?kX7I+ۡI? NX3TȦU [e(7tz !FۑBH͘OƘ.#-n~bs$/ a 2 }cPOcVoGAQA=c&\5(gh1C IMC닞\%p5*f^s)\ ֤BX# 9W?W>qV^9PCjwtR$nnC9ֲZZoX %"7 9}V3hl8iAt;$ H7IA+?o1NzNJ9GSX VPuz~M4G*bXfA6+;hGPRXj#:L5/z0ЕT7]L LRulx teZZpbQںhcQpS׾ )2A>r trl0HVnHNAV$6`|x8ssa)W}Q94ГͿ+%U %+*WW7IzuVvR&Mڈ&/N@E[YJrޕ7?ؒ5\<@cR~=/f6ɋ/UP=ds{/ O~5>X2? 
FS驎d26P2 *E@VzgϙR oT->VM0e 9r:8tZdɆb˯,Զy+4*x\\rgZʸsTFb_KhsԵ^*렦&]zKGx |&Rãi[c1_LYn͹s)Om{t5KBYDrcUVȰiPپ۪gtM-{sT X`.gpaa3t{, ?{ȪSegG=">ɇ3$-R\Zfd+} cs8*SlYz: V"A}f8͵49쟹ݤ#D,kSz DFĬvpDtGһIk?)Om7ES~rmmfv^V4}Mpb3!fΕÔ p4Es{"j;zU^m͎K* &cs $$ݖ8=1U6eÈ /DNN;8ŽMAwJFV,ĩu%O=p(Iܼ͌j+S'zBk>0]&3np$L@=Qkt=[ETYlO͌ YHb6X!Vf*޻MqS.]J%lJ[Ywfnї`dYQs-4)7;?k氦|p{o4C͜czf.޿-E~V+|&L2pB1VWbDS PS3H:*c'˶RTLc>SܐSM"McG_6iM1`A.ߥt8 \9eGU}glwh3g.eRKNCk~KiD: fj_>U?5"ե-'./.noìߔb>ڷEf M8~z;=z^1}qn>!Cϑ ;B 펶iz.o I4)w!].!GnL$ )ٛ ( 皯N9v_PN_70&d)>o6^4Z'(O~(J6>9zףV pIDwFԷ;]Lpycrn *e7n(lRw^xS3(ʽ.o2Qzk:n)T#;#Syy7d vFnU%Jx0Pgⰹtl n"~Ll3e&}9 "S]| jюu)(Jm(!&n=aDO-֏$R]8y߯侽;U7~b:YO3r9?>ޤ) ^*CtwQBvڭ_i9LqSa%t%)NG^i / zC(mu[=g/ԽksҘ BCA&ri]3:U3Ugaf$^ѡr~u]!H;(++Fvnؼ.P6Tޫ֗Fp A~g@Ju3/j3h;hdK!idx @Ӏ_v!7/7" 1Aj5E:p_,g>n=q1F 1ye@o;t;%!5?gGL)}QEa!p"R0+@_Rb,Unguc Nr-1Y2Qh=q&h0[jcO02Prl,C p+t0H?+)xH1Ŝq>" {}fD=Ra9G)-d$Ͳ(UKd<,k,DqL~{N Ï;"$cbȄ5-Zo|7xgYG΄%y)^IՌu9u0q6} >#PWN?otsWMHrI͎(u5K6 \،0oe6~Ϟ'QwHPg/6 T^yf~L7dvB,<3G3urTЈ]#G3_"Bc9SD(l 'nT/yCO+{3<(C6JKVφˀ1!{~AJ O,ϔaVnelV$he {R˷t6|(8,u#UVZ&})B_|Ps'Kyw8eGe C'5{4É?[la*6|d1 2e*(vck'm`Ų{MmܔbjD.KWx!cu d{-΂ڋ+{!x-R(!(>)FcQeTce3zG&7b~'e ξ k4#+4Ȍ 1:`ꀉ#! qT·!*'w!I}(i9:yګO.;b8?y;[CA}K*-O~ǁM;!Q Ds= ^ K\ѥkl_ib/aEڤ@uNZ^'a`GUP[Mx ;\UQgW%W9eMvoxƒQ΋M 0г%´#\s8X3ǡnL/9pxt0#\]Yoǂr+j^_dR6pstuXm=? _׳kmm-eKyϸ[ӧˏGD?_xlCm^%sG ( ̆!,>>i5Cr.k:[NR23t?U;ŦJXyjߪKӏ̳6. Ig;9E2u:Dٱ+~ϙJ#-C\ f, '*8fBp,ai.?IO޺ k57Q?ߙORf"&w!aϸܟՍ'+YќqMIG1WnQ?rU ad-;fҽTf98"s!c/'YH}/QK pdlݾYN`?4ܜ{ٮS"?hG4o3Mq[ 礹ܽ%Bk,dt)$(]rYg4Uyh)E \톧Y VpR|[" yk('wC,oԩgB#JFTw;ׄZLISZTfAfh ĺ}kt,5WOϝU/G$ڠwVgqMˆ+;|pLr:YNOy&.A0jkJX.H]xZAP+8^c$y \ e۵7m{8ݾd_fb2qҕ5=ujk.X+'NJܩf Xwgq1VFx|k0"GIbY~![y Xlo# ש R._w388,CΑjZG![[/}jEh:Z3as/fih/Z~kjS_qrc4!Kj #Xg׊+|cg\kkl:\7b 3,O>M3kw!Q4{i!6s 5Q] R÷:>{!2r;!1-&A UAOXA{ۍ>Bo٥R~]]s;ܞ_;l͙~v[!BL ]DbLlrm2D%( 2sע@AJlO|L oV'~􋂾|HPW?eCj +lj {!CX?9*=S~cXNdhvt !җA_R\ҍ3.z |bfF-L]CxU>sk!0k{M3LFKOwH*.vBD萄8wU0]b۫2\څlHN<ˈ2`=~唅6Fo4զ6a6^Rܯ-lbe۲&h-Jԅ%:v"QrȺ XpVwvў]!/sNJ!Ҙ4=Ms/S-_dGHwK|ŽcAo٤MFkWn%{zN/3|?^aus{,,T Q$Bp>D,ZqP[x+qљi/"#tv&פ@HV?.k)W`DBq1vw5PX̱r,OWN@HƱ=M% 8sA[U;  d CtT/dv/ uM,l%A~}SپYQko-PWt.Uo}GVCn ?1 YEDw4L }U1V #/N̓~lA;K5 GBҫD&9+eveEW{Qz Tٶ4>%:9Iqq=?C\wفvn~",+ e]=y'끙7*D2 S:b ҠJ&U$3 ݔJ'Zz8gwvTz Tv]lRm3 N0buʾ.9^6FWز<>T$$Q3s Vr̈́(5vFO*qte Mf9յ`fK}O pEɝiVa/aYN8{;#U}m:d CrL07IPqZ:(f~3h>N<6?ZCF4$L5d|5'иM9}y˅]vTf@9&se h't{`")6n $ٷCęΨ_GgX0UDbAtKy0P# R[VxVĆ-3 TPB;,RGh5y s*}Ϧ#V\(g@UCTݩ:G8T7mLZSԪ`Q:K,.UJ$'ՙ)kyq`}bjV?mo-C a8~mTn.kÿ*=8x x{c@rg cFOoƟ^57 8z DVyufK|%r[)ŤyP/6;^/5ҎF,yQ[> pǧ4l/JZ0e?Μya1I9Xm}.'`kGpYg07Vȯӗ;|!Ă8m9{uҢ,5$BiҢ,MV $tH,_ L %>~Y_du|kI^1!a!WF?z-5KI>UELL@]dYYu2vx~Ai|03t9h${bf9Һ-701s-L;L~YQ Ε@[V7O2"Ak}TIU~'pat4rgtܾc[5:]{豆CM ,IޠB2!NU, *\Dwl^ ^8 n I[W'g` τ*R-cvt>_~ۡ*cόe/E./dsJ[Gq!g^ObW}g1A~,̟=/AUSpsM7d)/ ;1/eiDm2.g` !2m5xV+ƥfcԊdz@oW]3+-EvS ^엘0ftqu xM^r č[is/Od* w/2CD䂡1ezZIY>iV̫u11'1 EZԿKDW<+hV%v˝Z?/w-$G/ua0tib`i[Bl Oݿb,bqd0!QS_`lPNgqlʣ9t}ĺ4;/m#d,er|5J+uy-5_(D,!cumOdH蜇Xf_PKZ$?ᵻKwp.C,UId+)vwW牨1{6m6o?7Ir{GzFņWVt$L펆"Uӈgl#qDzW$QН[4%cSJl#!vBs%Bv7k[e(ꔢL|wʩeOӱ cuofv ,:cY!h=:%WDأ=jمLOJDIwIߖܦ0{M 65{^% WF6(qU wC$U⦦Rl 2J4O+IӈFʔu)YI1/փk(Œ]8.[?Et@^7mԐQ'4Cel~1V"[]6kz(Q@ʍ[Z65uf ]:kisp67¹*& \N/aJz[E]Mw$*2H:=lzʋ9IOsC&^Coݸh.]ǑǍSW`%dl22T?upTU>A݈" s~fV'[[Qv0Sv+DԌM>#_m* ]^A`їd㴨̉ .:}6pѯJL*l .>S/c@J+GVgrIY?Cá[nՒII\ׄwjRI?Y[ڥ&=!RV8]8A^pl]X8N(},[bnvG1_ی6 Dz} ধJQ4u'xYi' ߅ DI]E"w%s8z! OP{c ~'O)uzffK8g/$D6F-+hOe41xx}]"W!Dr ^^ߏ}N0]9.#E&ty%! 
uT)_T;زL[F$edxE!bnɯ캐b7/3{emUzVS<bXF ׵TRHyVίhқ0}H@%\` /e'ᓥs20CgKzri0nxD;㐻u{?â>?>~534=W,w^.!BLGCY[ӒӒX}6- !k4n?+n2R(ϷgsvO$,<h"`Tfa ^ f!~rx!b!Skvr9 Ce=A LtL@Vn㉷}4 yiwR+9~,Z;.+%u?v[cEǖf"9p%JroKm}Oaca5y2(mPoŧq͐|tv F] c@%Y8_%-V:7kn.&p*q 6Cȍ5ShMM"#.Qdm?ǜ4GLuKYQLx~KRL3ud6oT2=ә"<^%KkײV 4E[ Z*1*\vL ~au;vk2>coyL)ߕ/߾掼yOb,WH~F軹0cy9p<'f#SXҾTBv65/AՐ|`ݎ|_Ne43]vG^|5a8&8*}KJPNO(ABftYYn磯—MjAW<ªǖǺq'$_7̐Ȗy7v? *z@Wg݄.X$o\>27u YµY4ǿj*zT1,y3*̔枅\?=1j9\vťN"OS\Y8Ļj:{Ȯk$gj-7y\)ӂL”Ч;hocچ\Ԩ17 l468"kԔ58*>'tenkAf<_>@VzjU5Yr ,qp*i*`i\:Ao.DozN;<烄YOg$Cx}ͪ&9i&3!-keH[Jv&rJE];*0s "˘ @+56n"{M{ |?q-Jl|o!g{wKt&3 U>0nkTfB5@3ny_`1[W^" qڰ{*(F-ZQI=2]/4t)YENsiiѝfЕCmgYpn]*x쌧I{fZЪAmq WIUvWI=3pbXj{1 /l<6iqn -[Aej le9r6ioA)џJԕ7u~H{~]/'*JV,H^b_sHl1" f[Ժq*~f/N M2I}6OCĞr"D=9(+^N@Iؓ.UD?RRC2}eie8)U @f!%EA7*$1qei_ XLwu70ݝ>pbuBxb=¾%|m|b_CdɀxS +w7í pr_(qY\^ۍ_:> kNR""~5 ]4e VR3C@`KPǕiBU^ڞHN?e7I:⏰Ig"˒e//ʭsD rHR%B,wZvȟ;"}~-R[xagR^}NNjvg^$]hOuʬ\t?1*؍20*W 6piw&kJ@!V?`*|-\;"tm9`T0SWj) ,W >9T?n]Rjs;HxbJX]vI} 6]H(*iY)þAݱGs*XD+…>1kd/Vjxu w!r`"<5 --dW ֍Ӑ3RW> FHb[\<>>M=l}RT߯ۉX59MQ5+/X#cleϟ{=Oz .O`187ow8 ЫgdwŘPj(G$ìXM g#9RQ8I'bԊ8+V񳉑y^4\Z"ȪT {%O).^)1\^6kU3q7q<ڇGXm);[(lӐ%X"G9|䍿1Dv/eq^~r_>%DH?첿]+;JHM^eus\0 \9ʾ5Nz> _\E0JQ[UQ4 yݛgd'LUwoU;k"1s!H׎~=0}yW叩 31gRgMɸ޴`_SFuSwqNJFos4"g K̺@q,_ke;́,w8 |~u Y7Bt3qDb4)xUaJf}5uڧVцƝ{c<-pvmj۾WEzApG:ybH6zAϜ7[JqPfR] T3 %, )L}4  LJS##{! 3生bt]4o! %l>wלskQQyMкz3UR\S|;˛ ɁWt4/韡J.Kg#tNN.D4%%0\q lXoS|3kݢf̞%Hb)o<z< s96 PB`R>G;kS=K!A1m-aon_bNҹ0T]g` xTؔ"?QΛ"BIFP$WgƺB+!;ɪ8MhZ]ǬE6{tT(ՓW&:yRթ\qsXɐR )z,;tj۞c<#薧/H_^k=6cH<f_^lIo,15fETٴ]ceT3ͼʿ"{Id{Nܬ9RGXmk4!2'Yu Йt ׺}0( g*3u.o|_ؔ geq6W1׮, l@$?ufڿq|)xbuk tun K$aE4&Byߪ-pC{x85Hpn 3.&.&٣UZ䨃Y:3J%% dZ 4ʖAPw!<Π &[ u&ՙG,/\KTd՗r%*b/*=ӵ?sQ2GC3|:}4A ?E\e f7h=o#<=Aŵ1џ)_B??ݿp 79}%}Ҭ, ;:7Ak9+~& :Mq3%:1c@Z#,+ʹDG3t ,{V /1!M֗:d5k6植5rYsrGʰ%hk&r>.uAӝͯ^9TMI-iTr:MXck%5悴XBsz͇l滷d&|Y@5Sh˝'#Gd ؁X>Ӿ}Yۯ*7c,9 '+VL.o§OEy%6o89b .*V^Ǻx%} RKrG(+jU'Q=A:?X&( i~^[Q}t$ZWGN4j_ktrՌӿ/KSF@8yQvH 蛽cH(LhSWk*.jS",G1 5Mo:9цof;SsvՁl*!Ѝך#+DgyTAA- Gd~v-:V4cOsc幓_#ܓΌs:F$#~uI`j!TLMј?ԉ4 =z(CpH"@i[Y#W>kk|rBW.<_vvq?T.n))_,o巺޸g{_#6o~?o{uxSٲL&|v4_~z)T)T> ©WXfm׵D^L,>Lv *$Ir}WwqތgL)sa₍0Ps'zA#Ϟ8hcŽ4vJljFL0ݲY*<{Ѥ>h.U:.cakR׃lù;m75c&@:>NCny7 x& ' s?jS_1)b߱<&p._luJ_A`]n^V.inaQuc5>@ Dk'VB?;|&WrBG~zy"½HD$B4*K_4IpcEƒp"e13g@oٚ*e%Z8lI>Iġ(%j5|(X8 ,:Y!tgH>YnIKx s9w2(6?GlGLÆ~;qP<˦R5Ud$ug7S׏bX$_4S.xTȫ7H6C*n(Jۉ xz>sqYd<%j؈Skl~MU__wO[p0ctڧw<9<% ؖt&RNInrǏPy`{"_pw.0=?t`6ӵᐒDu(zoOY|ѹQA?;5N\tdAyӛa.\_m͗J9)ԐDmvq.DB"arϷ%RUqfZDj&ݦ.xHV˭ a"&FkbtC~_1 ?{g:۶SnwHrcơ%^{LnaZ0''ҙ9 xY1=bN;]<3sRB{ B6r[6e_ҏqHO(Hx?v+AUzz/N;烽uA&R{9\TM3Y.K b^=jobˏhSdj)wd2 ~Qn'e :Bd9F8S"/Ģ%\4LhfTb#7sA!`:"[j ܣ'㘻BЎsY"&_xiM4uu-,8{)Aj #Bb-ǕԸ̔TTdsH"0 ֔; 0|=Vtal\BC[:ʗH޿1uTzwLќEɞ.Z|Q#x5BEYA*m‚ּD*.ڙxBTr{>>lԁ)bN~DtqOJC+KÒJS򒱬B 4OxWê|";RR[a:j hT"ǶEgod&Љ./(.f3/ь*9\X͎H">_:Q^fkfȦ f ԒKG} ^sm% {Kumy˟IRA!6p]b;P4L u \͙|Q5&2p~;8#>'*^Hd!N߆P]`$A 3E:qM|A`eP3[3۶ʇu +^A, lܒCzB k3Q7/(Y.5e5I3 c˟3i9 #ws2{y}Aw c 8o^e" §&яX+o^P?<$TYhylO7qx 8&M˹?SETD!Zyj{d}iߒ¨T\Qzw&`,S]{79Ml DCqb̼BɁ>1v6/f- /Rn )GJY;prIk;ppȉƹ#!6`p"u?a!Ńi^Xhk&s H#:;4|gմQLe[9Ik08RtG^ Y!{G Pͩdn[o]~hI>?.{4qbMThtblqWBa{^׍҆i<c~({ޗ9ßCġvA<&Jrђ iIKܜ5ގf8N7@x+)>Z#t6j{k?@ɟ53Q"3͟JGGT" ߎc731_QDXӟ2 \6ɼ7݈N*5g)Ud(Rst^Gh(j31#D(8+:$"Sb-ۼwM:_Ej1nw2+,K7sr<={ hI<5<:Fw=e=SFZƇ̗)׿LiC%u܆HEVnyUl0RleY|Uc?6Ɣ!]>.<{{oǹ7G_*ſ<֔{IYCOM1@!C -W!%(Cyw39a>%3>* R:P\=,/ioD"S{Ղ!! q̍%+iO dո6xӷ>Lw?n QAcM_8Ҝy&#.o6[ & e#xDΠKL.'3q,[+PMGFj&Ij[LXw.MQ\ K0MO+^AIUfeIOY*XxBèk !aq=q ;';FɁ Օ5+pf,}cJ&I~ew3ʇDŽ6C(r\!=5Y,1}vA_<ֳ}Soʤx|A,e<78l Hb0Z'AkOm-=t]ߑºwP@F7m=_iوO75}B.l=zz|&wI$x[J{q=8D'*=[<":@m%~FȓK6@QRxE#l$t[;M5G% “ZUOymBc*|o?;RNB86AnV]ơ]R(>| B|%7 G;8̱"0|!qHˠvdNe*ym1C$D,C-B! 
J$yfv>"Hn}i̦Tl>D2WP0nyS'>OW==Y+^Fаڇ#5ߵ$ȼwkUtǎ ~C*1C  XF_ƫg>'3;i ؑ~pdWvi@ D>3G;Hdc[s orv' Cػ je=I^]0rlTLm&I8_Wonͩhx.Ucrdc>Jɑ@o0̧,D*OjYow08Rl#! nI<7Rݢ:m_ ޫ["?"SYmK;$s*vN1S.Ԁ ;Sf&Լk9x'zk/6Bo"(}3u|y, - h- eKk EdK3SEQ=fVs"8N ^WhղMk :9y].;.xկ!c7"w:\-:~jn%s{!G*(y5Qv( qH*H90ӟ{GQήzp]vaE̟|ʉJN+T-*eSB X93l<[_2iLM}doQ-øuVvrsAޔ!>.X'(D'd''NVv;ϒV3Jsgi5 YFO*%[lnbQPYAeޔՠ"P2 L^?xCtqZ-L{y^o@|ZZ`=Lry)6X<#M>d,Y3V%k[sW_UKl0sVsIx_Ó׬S}6.h6[9$7r\8#!ġ?K1O.%]vlfm*?ìIڭ0N1<%}TD/C/P@ `N 8a2W\wC+^;9" ݨ G.#)ۇD%MƸwߖS"H?Za?w3hܠ;ݺnoalxhIӇHVIۖƱ09p2icוT(GAVWJţv+̀)CP aI?K"ڡc'`;w&VY6WL~&Jzt2YFxEfVZ~^YMFjyf2lwCtե!E@F0E ;SӼuO)k#نfװAz5Xݍ KoR= ĜɚQxra0w9lCۍ(ƻM=}Q1k>I|Ɔ}1]uڸ |w68nfofk`4YIS7Lmi-7f^{RC0l9_I%b1-E.v bМm@c ;ojnjoܶc!bN Fڑ]W_?I ㏚DӪf?]m%gȝ*n& _?؂붚Pȫe/mk>)KVWD4@6h#v^b\Vנ7g^֌_\6աZSB4Ld mS0TIVM3&M]6NrWNcDPaEΫ\5q q }1U\w5>BZ 7,R;3CEB.IUF.;C%Qn*m xWV[ }/d̟怒{ :vmn(CL!mX*xIҹ[*̸$3/(9 {.iJOR@ E #mb#7k(!'2U؏f B;#o5`ݦ-V^/37)wم%a" IT XV,VךċIMt^F%wW{kǮq`\%vz#(rœo%!ENF$Vel&ezL=T3+nmٴ{{Y۳ˠIԐx$E<Vc^+H0Gϑqۖ/!H+ KC<-&-$>vnԹ;CFn~=ㅭq3]e{MC7L7+H%Qq"%~kO(xUEC$&021W+˥0\eu'{n=aT {)#7/kc1/jR|DF ,J+ Gϫp%"f+tSw{+o85IaK~ ':դoF1`~eh<n0橁SH=YI-#E-/F{@BD9mSA40 w\r[оbt]@ r f'3| rCnh'yok"XuKٞ5[׳1{_~ /RQE<$gP. d뾬U9Y9Fz5[QPPtjJ鞎'!BDB``-tf!`D:@bw2克D:=b@o QGas$z!ڍb}TCo 6<ē;a3 ex%`24AMXEt>a+heA cklYS`пs}0dpJRp({!!v+t+hEܽW_4$lho F!{^^\8h @ds/xU7Jʩ8ŅL;-CyJn^eZ'؋3Ąk~:QjJET-)hRrc.T9p XpSM(S׋Tkҍ]gx ߀kM)o1Y|{ 5o뗉V669Jq2LwroF_Z8+٭JΌH4t:yzVMl g<Z8;,h<01d4z 3U9[LZoXFxE#TLg;=eSA=3Sj%a`ܼvqȏ3fU;b+ SkN@%US1XGq;|9O*mV".3ԒĚ)Ir  {u)Wo&V~sNv{ޱFYX<ЛKܩF kEi%^N6Y,X ^UN;6[+AȦx sy4dgFQ$KY&Y֧c$MKrAG{#,SM508w2zl1VNz=8$O4 s 3El Vyn,D `?Yfsb0i;F5ibrlgBm'˦ Zt9"("KJ6deyP}U;7^67alD7I x| kD1᳃ۮJ!۹3dp# PO z5i%fGq=ʥ9ɆX<FVJ-vuûCşXWG?DS+a) #Aoՙ-+B쯂,*,Osd_oR@5l Bň3N߉8AbcVFu2x=D44K_.!`NX9DwK2rDyN݂)D&tc!` c\:,l89&۳6$oiL ) K$=3hأv_#^hPP/FqS>^ː:]X~4tպVNu==1؇ʬq}M\- x!!Z'&\>(=CCIsS9bi.~xxV0`XnܹT1N 1FhfFvQ{l$5O`XE`0ܫP'GW.BhwD#r;+[`H3SZ?jo? =mu4:VLgg!\^ӣZj8P}yx`e!YEIG.WK:OLB;3 `1j;o=\x?i0xf Z&' oE@">ѡ#-H85ն&AY"vQ̱ j9N#*~P63KU-\bYf ,EG ^wȑɛcau} k Yky$iʪ+'m0/Sn˸HMPydFv)*zS^SmxOsb3ǘx?(*.  FKL<<.5(ZؑPU#0k8!j~]KGt א =9 ~;rS)>#BU ن"uovWBysIDVv*IwH7= ?30W&Ad"x^}gZD׻Pq]Wʁ[+ uar7]!ؓ#bD*{`5]I`!cQ鿘D쵽vW;ŧ]v?7._AȺj ,VHyoxz ̥Ȍ!y{xtRضYuHt#0׻>3 h^6ï+wI)5|Gr% rܚcFkSJBܑ{/S(Gf ;@IJ#;$t ɕ|,f.cP(tGLZMU0r.;r (:ֲj*[[`?* fvX~жC{)ŨNOh} c輸YOH+އ1͓ ]Go70b XcnuADZ%J Smڅ)c< 'L؉[֯#/we6ޖ7s ^cf(ї)hKw^bq)tnsR: •/\ɂ5~?q $vR1?9X-m'î.st=2sd=Bm#ff(fUϯ]%)5K"pA9O~^ #Pe[bu8h*tKR[En|Dכn5^AaVRQvH2~ʗ &oOS#hVmgBimS]gl0hgfceld/fu{@eQb Evtb="Hv̬uATw["̜י(Ռ'٦]3\u'B2N/×a /GSbr#2ycCkĿǺB|dd?ґ99 /=ƜKu27o.țԴ>Vl|͢3gejq,Ƴ`,,#-0r+Gz RCBQRV?=Z^M?z+NԌDlH<-L(AwhXYS)n[0B^i޻K$caE`jdt6t 7p; FM l xUf#O$*UlmWB(؞mSrampcg'*'Ȁi!I+GvTyF^N>rbS>alZD+Q MYۘ #Q˹էca̬qWf ۍs˙ESl2Dђ3.]v5Xv^á.7so|hwNgɊ  α 5";3a2&OEg+&]UV(쐬O S $7Q}/R3 0[ RY:id'tx+n;}& E.$-HAP>pB jp ;Qs92Of=1ROCD,lMW1dlZ rz|RWlN~ia`>~4m1ga$ D5!I#Zv`>J>DZRw%Ew6 :/N #g^ ~'Q^݇0R ]k`rשxH]µsQ[6m)vI%+D% qsid=1_HLCJS!R)긿"Ǒ04#=Q|lfp؍zXp/}9VtߑAgI@-y0V1XGI0q}Ij21B޽l&;-b|Ӷ M0Zvd@E[ Ops_&1YBً)l^3:y`|js=F)v19е{.G=:<4<0Y@GbFyM27EXt$٬#q񠍷Ep1/pE[6~J:2V5mk8`3VQ!&e$pWczPy0YͤĔNJ%n߁4C/Y;θ?~Ԫ7~0!4+~~zgT ݮlZ#nFmG˃!ͼY*᳏1ge/_E X+tFûzĄӘ$UCew̛gXi6a_PΐI $o?Fd6R@_b'}A7 [A1Kj3^4.h[곣*,oX=);uy]2qWGb^ZH2+I3c|X"L]W;םd>oIDvw:b a 0?}/㚠? cϼ $&ȓw%|n;Qc\jK )NX^_Ugؽ &Vtc7A'KB#dFzhx5N/P4dBc܇ .(yլ^F!1H\}nqAglы Zw`X 6E&v)tS4ݐec&"ŀӾ׭Gʖ˕^mb|&4Q%ASL=ڟMSkA([y͏٤r Bp&6}*4zRV%9Nܵb&/Xa,OM3)ĹJHUٍF" ϟx~Q$1(hfYsYlY15A=_ !lJбXo"|WGoL!%iQ}䳇E7ڈR`Y"fW~Q?h2^~G?ځ-JDy@RU2 =vdImX^Dꋉ\D1ͼSuP6+& ,gG3W.~O eܦ`W3/9calxaba>[tv\,e{iIsbVb "4vmW5鸩d{_ԗƺDZJJ {`#i2Tw[/v,˙Q<쀏ur)鲺g][?N3%P)ҠTHyLްC}BEsT(nW:^b(onwN*vƢ<^DqhY8)+6۰@Q$2lN/pWO$ԈF4i^4($7Itcu1'ᕦ"E=#bjQ̤إe>gc035~%6]`>L}73hF)/loɑmb"9gw? 
[aȚ]O2hcRbgmj4.K1a:eud|W'۔TKqİ$aԍs'J@G7) 1ݎ5*;{dWzX@}T6(Mqk&_% E$Oha!V++Y^&Z Ҏ=)Ɣzh+[Gto*dw,=Y\H;4KZ>b۔LvcbܼzJE̍ `I#O>óyw'^feVY"HoO*l!Vg9<3?J^34S%Ze^ X4mN6ZQ&{ zڃ =-l ^+ 40ٵ`t>飪=J{W>i[&s s,Č5«INbkmcN(;̠25Yf3L fZ=Z-qrzw &wBt\Q #[B_`)^9nTfq]><Ƣ)^s}+IwDAXXY)t:"G- SCxd2Ʀy(3ڼe_*:1r$TD.G0=ЃJZ %ItqJ Fm4 yw%9hۼ?2bۿ$)CPO D e H%j33}' 9r'$:|@~eZ&a~>Η75~Am~V\2Bh)mX~j7PT4mRZźRv`A1%"Nʕ:~q LděR+nCcTZ0ns ;MRnSؖp=XӅ?JMa~3f| n^X懛NC°}'g̦-=p^ :LPޞ"o(5 kmqQ !10pelȅԉ ZT fu2C7E䊭-ȕ0+&kvKѻFjW0Fn;ay0K}|SY2 M=̦ vp?"Iif_qp jcUw-D/W{gLa7Va5m(f/fi$ͺPܴe$ (S3ƇF3ͭ'L0o-{fF ٵڊF˗C'9 &`E@ R̎SlZ߆f=ReϧG}x~@JͰᰮEBo"m&'&B ? =l̦(zLÈDVrRr!TxnlGT G$l9ȑ9Hss|,XOn><9ְbnM9K:/hF&!E:25q?Pɹk#,[a{kx{o д\ 1J_{K,eEQmdxo:[vu(SrsM͖<](%+w,;/|{vQɨ4qRMLlQ,7>ї`j",{iR؟qjF=)XjPeN6%!-6eLv2]k3bd5W8c"}jXB(7ӛ±Kȱ͌c79v\ʲtV'j]>R~&< DN@{ ĠoMm̳x37'”'t}97ŏ=07jYcaJ#'+Si鶑D^SlIT79@׀Z`|#\{z~bcNtaoa/rXE,gRrÃ||`߾prnz4X u~ *ݳcc|z$< ǥ\Ĭi#NhsƆډ1D `xoj vuɯ칥O@;Q&U(!ꄮAÄ_>bTYo=imtxN)>òuPlDM]& c,n SdGYWwSe\D3;K t,O=+ztpizDF~7ܽ @;$(˞;^T_?aRk01ҷ7t5al#?V|t%h`mT K:d!NsbODA>*NNeUzaVI3V5? BI`)TFdޥv.|x=G9[ ~MX Vx~ήyLv,$kri\ 2x68 M42/][Ă$,;B V? &h njq21J_icEʅLxkΈh5t$#ةJ=veuXV Jv&8_ϧ(!SљSA#iq0+'PG< S.w55$qi KJⶥɭrC;䥓aC"etK+x!PG{Sy,Wt7ؗ|ފ;\[v&فMeZ|Yt׼:MlLxV"G8!*.Q.nQ1bK;׭}N?կng31foDYpݲevm/_7]"_,~F 3[ـk aOewF0?FKA.jd*5BGɫggXJPh0E `id|@ZV-}wnSïjʜ&:$EzD1ڹN>fU)%u`wu|VH9V|Y}`!AQBj$3Sy,g7U_!c^y ozmc܆ +F:!5pH-qP.x{ yX*VmˑɁi׳oخ Uo3y)Xd4̫׭wn Yu`̓Ⱦe,NrT,OWV -Er~Y.PtZT3'O1/wXFRAhO%FMmeŶ_(!v~/(W\" C~eI{{éz8>YI! SvC;ek@6hF7k?C8ZJ:+roȷ/E IJdTJ(c#N >a%L_y SSg{8)ףLAKR3yuql$Yc^zL'P05=JRCqt=A ^d~ESt0Yoeb;*$mGp1}u=EK7o -&6al#GY6S8.@VGݕ=SP3s3 FtT8؎X>} c0-E+j .Q3XLJ_s.4B>ekKd-x _2@cAg%gӠ"G/Fr^4*a>30_W鍗 =>9L,%>2c@AeD;3ZBև3*V̀~%G?:K!@i`ʷ?he@?GyQ0&3y "Hpi}ԁ^M`uk~v۽UiԞ2{+ob{OHJxl۫'}톩(e <Q*D RX,gY}>V}Hj)w@:7za [a΅#fq^pR#vLƀVr+Sqo}""dTg1aN߹觢xGI'9717O +i/huy\0[+Yc/n'oǂp̃zv Jބwk雷CԕǂS& l AL]#uq ą[$+ajtU *Aqw-T*1C .=d'Ghu cСEڔq.P)%1e6> Sm$1L,i2yVuzy ,[Q~u.|*,rɧef Esw\uN=zäIY'PҘr^6p?y3`2sـM\ xzSL,#܄"Yh3dy)cU =v&UnM[k/_"OV2!kÿ"]8(s嵮Dٲg4T5G2YhT\k>`:2Sۨ0>^_f舄't{~uZRO(OԢ֮P]csq#/H&Y4}DWrr2fDZ(S0u QiC18#tp 5V : -~^c^n=ddd0 asP GKSe?fAPGQ^gUӯŻI6۰gY9 c(XR: [:Z Yh@pJoϫ}bvdCplTڱ8dfK[b/) Wf5B^ѥ@GrqOywᧃ8cfRs؊哋̩CS&/w!qe֚9v=#`Çzpi<ڪ~޷n?e.QANB2]ޒC= 򪒎f|oUZko2ȫ݌i5)N^մ:6SxVt oz2[lWh[OGVfCp IPa$gwEXciNiċCv: Y}DLZ=dwIw&=p y]W"Ű)1h[LEI2^1 A; m 173Z+֮;O[>;d$9'H-ɖ(_v'to-:p$€T&bDa{䌈g'ڪIcԓCٺT0#4%>$7q(Jb' Ɖ٦xЈ8lyn6(2F]q_/EsNHfB{Y;b,@ b4FLk`vW3a/6r s f5!?>w2d fIɇ0 V?].KP){$j;ט.zs  w`<~Gǡq vAכpt lvݿ8$syfYvE#!ΐ^}W^ѧuq9 p'ff]>-o? } G2 a4 7_4,DkV#[8lduF)RoëwڪAx| ^W!1qOy$6$ݞ >K >"f}Θ A o[cr Υt*{a񊙟׹<VċN@\-4mw c"E,ey&*u*Ji. 
ߠ28 }̖70EGӵg3d0];R'&Wbku/;j+%"%2Ts\9Nm\c_5SrDt9q:gs)+Ys$5AF42$$?ʓ)тN`kJzݣ2ܿˀe8k4mN W01-\h -&A“Rn171߿ظsiyfe)1FqX/^d fag&Ѭ0NKiy9k3dR}%afSԊƴPj" -Ɨo;b@ {=#\|im Bhذ'l2?HA{0@P*4 (Lz9:RXq"9Rxi>-Z`3kaqE`% ҏLII&e,Ĵ)GH 65ܩ1ι$N10FcmbAA?&j:dNwspir1F0R=LX.&B.l7c `XhcT*˽ӰVu=?EJV4MM'c,o\Tҍ\f3#O)md3f3E[/imAPqp_D# s!&2R P&grF29gfǜ̪]I:3ccP˖j3F[RG(2#{]"洃~:)Il!vv%mKlY49Sޤ:a6gO64D3)EjO.Y!GJf11V׌>|2I\e~.(W*%ųB$w1GB t멢o`Q 'PpID{Ytz[g?7ٔEBB<&.81 /6V mJF0ic(zys%>fd6Ydkɹ],Þ .1Λ.MXp*u@0o@*HKw[CO*a9o=07:e{pE'T/N`kY{ϧ&LMs[Wj9TiXf;aӴ KndzFtk~Tܲ-M79 6' (Jpn_픍mv-dc35DDlo4i>yNj2wSA?2y|Aq^%1ߖyK$E[%H'4dyab-G6t$TT as6LdpYw=0@om"RC11 l/T{$K3WAf L„u>WƓ"bU^<ųH~~ױ[_ψ-PorL%y߈O\N(fzbcf|'[qJiK1ړI?^k ooj˲,mUKU2]ƾQc*CjmO)dg52Xv4y[5c5|ն!C3_ 0:77l|?M?Oi?È-XDϏDPر TT~Gc)+ySdVE>/J@iXז:,͔_Y;g妿xˆ~o~Ҕ="!iS4a wb[rǫ$Zd=̹ jtmyNsH)}Z[T/Q~4L=Z'1_GH߉]>藜 )oOeFNJ@mVCP=n${&UpO_ÔvWaY4p)r By>R^BUaj·:ȦqnWI5dxac:=#~⸶l#Mf0 .nYYU{dR5H^зRN_/?Fk^+ M,vh$;7 p˞Jʯ n u)&#gCh 9V̎E`N_mcOi\ 3z,rfw 1\ 2G(E2.VPUX{[~_4{KC"Jx|4ljʇ [=D`U詠 D+,§ M9GMuŔ)L?0ˠP)Pp%qwu*]-N"D,4r)s$cϪ{k%*!\[/Ktԫh ֭B9V%N6{l3rCWc6O ͪMcr͏?]\]3Z0BDhNsVWر s[!%xw#(M%ٽqLPB5sFL ( -c3 ozSơkۏ3_DA #BMRKUUgZ !ZĒ_E}:b0F y2д:%oD&IEutwL,ZR?~Qpie]t\/G'@լeb8q:97#"=@g*La-Amy+"ctXO0m&lM= 4+8To"3f *hCH5qZmUp#σF>ҚհDwNM~ Xƭpd!>bXYnsӆЃ&5]݉'=SFM?Mj@ܜbjR0VY궯$Xe6LE&jV7{qE<,opyke bv,K!2cDVvk1QIe bŬ&˄yT1ag{ Ȇ Lۖpm6; _14,Q3żZ>2Nqjjafu C7Hw)_}:3i< 1yurkt)]cP=ec868ԫ&3ғU$hɑu,d հێ(H fs_yu eMg~[)[Cy~Xi"a8&O#Θe.}!ak>¶Tz8`HuL@' "W-P"6_#4K#|EȻ_v^>yF*\|?VsqGg:*}pIvE`\;sʹV%%"r8WY4y?{oia}j|, <1SMl3iikY9^G^4_~Y.iyPg%֞vu[$[uiza]p@o‚֊ jZ%H)AT!gp헗j]aw ے!܎qذ'" z[oc3,'&Wc?rX?<+-U {j>ӏX[&Sf&םUjY: #7>/gG0w` *Meb;eI /%eNYTa6/v?^,1bo3.On\|s)r<،hީe:ϲn23g )SC*pq (:'Ge7tEE룡_,@]92(.6f]b<63FKѳʪhG.ַU=;OfyAr4K,MPB|tCLGmxɅ f@JE.jY -V&,sS?\Wv=3y47em? dݣcpoXbygy5e*Øm`K#haBҴH-7P~vRhFg㷜:p,)9d9.掷O&0;C6%!x Q,X|3Y՘ {O3P榖 Y{HNB4ݎ~q?UP EXҤTȂlp`ii2}kY 0'佭lH!o2 nO_@VfZFa7+yF M􆡾NV@ֱni)Og@{R217v\nvv0ÂP,1u<^2E b˞Qv;LRIb)+y&Zxd"o'#'JT!/Y{|bU ow}o123"0cCa%j0xwd`,\jb-yUf7l5cpUYT]=1ɏxsu9$v^1Ձ:~> DfKskyN+ %}8,L&Y}Mo yW0ҥY)G2QƆ˻}uxfc FuSvNf~o(OzKwC c>/*kPkFwo.œ20~TDa?ZF f+ԶL0 54&bHSecbca"d;K3U8TOACrg;RDK!,hh%.~J7<6@0>CV8f"$|DwZi?1$30yFk4vRAa: ;iw2{3o VWv$mma s"FY:/zGyEum~;S差4%^5I4",#Ć$nk,H]*e}W^0sG DV6W@7<cyqyޒMt3aFtAX+o/e5rN7|Ƽ3>pW܂) {=Dz-dng*;(GtP=X6Lo"ku(*$i,7<<{>c+yu HC^3}.C#%W>Dم8~Bh.p|Ge4-x(G-B]t,R'/`2z`pY`VSj@\A \ 5o ?ue⮎L..JLE z[2 ~K;"e GM7rIr>Ŀp^VbITa(w첞_2R$vUc:}&x\imw{\NnQuCr4Lˤ$_Gtܸb`V9jb tÞrFөXW>6-A!,Y<,SќabDI0rvଓrIkoM7e膫=X lR.Xݴ5  }Oz+ܞɶlj Tp,aӟ= cc%}ȁ#p~3.Zeg%R4NiQTRάA% Ha}dr/ӝB 'ƺm7tf)#n3WU/rzo2v 2!PBn6`iK Y'kA~YJ7x 85^RE̊idֽgOu@kvA}{Jc8qz@bW<2o,)]#힏Q>^c `Ot,k1~]qB}gX9d>U M8;my=W~O^h)>F2FΊh*JhF>3'x0'`x-NՏS]Rɯmwoh&)=x梫W]s$\x/ w(L6}{ ldIh&܏|>Wu4C c$[70o<Ѕyf!Tn/Ϫ mƆC} L` W1 ?{H'׻:N~%M.a~(i79`d1_e5R,*8eeW#] 5K2͊0q&F31XߴQjo_ѷ3-`!Q0SM#a[~HtDOA&xvJ#zxe'`kS}>,F+ٞ/օXIFi1k)˃Mj6~&Hn%0Yʥd_m WLDr?aȲ0n<'qt͙E\ ƨ#һܭl"ZKݞ >7jM/i# #Pflf"ApLzInIv>uP:S6#zPiu$b6t^T*M3t`7dsϬj3Ղ*1{E;dN!ovx'LsjFĵ c4͕ Adq#eM˝h;Pa{XzwQBBD]5ءEukGwcI+t0x]Lg>R[>Wh2*݇5p:(H8Jb'%\8OŃTSM$J~DLV3 3o~2WoS$b7``, *!t֐dtloH(MDG Sk %I=tkiٙ~لNY_ Nt+k S6D A-,#9l58UP7P l^_Aă׵ʖ]#S|nn2Uric#Y{qİ2J\1E\ietFs|f?hY$xΗLf(Jh-;,>>^GРAȩޱ3yK0f,ǐ;zN}DIת4 O-Iٵ"&n^*zvD G{={ b8/ØKNJvek7O"i3B -A+nhMr Ccv,̑chy67M{c]!0<SA)w/Za c>hF#*˒(Ii>N>^X [w6a̱$u-=5ͨ'zf)]M6MC$ F0wʹ#(cdJ-?Fv)ń􂋋t>n>)WDFM)@Qus9 4>ـUqyPwxCg.JAcdZ"ζ)M85]824?ل\㠪׽Y0j'AY flRsCT49\H}ο?i8izb .4\"mmJ9*HqԬqޮ=46ub^ލ -bevh[S5/n&0SӶWB\ISL Bו{&Ct33ySX{ݔf dO?l>=G#˿@kK}F>I$6䢹P(XG]/0.c ?^_nCˋ#Gݔ^8izn׈[HjB'g~8˓#l=AZ zD1laP'H|mG|#e375ͭn/?/Dwơy456Œ@s,D"xut_qN%5IW7n:]rΫk1on:20La_T¸@*j;ErNɟF[7eLTDZ.Z]Gց&i#i^a aq̇>8 c9/aL*nè[a+` ޮ@G3bʰa (k'od!uB} vLs.}܄hC]n;@Ϧe0;6X9K[kK^E_1.R.^ 
dipy-0.13.0/dipy/data/files/C3.pkl.gz [binary archive member: gzip-compressed pickle data]
2o kp84f >-mȬ&١5=&|;т7ج%Wr,ϔ+ݣDHg%=R.2^>SOT҄mT{@Sh6Yx`̠8V+E#4,Y{<װqΚUVL5ٍ\FN735b<\" :օ:DD ] xǧ)y̳z06wٕm{>p/d^0>>5RL #SBgQEBT~?j:IWiaT5m٥?ZO_䁩{&Z䔆M_>ԠJnkr(Vr-zshJ56+pAƄkk ``4HϻҌZ^)΀:GB+w .Oz H}͏v1R$X%DAVý+tȩ_ aO{Lk [dbxWvGW4 .µTy'@~ZW]YȬ_4}C1[{ {%c0ڀ Y/"A-}w?\'R ;>B e(v-9w24rk30|c >gu0w"wIM`8+5ѮRyJkBets{LcpPy$DUqsI:䆝5I[eF ԽmԳ*zn2>lżpdPi֌hs|?@m"N o5<ʪ>ىmY瘶B!hJ+Sx1Pwz,=5;$T}B f2&gi>^eʍY7Q!~׽7Aܫ%dFxXeUcAo}r_'YrTj Gt~29+JaX۩61W2gfKW'ꩂsu@(8Q7/$pi4܈)N :ݥkͲy}-4C,vQYsqLbہ&}(JH8+'PSu& ge.lMlM=!r׍Q-֮1Fn3[OokXd^MLwS mwٙ%64)z("0!AH8 ļ)t۱wW)݀29\ٱ pA~G10KbejN](l<y\a)-.oO C|A9G'8W1o{h d3Ců7s<IJ|b= eS5GaYDp1 v+;)Uwd7q />TDMO,zs-yu}l6~ x$2GSJgCȢ5pH *̛|S௻q/Ʀ_뜪DLv&?i@Z\o,hKp\rx|j_5z@/Ia_x7Vd|" zc?k#/?z<Y2m V:3㕰Nm7#,ÎS*1WfI7z˽__~/7J!r^mHPp5>-l }&w./>O+,o¶y?wH tŌ&ߏ8dt>ot4{2|Ԙ\B7pиw|/fW ImryQu@ɚ= BeapWO]&T; Mw`f-HM) aFuFqZX/= r:5 ʔBKԼzjҸYN8jQ3̬[C 0Hu~{ 68V0 lvd.O]=q~Llo]k`g(uesbvPIa!h8*qp/ga6+`jBuGu6/ֳj ݸܸrM2lڕ.g," KZܶq3>:$ LK|6Ԧa1*ԁq]/kRHx}N1{1t->`}%ܬn sąhoMsxl:+q^lmyRF3fݼx o.;8O㮃Lo5. g7OnzG!.f&lcJ*gⴒ'G_bspI1sub;/+lJ2P!S9UOܣʈARvνt㏮2[=ѝ$XphC5l5c4DADO,-F[3W_fp %#ȞBcLZ_P J>ח9ꊗdiٶD a{ &n㝸o?wLeϷ$:JD$Ѵ_c饬( k9cEu;3L>ХY-il_xM5Y_MLΌ@dTl^N׿உo^.J#mHv6-USE=4(TɚޮA 3NZmh8=֡eS UgەEN0_.H\d4``箞RIV8=_܏>Ք+ʛ]L',6)wn)joU67t~`k1L.9)r=}-4;-1.mcwy-:?fh7 2A ɢ>5<_WSӢQ䯚\Crb rHc:& O셓{,9biv[jKyp+?"ZA1H-yk~=Tc֯YUDžqYMs mz`f{+oR]lnr_0(0/\u|A^Qk?\[+o!aűm.ynܯ> zt5y"šڪzq{Ƅ8Cnpռaf?UL. j$+ق⭞δn/mEZl:F?62ՁZo?%'Y'\| &LL*O5)zsYm#9: Ӷ5E)?l.8'^6jTcNaw|zBL=M\w͟oeV{b?g˓!XÅX-]߲׶⽀^N$藀^ۊ|O_n,_+ rsޛ+bjѓh8oVƚs# U4x\i1[Ǝq 2Â`sI`eUa40ٓpkln:\9FDcLCvoa\rlg[ñlXjpyPW㥉µBDi?xAti^\|5I|6F0yDR XEK bF%5}ajx<>U)IbV_.Û0-H^/@FRJwr;0Χ6ᠬ^irHg>~.z 6n&/EemL蟆6Xd-bK2PjG1܏c8bZ#7K͇Y5LzbuַLD> /M)6|stl,xu*AdV Vq/T(v*ANjDv!2S^cǃǬaO$0MoT_r\֫̔D$?DX(TӿckNŢyG6 L(2r {XУ 6GE~ Æ9L7Y g n)QM;`bF' SOIaʕK57L܋PX]b 1-'M`Opz* }~h{;bRVGkLS.>VEitg&mMPʪ@ncfB9Hu6{płJipN<Ќ`r p Q@\ѶDk?IšTMTc: 5y--3"6'?_,:ݫ`pc*⡡G]@d r}Wmw_+`zZFmOPXnfoR 7qi{ $*}PN<\}PG'L^luYאdm 0)/ n`f扝I)t7TО-{[ȘftlBkn]V }`H׼%1}(3n?w[>(L֛ԡ2{/>FDc TX@V3h9RIRoɇPZٱ#߉mwa Ɣ}͢/hwm:>"OAC#lM%+%i^=~Tw-:xmAm Yn[9/"Diy|dgp lsFk?=|looN@<=$NP1/5*-lcm|cs0u{\x߶_MTi(TE?p2ǎPO.Z;R͠\h"Hzo2cm˱ 7M*8&\ 8n&.|RHFMyy^!| ca81x_{52o_wߞgb[RLeq;G+PN{8R,ſ 0]Y0黓)=<şޚ#7|Dq걊0d0kmciM`'ۍ?:~:hޞ0T<\èҨ5wvR?ыT5鳝i͟ysM9˟ЯbJˆ{0jޮ'$զG7Wŀؒ~Rmd~p{Z [:{fwkٝ8WS=VG&3_7} ,N/Tݿ ## 3YT$/,-Vߋb͝eO䵹y2J#v_٘Y[}LMD9tOKTx#mi_Xs] #_&@|{,IR_(U.T lE CǦԹD cd;$~YgwWGd~,k]Ƚc̜S8u nݧS8 sp < D]u,3_ZFի?eX>2J9=){R#2wthwmVǸ?8O"KSr#,]d1]U0zhLE{84 o<;Zmph? GoA@V Je&k :C~wtDkL*дè8C0T3 T kA_Z2ݶm<`㦞 dHGt2TlTk|g(NY¬cxl-1w%飋tӪE1۵YmnoZzPø//=>i;.{M|qzii3GHwSePZ])+aT^E$ s=&8FcΦJ6m픏R糠Yts" n2-XG(5nCFIb/v ̹(=% CtR^%4S2p ^Bt%(#x_lk4d4Qea5(7oA#d){b)r |!4:m}SuvQ[SߜٖS+f`8Z3f8x\2gBn]KkI nA7t`ېAkIE LQ|.;K fg*qQS6?q} f9#}$4zb']A 3LKƖ7bWꄲk-+-vW{]HMAm;3K,nYa8+. 'e/\j)&GߗWiJ1 _[xƿz+=A1_^ay'o#DL9بwaوu84[;cR̝3BYsG2 cu{g Ktq[g-އe3$0&uM9ڴ+#/Չu]bQ Lz2N\pq䬦/al? jַ& ym\ƇB)YCs^ʗ(bc-y-5˛l7z^ib$kא9aiv}(OUAl ;= 0W_'RsTe՛Ӎ?V,;9MPNeހj`5sagk)B()vǜ>\_C/&'J/yēm5R%tREױ)jeQ%sM&aNP*rj6)5]:zY[GN+gE(wflb9R]zOu_U׈Z =tlYb`X$aspo,NNXu9%Q1y`@,%oȑk{@0E4144oW0ˊX2s]!* %%/}yuN. -,\#L",xk |Au{bks\]ǝY%FHu>$BrgHfS/b?(C6`_=5[0h'e#tMe`5Iox:{Tk䈳Ftwl:Fb#+ @n)s$#AڅBs^peXT.Oc&F3 Oʳ. Y0hs(P`QwԬZGg=E`lX~w^=xM@f!|VeBqqJ?E@R =~4>qmuA,X=Tw K7ЎLboА֛R7r c}_L`<`8(ȑb©^\1dhEZy1Ģ`dHj0-{gxağ:iķ}^P11! fGB"EL-[*2^Xݣ>E,L^5l{#EY~i#<ΨR' Z܅s*wV&jvT!d`4 u Ŝ{=J&gC||zi{lpx8Fw3VؼOB ґ9 #ZTocV*P.(:;JH^'$[L:K2a' P}Iwq+C,%3.д0<96]8z~Dʲ`ZY`,W52uW[%KÚo%AL!m4wpO21ߵ*dqE"۩]zwx9&~ d7lcx>2ԿBC$pRלS{una]ScAZ%Ⱥ7Y\skLH1\\B|kbvPdɐ<]S3kALu6`-r q&023U˗ptVn725JJKݍ4h9|UB f y8PDBI`//|tt&Ɗ#R XEgy;Kj_q9_v, F' C&5IUZZŒ;,냚_-F $fVBJ;ӮY=DYt%;{dII>"ڝ:+9sL? 
sf9 X0 B) Eq O@X_ Ȓ- i1R~6n5Mi[mvؐ8cf1=c0E:M鈬SM0:f%DH(@5|ƑuAW=2oT cw<}|UJ$9Ҩ#b|9HN@F2.T;Dק<ԟN`^ XñX0#a ?,si,T[Ųm)œ6yfi; & |&xlǔڞ*x!8t@pT1B@^ofX;oxY4k&+ OUB•^(ȺĐ`$#eؿA^M>FRrb:s4O9g: VFt^sUO)YK(q_mׂC'qo!z!؅ [i_!+?EHY ')h sZ6U&8mu⎭R{ahS0ٳ}"q(kbG*ZS]j~Z堍ޛu܃%uSm 򦀑R5^8$p,}|p\ZKYHގ,MD+-l^~O0i+J88qk.gº\ā§8#_>xP[40Rqe3!xi5m7?Z,><[T^8|4aV*XXc:Q܇~1UP^ VmiubJ4_P*R۱s9V!kQ\z%:A Wf,ɝIzI a6iw/r:+?*=FjժTuC^! Zk#AlWGTgs[+HGAeU^h!,Z֘,D'bnVL-(F7nɺ"bN)8S5pleCxbOb['bdª [*)̮umؾwUX6dXf ϱ'3fjpz5?,1('%;byuщ0[æ;wR][{]/OJ<0iw " ^df @m?16 zPA=4+I|CB}o8=2Q9=rZ]pQBYD_wڅF.>}5 YF&6~ak-fI #pR"t_Qr H\NQ02YDj( NsD{"WuZN\4B:Z=wq-2M9JVd1LjRiky'vk]E..CXD>Dvpۘbn5(utB">h񽘵A5"h5|6T**~BeKP@~q3 xĖc?CK [(Nx, @ J;ຸk0H4]_ S y JZ$ntƉwy4|HSqĻ0q!SGQ g :tXs˺\H9+t0'i&=pÀ.km('` s݄FxzGH<].S!C) $SpR8Kra^ޛ:FThWwN O /fȑ=Jxt*JBga{b=T6d&E)E>?έ `P!7HT:#(Fqm{ՎR:0Yrpp<_Sx35?9eFbj(84-zes,M`^n>tl0y'IV eێu knm kD.K-Yתy7{*;p%6bpLƨkYPޫxcde޷*E'ajË D~m,+d>W[+O`9VBPj[ pV(~ts[;cQ;ȱXY؆*uQ# ]f&aAU\RAn_ڼ|Y*^0TtS"2wO lVwԢ/ Un3mh<|]UEk)>c-#spT4,og͑}oWCg´lfMh08SJ-E4n2P\ex&_ϡ"Q򯐌XbV3vĄA6{{`\|licNӃ.? "WY,_ `q3ĻnhkSp}[4x]lV5J;m&#g;mOYuo痶,VN%y]SG}|oVN`s%1oػz[p-ywR]c=~_ѪF'?}?aAų!r o2N~/JLTز]hHD ɦYz?I&Ϻ]3փ8W/yJ69 .9֒)319dm-Zea$1GهO h^ctz ;m-\co7#0szssiOp=l04|*EL@sl7APeo/?v͈lNrX_dVDDq9eP|yc ˰[7*FG3t&3} 3*-a>0e:4ȵ~xYm DǾe$]~a^SQ>NUUH  5Gw;lG\6^yUahX sƲ䣎{`P6c6sTCƑSƸD KVRt]cC[ *y&bڏuT+gϱ.lpQ)PTWBfųm'Z0 ?,[ j~‚>9»MqxtO- uu - @6٨rn^9~\<7C;#\{¨tBP36<ⲵjkʚkU^p>XD G āԦm(##i˨_Me .2>_y8^CY6A%rp`T:F#I%ED*$"a-Cz딢`Qrvx}G^|E1m,=TE]wcވ f.A#]EF;f1+l0nE(@m0Jʈh,{=-0SV =&%(\cҕ$>OճKs8H@jyTկCkegK>~q-eNۦsi~YzWp3~z =2+WQ %Wh3"h0+@0k/ae Ss!@q9MPw /XXEC˚yu#dba,Xu)Dq*]/l '( UlL<|]OMSbJLA^~a_e[^e-Qrbe!bdu,]#1e{ J?ljK_W30O6xhkXɟ'?*Q1ԁM k膉]b,s[=hP+lŽ/poO\}/vrʠ/,#9Y]=^}$GV]d=ATP+Y}̺ozxYܲyuva̹5f_W4p}%Q &*MCơrC ZB)m=/ɡGZ}ק mZ9  4ȴZVeD(c%-fxB/. yw.Q7YiIl$ [rgYow;CnhUgfqs'R܅1;B.J^jf^& {,P*Yt؟-)tn 6v,'O,T*qlKPd \'xg30[blم}b uJN*jOF#㝤V'GmcfcoV?k%]ܪ[k}gftA~/Q3c71V^E )# 5M]Ї=fգnO1'qt7sNҢ`c:XSVȞ"zX23P #b39o~[,oF侠ClSп[yA8LQa(%;qWehl{腲3f.^WjQ#X`v[ePV#6@zI$]w(.+CßDf}FJoلaɍW.x2w,/UZrI! }xڌ@Fd00D;u zK2m=,Sa<3rΝ`V{F`YxǾBΈ9T'#g7pEaa+;]o: ,h]).E;>^~lY(èuDhNX'=LGƖr+0sWtqV2]B-iȺY=u/pb o9qnt?gie`]#Z|1ǖwn[Gg-|eNz~Ij./ "k0xhJR-famyI!LJ^J-P`KH(bK5a$P v*-RXm+Nh?`R솣#&汢,Fp8lp~AɨzVZz#ek(E!hqBCۈݜ Puc{n19ap5%ti踖n' gI$aO Y=.(O~!Q02/P8 F'֫k'QrIOdV\wE9yTs]^Ѳ:9[_YP>9ddx6qeڃl.۪XZʐ.:qZ ZR,_Cr84Z7|,?wޝonc@LM~!1W}.>KM!)֏ |If?-Icި4d5=qr}\068jൖ64"G~9&"e@yK]#=v)̎QXݓ,Q/;u .\n*@ cʔGJ8tcoM_#s;Xk*H[]Ǯ)%1nȹHhu)\$l=^4 H/czH*bbkU ᎝Sam)kla$h 0"RͱT4ٶ9lNų3NԌ@W*Qk.aӛs.B*!1]2 So*uL!Hf*5u5z< )"M<غE_;8pM4>wlv9ɨZq3{;-s@?2X_ˎ` c; ^>~e6@nBLK\\xwiv"1xg,Ə/Z6??} Ƴ;{Sɖ7pj me'n[񋻦%eG؏~Wl VʖfUww4fҗnIӺ~s3g0p,< loEOQGæIEű0 C0O80* J3 w|OB769~2 OCk^$} #zS_=nop%˹<,Ų68󌳶 x@QE:OY oƔzf_KeS/{+mWF8g5'#6kRdl۽_0Uw_ >4+VKAP6$YCv$;TЏ oVg`>Y@wج;)|JXQBjZ.sj8vA\4]'[-k[}ȿyOFfOD(rߜ UN_j`9X&1}PEz}Bh$ t٨޳Wx?6Mv-z7U=㺆 1V-iuU*v$OR:P5F]GTwa =_`9g9kiBW;h[ȏ;"Eb@V [H28 kEĞ>ٱv^jk]!E zDU"z]BEHiv^L8sRF+_b+Xډ{~= `#> U\ u?ʶ%C&cbukgX9J e˅͌hü`4s !*[%:eg&6URE)^o!FNYy~VӚ{gD 0Ybu"@n˒7V+ ޢDe?'ee2bp}I}`yIJ(4gD@Z'G"Hs'ãw rS=ilF mȊhB 0YtQ6}qۉpK7%1|݋٠-,XmQ KO˸\ a6xx_6_Ap'3Ͳ!Bմq֬` u$S0~*FƱ3%#ʾ7nQX=ӽb6;*݆˻8 21\-Y o &^vfllȘr,rh |G%Beϻd;r׸[2Eĭ]@lѱB삺}A\+0Wuճ[Y W/ecTL@)8cVS]y 8~OLM[UVYQψ{A f}vL0MzVq">iAd6rܰ/ iTpX^ [B7٩0p rXUWSj% 4\~R 9*hcrnೃ-+TRofo\Z$cTJsl#:b/%csti7|.-Al c4#v[sbmHn G 'wRJw{K?vSh"H/A,\[ا ;|[4|U 8Ñls]0`3CFf3qhuށ̨8<}9J C#Ь>B1}wb6VnbO]b n1frz y(YE#WAlyprR#3U錕Y/-/` A/) U7E%\Ww ! 
DP.<*Jog'Kr(WBlT0 KE~lO͖:VNLK*Ly;1\C}v?AvooQ*+fݷmb~#}8Canq8e׍v^0GGc KPQ#=؛ Hq$<,ύ6% -ZWyU`EZ]}̓[fl˘ }# r7u+3;ͽua#㵜sK܃;R>,"ˉNR]\ "BǦE!Ɩ=vT_mZNf5fVo0Pmι >6Q&]juԴs{i25RѯnxUOPfCW>m T{g⌔4HK9?wkkJRVs53r)9k˯w*̉mjB8.[z"Ubf ^2awrb{|P磲*'۰Վ7aj&[bC2ՀaZb`p l-ٔawh~~:2}Djn/M'lq 췢 0|8CK$|З h/^'-UQ7CC,p!k 3FL x6L.G'ѕBTC&v֞8:k=?şuMSaA&R }Zm^OÕ P>#~wA-j'*jvMRƄ;`M7[|5mڑa9/!˖V:8i:p]ʄraCf,wEZ=mjsaq-{0{8ͫVS49H`إ%Do\p)6fx\I.? Y]Yc\ga, :8cpna尶c ^B%`(/@]/Ե'W̡JUEqTm]%[Bob n/Z ނw^]Mx>὜ NCi87Ӱ :.;0Mv vdA>ׅExKŔ(nB@+\`~RцZa|}w[С}ְ2wlYVoKP?5='2\d.Lo~͎ 'ZhഒVO@β~ w'V4it->n`vW[{'+U^ױrƚMk:{3,& .mH>ce+6}Nc81g!.5]~F3&KEHVݥ"niLz71x?.ܰ#"[j"V Z.wXxh{;NQq+uVN9$ou dT`BYa*o'9FM4DzY^O[Rz'|YZym[xdjFᯫˏT9-1c够iQh a*Fp:Jt;nK >GKOLf"h\ܒ{yC_GKBu]0 n1e<[[;}<fkrxwdWlrfU! |ԜAmfE,bkĐ;-CcAH\ZHˆVpܰ|lnW0\fXvۓXW?GH5%YO*`NXSFez7J!\p Ju 8ڏMdֱ)aav(x_%kF(gPᐼEpM8yaQh0I׈/ɕL @;QgBtZ hvsih樞v_Jw]i^9";"> qH(`ۦMs}ASgfqL\VFڳHXG4#2B(-2=8J+AQZ9DXWyQ<8IRV4bK:ˢա"աr_CiiY?KKڜA a#y}%gYM&l X6l2QmWؗRfU+t#4}k_'w3LN,H(g8e.oL$e0<ƮFeSߛz|\#W{[RXZѸ#6G(oJzuj(C% lZ8Zђ.kn 8C @Q&j<; Imyf(xQ+Vag*aӰsvc;c6[mc]'0q )&ۆ>E؍Ms𓾷.y}/LSn=T\ f}m2ld2D~5ͽa0tW`Mu=\7ײlكȒ[)v|`6˷0_XqG]ʊ`/QL(L,5ړJ@*{{~k)9FOCʴ(:lA~wأ YM)i Q6F\:wW|컷k'k݉W:`Bo| `7|U۶=L!aPI[{noPrC8*1vn^ae{Un-{HB~]N B+SGw=aUY,0nk~$~yк [`oe^9j #},])8b*rX\-diHyǦl1,c'!X*nj57B^gDN,V~b9kwse^ufc +_Ga2bqYDlTar0}^1k&5Ho #-f`=fbo- q#;ۆ_S/$Y]=a:OPw ?0fmjC*uBH71bli-\;]3_]Som81/ē7@FZ=`mT_+"`+0S9$&x .dٻN5ZU ֥/,-|uo(+D( Fmuck-gNs1roru.NI'yV#*,\!\T+KH=&Wx.y$cV)0L~ܮ]cs s{VpiHD=FaOLd֘R;6*2s`9h6f"hr+\<8=^fLhZ:8{۪E22DZ) u&#Lw4| xnC! + =C@ 6 etMtl$g'8ćeSL]rgQ3luu%7%??u^'|efa¸S6{ Bb%`ͷ^ >U۞ {(TPw\mJ=%%(s=|eM4zYbV[V{|@lV2bq+W_g{ B[;( ˕ }oc#>8^=}U^6l~P_D2ce=W kEFmQzLxWHd\<ad~gm7e]0?tw`8 2HuM.-h4Oa-|qɀ[סXya.jU/O*O/̊n&7z < &ꐡ5T܉s0 rYTC 0=Fub R,<1&.G0`\ g]Dqh9vpJ"v!q@2N'>JZ V#:1hϭL-ښn8k'cKaB}tInlK9#ՍtA(ׂe@)5#l <WsM7~z"(Ba4k@6*?rEmbv2a*8n}0/0&! ,/VHf8Jꘫ(W4wpc]YŶ͖>)I]TXEn~8B-K?TBx[FXa~jVұbbr_ۋtlVĺeĠ@;ń[?"K 4wpn&ZwBMعE tZZj3faІ2Fmwp}/!l7ˉ*VL"hQMF5XA8%$6NVZsv\mafBy&pzsmZIlcebc0?u |vF!Q\sf }/piow5!TtX゜8Ů[$V:eb{ԗ⾐ְhߌf fYWI>z[) T?x/|ĨϷ !KW[\'9,~`|8 ΐCݲqcyVe6ŢˌG=vt>LMƾ1Pgu!Jgl/T@$m0Yy|jAaﵨp0=ƻID;gr|^9 Cp ظgvPn\ηX6߉ŸOMь:N-GHli7so'_Iz6M>3r^s\:a}Šmvq,-`F#^c.yA,AP쾾ˊ[3pkEڄNJʢU],j9ANhm2GZʭE?IoM%DڔZX>#!i13;uz֔*SYux%h12 S%@hԏʷ^آgqʮ Ta_voBV^$Ș[Ί-F M[53,Es:V1;  +bpX('Y)UDq ."Vn g[ zӭIf|-'62X"EG~V`+vo1hfENRK8ԫ-+H =df$֏Qu2bƠQsy8a _"rڮ^L1pcΜƾ&ld0ė/V2"< X4LgByIFNbl lcS)Mgl`bAw"i?K/|[?>xѮܓ)ֶ!ss"Kʰ M`8PzX2^9n6k?B~6HۍM\'h] Yh,eڠ }7NAS\޴gjlتlA(̶QN[^e[LJ"/Շ{iq&ü{f%eC=@vkyAyn2`&"Lvjf8~;dYb|=켂+f$\^+{~L'`1 > Wp38u o1$Т*!jlUʌ2bYFM˞R P]K8\ϦX.ґ?7! ~s)v5srz0f%B8Dt2R`o\N]vν@Ì0tPs) )NQ:<*/B}>E kXBty`]úw Nf h|lzTj&2"L;̟;۱7 2ܼ|tazz .f{綗\b%N L}B9D= rCw, ޑXZ\oRִ?^I텲!bͩ+}G uu|b'8"DΜ%r?);鶇VL+\u nY@񸐭Z@yÍ4X=⦾(}8 lG~AP#7:Oe~ mlm$5uojx#;o߬;~'+w002~q9&7k/*s^ǀS uКFPCtla#Ry;I E]POeC$p|u`T1[ ") XTnUεLQ_v̘4fz}$wHlaZRˊE{eJHtuq;:1/'n7BG2F9 Z.ֺ=*to_o﫬!3jk@7;on`mf_,,zP"tn3ɈmW2"D/AĀ}il<)N͝DKM\GǷ BҬ}Tj+NjkPU,f)RhE%?{Id6C">mfi`$`A? yMsaY4TEX \\P1|wzBV1rU~'`?Sn}j1euF`d88])EUhFi+ڲUn'@PmQ{i ,8I8zgxs^>i)z=[*sݻt~d6Tۦ%y+,TL&xaۅxUG;5s]k9Sl-M.v Qy evuǪXjTQsXA2{V:ݼ| ?XO{fWX`ib|8⤎?S9 /$r{v#l%)[`@| qu 쬜D\ TfA?W^ZH0 Ev-?kzEd5X+ui6g?J.+-zgp^\.i ODbO5E6Ʂynﳋ( 8( sD!8!v|[dɼ&=c7:I~%Ҽ'8RSM;tzx"-j8G-a&G1X_onTWU=.N U2.4E{%.񎟶!2ȵ];ح-+ ؿ2M~JV-Ysa1@֏\XH- Lf6ڇIwD؍wY^y^U*#mk!QygR5:=/Nˉ-θK&չ]gqڭvsQ:S`نqFAd8wOi4^*?K΀6V[eJ-O|v|;S`Ij^Θ{5FQ!;:'Nmla3Ĝ+˓! 
qkӄ {V՚_>Ti͗slc"6(W_&k( Pbw̘\ޭΑGGܾ,^} -wO(F/[┍(DkzZq|0ӫ E|oz jiYqm|5jq9mjh(=Z_wח~GU欞%7  L\ܳ,28h礍6zh.J,BC3=`1VXGw=F\nw:CC]op,vHy}ÐBV@1G '`F t9TIξ) +&{3 )7 Ь A,ao;'|dV+ݔ9Qw>exd m-o/'ӣ&B7,:;>J<}(PC㖱>Z)T˧A {Ѐj:l Sԝ0U}zV`>m3Z̍u"afptR&S %)*bl[*rpGl5!ssb s#iw!GZΧwDrNH92{#|@_(30SNTu`9ü "d5 Gua}z .Ƕ=B!A드!jM")SS1cQFzQ|MHҟ6t |W*L@v>ڸW}ThlsZe3KʌyuMhpH<챐=&R/DajM'BbI"oz6v%5ïh sSOpC _0[wŎq}PۮY7f6kBõ|\^XH<$¸𢮱 5۶D>#YRWC˺* 3t;,nOR0"j'X,KjmȬ^PCR@r *HGZ郪V (' KGկ|y!6bH֍ TA穭W#T̞^X[¨KXGH?t2pꝡ3=~W_xae#qɼ2i]cC[ۤ êj˾7w*WDwV[h0}ƚBא1:+JdBHӹxΨ2~fm?'dO,.-bu꫁~ %C /-gtdɥ`%%J [_96ho,`ge_A¹Oe^}9 ]Dl#"Z}h+iOS&=Tavva֋ Z(0h'ACU &gSZTqm ?l3ecŢC\s`@uusMS ag+Jd՞~Cs1Mȝ6b]zR:g. b aKç5(j;/@(!bcT͗s(LPˏ0bP"1ऺvvGsJD[#,*@,yݘ~!lmH@Y`daOV0 ,DZkg !"%gb:](O6 ֪`!2 ݄nqUB9` ?Y ίUR/Piu *8a# x}{lV;'gX;A;X*͇!Ͻz?CԮTՈpav tjyb߀˜r[@PI(WI똗~#}xpK>ՄSmyBYcChߤ@(FUW^_Ʈ(HTbqEL7Q%i[}QJ~Xf#X7(UϿ#<6nqf*6.mIp_P=Ik9@09닺oQəY}RMCfYZ^Ţa7.^5 lZRؠ8U1,[]s}{aR()70Mq1ĺ7¨ 3Yݑx+ğ*{4@Yd;h็2v+!8[>y$ Q:ޔR5bQ|]lCHPx2zaw٬M )TXQ(д 0JJ_RČZm o ڛ=7 B=`|ө/z=JQ] fr[)A;ןA25pC{.5xюߴB9 7 /=Nq:ea%8 &_ fF -R&_yUկޚJz|C.>"'~Yo+aZ0 ͷ _Nh? J?W'/"z%?4MVVZW7IL,v,ץ< RW.ZuQeT2+6xEboLgѐsyԈj!Z, .xq:jXh3]3w{J̇VR YlT{"!i$*TMg>Xڶ"Ƒ>Wqћ`w @l/VO|Ց3A-u'/W(l) 2p&%%x )O[i+I8 Orloʰ˱` 7_2D=vw`8`|/6횰oiޝAR=Sx-Hw-k. v˃x bvKd<.Ћh(ᄰl HCY0N%P:&o-7S ?|F6:6YgH>N/@:K܊tB3!7$"2Z$ֶ?'Ϥ6 ίfxb55-`1jE2zFyoU=ov7i:۵N# 5qfF ~ f *{>Yt^(iǏ+DN4BOp/ )?'Pd+7R;+;3#Ov}^v.|]*srV m5V\{ Eg̷XhNBI<1m1Y6n"P<&(PGcnW +`0\`y}aZ>O9vG[E~~-=ۯB3˸x2]zAl5W%GE2ɃD$#&SS3Ӄ`7:FG=[G$綪Qit_3+ <'\uv$M(_mR">ԞXr5qo)JؗҪf\pӒhU=C3@6۶Ѽ·Bqӟ,sJ h4َ'^W"03J $tE($L37i{`Ќ<_cܛYc]zt>n/0fuH*k7ĸ}>)yP5S,? 6O}?W-/jEU]w?4ԭ&qP& Jԟǎ hەcS:S'a6ITl"Iv1:\c65ؖ\`ЁhEjdaW 屜 cTiQ`Jy gMlMW_iJтyip^>aF0 2E^lVnFJ?a_4$X MT D<@]wd=Eqt2Nv%t1){#~#tOY\'}z/c6涂^}JegvR1 xâ~ysaWc6{'0}2\.4Cq?5aQwQk|q4a%4Tҗ4i)7~̣Y0pJSB#$dgD0z67*r5ZEyG^{ٹ]1fxM6N|VOY=-i]'%i(4G8*M8 [uN3۶yt[ aYPkC+Fuxk٠K:ݺ]ABO)8HdQA`@M.<P5.+n@O,)čaIFkvƳRL+P 1v88~nΎ<>hWGy_aP@dSl"!aqvc&o 3c^|-muf|JJ~'d,F(dQF%jN*8 e>L UErMvxlYcap`EՃzR([W'c,^7]ڽ~0z.-Q:;EM#`XzG+^⨽ 3x/heا^@&g;k\<6 ,{Kԭ6!~F+qo "ISS8pSΎ簷[&bBԂr|䈑24#] iw=W21yOCE726xb' /D]cŇ^LV0mcwȂH?1.|5 v)&ϰ-j3ҚF4;)SUepe_+hWIcy!j̪;V(0؇8oߝ[a@Kr4Î-XY4K1\E;}*x$O7J>&)͍s[@ݗ4v!W-O)c3>l~7ph{WI dU xTIEڪNI08IRpi/n ZiAs38>0*P1Ѿ~< mQEɨ͐ND-f 6|&Vic`9V`B4;DA|#dMe c^d%}0DVvw2X^٥5Y)&8X-*D휉Qڃ`^_iF6?ӣ̂Ro piA*,;=HhA9X& Lsldv#2.³ YeN ]<n azi )d5ჿ<.LeVk)=)*qPkG/Wwm}bW|'7f߱j fAK;"y)U1٭1myZz4MjPRtSdź.j8e'3c< Z2=a)gCNZ5T~FF6̈55NfLO3-jmW..-7ݥyϬrCFd ;L$N٦OQ=ff?.Uu bVy*0z|%t@үN(D`/te̞#{`jJ}7*v&1gW!E_gPgHR׾lCs_?cn-]wtjkS;; T2/j+_fCEJI{OMn_,9-.nsk5p5kUƸ0ǯ s=[,5 }Bg`,5ί0F"M2b9v߹W(R,c?Crۡ[!,?_o?:ˤg(YQwuñnF&( q|(l%U 3ϕ`} E.̒nNfHk}dkBlb^̠3p59&rj"A"溹gHЩ׋u!1?y+SN2DuG_ !ymNIL`%9p8ؒ=cgw OȪxn"HD]hf /kdկKSN!I"  2!vMWn̠S )&Y}‘Ă;YucaΉDsjN6_(_V{:̭B~nsznQU 0GcOQqxkԣi#"C,J Ce>Tgg,'޼sVjIuYZy=-0mp k:3۷^ciT&S$IԎ6#W0We;Ȍ*}@tO G6 g/ k(0c)QKDvxO΃g*]z6}N=E0VG-t~qjm\ժU`5;oLi~A".6z ݵxIַ줍s-SHHgɔ<ʎJpZ;vy/alfuc'4 L|Q;;%s|gӏ-jn4dpihfoXB?ṕcn_Œ <7)1+kR } Y7ʁ7!0/B<=?knLn1z ?+Ϝrx@qJn*OGHEhUГď"|/(9ykQ\mqhS_Evy^ q/tZ_O,g@-'<ى(M/szI|U!lܺ5is3 ;n!V7v8D%y~'""HvgBT(Yx6*)zh:[B{m]pKqHXő_(s[B[,Vzco0ԯB-t7gob9ߓ?PдiKteӎ``JЋb$_ls㓖!8L%&9I1Ვo.L_hI1것&|a[s{kuz}+S_4mZx{ia˽\xIaLٓӑA1zv괝Ulf|Td=;3}=w>Eh\,ކQΕ'zЬ/fuGDzLx?8?E-J i)Y+T-o!g- ov_۷ga\:+*smrݶ 6xYriV0u- i/Y~Ϭ7[Axs㹠i)H M;Z N-Dya`Tm]$T8)ѿ_]6BNYB*ϕH[RF:=D~ b;EP*CctO\y,'( :C~%73fJ+<-˴6#KNjS#VsLuv/u݅; !7˷pýǵ|%0bp fh[&C]+2CtYJO]f U9kx:|٧,2.4'v/ӕɴ64v0-qm%3f \ ot*WM/~ۧ9:qk\3w.usC89&ztt%y~_c^m(B۱]}֠gn mO[̗Ɏe!&FE!L5[w̱mn2#{ؘOCnz#*`dx)8jkecvYgdKzAn-3xY ue=q[9evCloPN*B'{6Q׷/*0k~||TEolW#֭aN[z^y>a}9Z~ fٌ"EWb+4|0-M8]{Ljn[MZkzka䈷&(*x ABok{q">B;;^2A&x%`wb} 1Su{ &AOh0)k0!E<90 DӺlX?f}4_ho#8봙pڧJ-|v84dNa~پ 
){{y&uiώ,OՎo2Cn/2*lu.z+ۺho.Ϗ+'dyV_߬zʰ04 q8:96HL#XKrGZ7akih 16DmL(y&elUSwqQ/6vRU!(C  @;e @h~qmxFˇkuKeɟ3r]d-ޒQl3c3ύ\:yyZds<SVCY?,yUxd8H]YDq`=})S*Z棗i׼ "wqn'!84߃cwܹ C@"HFek_[jIYTq+ۼn<@bVxj4rIe(oN[tau*â M>qC</%4Xv8Dg;hJA>OЍWg~/BERIP!|Wy ܹRw/wzu*tɱ"=ֲ5{BaJ<ʄGĦPhӡagIgO`"a?|/\8!brqrQ2s{.~$qU9Nf͸uУ ˁ~ߪ48ˊUIu4uvj'eJٻjHϛy|4F$bYʥ=a""FlI1x:pw[T."쀽v6d_gH VxPS]8ɴ19+$iG+;BFWgB@ ({1ݺo>fUwY7ȹMu߳ zT5ekQȄVg-cMgY!7T1O-$*ԉAm T&K GT!CTF 4)a3>g䢲B<= vC~ $72-B6@0rGГ}Ae iQ7Ol>*m6 ]\ջQ $.O%9{FAQ!B@Q60f*MFi!h$\_vHht<vô!/Z)' EdK@V{3µDYmEض , ͢8y,xo2N)$ ,szL h.Gjt^,Q(潝dFUQE+]p6ckQ rs!{?j=t߂h KOqMq'RY+Y0wD'0NBw^Ŋ=[)tT+ɓr9'߀2xo2 j#as9K|?x !aCAF2R\%mJҋ kmTmPm#\vrř-qޘN.+wAz?jcufA# 5H TJ-u} (0k˃S6 iWX &q:jDL=jkt-a]rP`̀NCm>cdؙY;>*@21[ |ujvq|`>ޒV]/b׳3HݺGZVE G{8=VoLFUNI>lș_yaEhŽF$, 6],:%u2U.,Ӈd;P>N~FM,:eeo"ww?f}Wo ת¶+Vf{{Pk R)e4yjG0x~81scʉ%V~[Qytbc %3\j`DW%Xq3?iv,_jf@Ӣ pg(mWmQnE߅39Q;gOb0_ϿiW#+ RNĻsf(6˦|O޵K*(%8"ܙ#9 3|®F_AaT ؙg q5;))zl˂-u"éu9ܼW7S(&}sq&gmq^XA;З /΁McATԶHTS&7hl& ،O Q&ҙ*FB8:c˄Rw^"J@K3^4ޒ5; ,[OU׏;a34+YeAȕd2YM:QI7MO/lT cubK6%0l^ꐎ-xLr"n2fQrJtBOxϹHeФ ,U%c^J .ah~k}#۾[L[i;Lr,죨>i5؛@-O O:uYvPǧmq#<F`-Tvg UQ%<~V4U}n[D/ I [Cq7>FsdywK]z@L'O\n,k%$(+MlR]$xF:VbbrvuF1PВgBSWDO;QP]W\g _9os8nmhIԥq\7AWnLd"=,gίyքm{'ƠYhx0gee PoSE&|2X Q*qm _ Ӕf%FO8Pu:Gׂ( DP] a6T?tGlbz&EVazHOYy.׳m7;([xx7@Xs#JɋtD+k)d2C6u_,, R*kF}}xvy]1ǸGF̼a 2jͿ)/y}Ϧ3 f *#ψ3bׯ^ #۬tAķP?! xLIox0sElCqV.8iyfx'MR|j)6;ƽ(@"m¾Sx:v~ty%j> 5A.=׳-~Auo#Kl%39e?m I&` UHxt(·\X߇qs3>]C{( 57>jօX5.J3EKExgz߲ #C~C $L%w/|=׮T; R"Qi;hv0=ZS 7po|-) 4y;`Ri)V:J=ε,Bv;;.+Hp׳tWȲ6 ym3ZCwZDžoݵ0U)Z\]5]l6z:kv*=_S>q*H6h$Ir,:nD-+\3_{oaF7\GS 0(9.]S,υQ3w߹|N5KCʏNM#sC`i}%L= a)6:4ŦFjG0"ZmFPUh 4\6c߿~>D&&Ɛsm={sj{ tDϑyA}6ʠ _4ח 7ڱ8˾Z uȰQ\؊ufJFÞHX1>kseӌ5mtz\q] 6cm .0bsp;Bdqbњ*zfx#s*嶵Jj3Hʄ]>K|cnQD#_fm=0 fe>6TA؋WVNa$ 2dI>P]QgH=h<' 9fN'BG 9ZjdQt!B P۞͵1$SCfh3N€a"jf̕ 9B3x*F !hc$jY(܃j (=SUf&!hYq%KOWy*0SXM&|<(a9';߿_ #nq 'ز?_<ZϚ)B8,pj ev7o:*cne+5ڼ :N>\ ?y-' ':N"ٙGv7,·A>!¨Bi11^NG2͗80v̎N֐pU뢡d*¨ M_81n1g8oeۉP4v(jKEy?#DoUal;ߕW:^cʜ\8- ltq#}j λlA WhקdbnT@m(Z M'mDfٳ0v-Bd !k]g0kcSiuFv=foV=vCe,Φ;"'9HH8ͩ%G<;9]/m(2#C;6} i=IugE״r&=nZ=%h9I·Ӫ'?7c~+Qb:=0YF708RXɉQK\ qd\;D\z^km+t0dp !WC(,bՙ܃Ho^󅗔DJ6Yrw^DՉ(nb%+sc#>˓aTɭ{h*spﱺ|S:663#MXL̮?`meWBOS> y!xB,{K=WdE#xG>\rkd~A Q%CH.w!{^.g4m]!9>sȿqɰ.n&re{TǨ%Qͨ ISmh9+ f0K}fkהIVz$̽]C]L63c%颦Nka{{&UD7a-٩f4xbYHQML0yC5Էi[}epP8o4N9d˜J`tz5UXȧjNɛkZ[/J_p.]t"yX؉aCi_t4`otL pfrNL{PK_9'zU>[z=|̆,vT:Se{OJEpvU,fp0%xQGX:g:'gўAIEg$Fƍ>Ǵ6qeiz 9n\gV*ۚZCY MEt`!XsrXoQ?mUXs2FN~w=Z6- +F+7DUz川PVi{9]$$$k?+Kܪ"Y4=fnO\$|(rvne+n䬺}sO1ȼ~y6+iw]vf-3}.ﯬQF:^tt]tlIT2^ a`EVϰ3NȤϏ[LE6Nw`*}$}_ܳeSMfTw}] |@˱=aË~П+3T{xV8‡ggi4l@);G#箰 mxe[L{kXa J #Ce&U*Afuh]u@Lwnn!yzڌxJwhu#-G1g"|QK+?ge"!ȁuGi3X:jz+3R[?ue[Tm Gcaץ2f? ;$W8#ϡG04 G?]^y~ Y_.e ހ=jmYa]H 8*80mU@88p%8, -ܥ?bO Ս3 EFwbN9Yc]^ٟ؇f\k,F8DF-1 (:n^vڙ#bIt"uWe61bvTxװ/OyV-gW=TfM=Oϫo "RO̶B |wF3 (Ҁ>N񺘲Lsp9g-E1"}cf &Bm͇H-,Z<+;2`>_yL-DJ6͗J Cˋ`PGW/7BX?FpI}sa!tyƢ0f1*C9dCfq/lͳcw7'1 E%uszkւdǗ*kjڴ_#/H(pjϫSJmU/k3n%JV ,-lgD5*/ul7!/U»&#y Չ-JlrͨYcuf1s<—J!@0_XCX`>髄@ANݕyaiio{=WfLVAR5M⮯v ŭdΉzo)5d )q ?PӼ/* Pyu-L>QMa7 b,u|i"7 {jgG$^9^g!RRFhR6MN;&Uz,B vJVRlR%fuXP/'nr 벾7o9hƕ̇ Өp~رn /6Ǵ7n1Z6) ߙ0[MOiW7;F]Ӌ%q^FFY~#%FDF#N'OM^Wa(qAn5c%jk>1ebZ}mm/-lieCfTO};JY'Dž> wB-ֱu$oĀȾ㧓E[?~]d TMy) Lݱbi.XzN5eDԷ)gq27LtD2qǜ.ҬcJ><۾lkY; )X&9N-"H y٭`z̢6~k/̧Ùl%ɁP9u# `V_p[APفi }28̻vͦ8CH wFx]lxZX3s"AQl5tZ;!Gra;Q9l""!+bԺWLY/tgy89w{W9|Ъ<;7*Vޏh:-=]PJ`llLF=8:eDqjٗ'̃PR KP#ə ST6m. l!޾ꬵ${ 9(yjP2 Ál*]H/̾qPLOv{iEgޟsFcŏEDJ@QtZ n؜Tēee ٝwkV qI<!3Y1+*9a[{~!ؾ{ Dj7O({Ȟ =^7k-@Wbr_ju2oefj%~x!aRryvviF)u1@7gA0>7W>1 $ ϿU`8զ7BuB3(s-vp! 
D i5>U`c 1c|g޵]oghg3KɁUݯDru:_^?BO^I*MQuwE ؿh4:RnE87ɥɾ2؞ݮ0D .aCtn_+sz;tɵ $StnFNY3Z,3⒥oQe1L AlaWGfa|+HlfXj⚶Rv~OE~[)V[S꼶޶ƨS؁H`DQmJ{,+jdĞTۯL'@0(q654X^֐rnaZsdI/oBzS4#Ou/ʎSķY4GoX|dEQ[.]_aIYTi ̰\GK +"ݱHi#}3U#\\Ld{ t: PegDx)u6(Di͹j]F 4S>z`'~cⱇ݅CM!B|yb'.KbEPHQ[n3ɲ-/ %+ߒZ-o2"#3޵=h(Tdz\-}*u,o~d9Xl)?FIW'S\`M#\l F!e(QeQuf*s}b;gdۘƐ[w(e'\*Ķs2Mja&f'jeK_-g |{0A=r}vYΡ)KMhNrI-rO*aőT1j=#z@v';A,e+лѶ]i0MJpьLΪ8æP.WM.R@ͷ` WSs9S~.565BD899(ظխǪ3stC"4uφR+ @;sq/<H}k˫{ ꨺?qˢ e3?:$ 8IC*RBE.b9Sq-Wv".ſ]zGN'qayHQ5zJOx}`+ jҰ  f/$Z-z-[c~Onl֧a0Lf{y _J8xt"7p)2ܝQ)鰛l6^k=aL[@]G^߅ mJ-4OE.~JG!18:5+0\Hc]IJZ X'jwr}Ov ᕌ/)AZa#c C8Bרwᩅk>ɹo~H5CO I|SopxZ[jQO7KZ͇?;liڋ @۱؃?coB Fƚyw+6ĞJYX$C.u>> =u7xa k7E6µKH +565n"XϐuF ,ehX+}YOq976udW 3 oY˝U@̍=)vPw.2J⏆ =cSFe8OB7@lPaD;Uۊ_$2mGpMZq>; E%96)fGeeFpOe IB#sWyPr]$26X_8%Ā#:vi]J}(d6 Unp1 *r`$IO4b5P;V=gmQEdҎj'gh÷Lq35],N{vя? Fk Z}e3XbT{Tf "JejlQL$Z=@Ob2n=|L]x=[m2r";"9H(^:>E.D(o^)m{iyLQ*){<-g_(l,9U]L,/wr|dʕ^M&)_9&[BoMOc'8ƬQ]^~ID½BD0΄ȕWn&aA-Ɂm ]lnkx'OO}wN <}3eA ʻ]"Gǒ(6+a9r^/l-Mc=aj)_ 2n?BW,kj¦|Ea(sXh0ܰ(r`jpHD1 !by{'qΌ_jpN3V/jivO$m og"'#5z2V;Y{/+X5WNZ_\ QL+ݕ yr& ](.>Xp%LXV+jNUf- ~atӕx^Mɳ &zS۔ 3vO+Yh~QQgĩEvOaRi郤 +{*~;/0zh?阠 o?K1S+tՇ0ȢQQk/l { Z-]&Y?=0܇;g#'-6zMՓw~YT&oxͱ ]ߔ8x*|¤uٍ`y{DrݏעгKR\aѫҫc^= ~>Z}alSvkx ij#Fp_3s` 3ua O$:Ɔe"B7.:YUPIU%u-^4Y8_0R3 'Uges~{A(ˌ!ޙG +f x)tnz F6sD?#F݋;sȮ-?eE-\Ѣ^8Gc] [B#؄Sp\/:;3P=LQi[Gmt\9:=CR1"dhXБ~Fr}h.<9Filʠ\pCuIj r<_#: `okĥ5t3;q[#/*@BW˳-d}Fؚ 5{';Yo[3F;3p~i-֢^ӎA+g%a4A׾o 7s\gS}/'㈋y7&128ȱa@W;Ά>&%؎ZM>lLa:ԢmSa|7Zm&յTLi a !m|c=tXx̆hrE:3h@'ذu: it8'‹?q&8\nxL. 0O]UDz06T2mhn˶Q@QԢG=beA^0c;6A,lIca|嫷Bk߭0v`K1wfхQÓ;gIDEc60v4L(bMdnaA<溅w"S%'a9exMC;4DT7Rz%t"hS+fXŴxKӉx!_&xX ?4 clrLb`SG7 %e#W>;cøyw$aRbih9*@Q~=P4uie)р9\q U1ܺo95#9Iv=+ک0ISPZGsHu >1ǛK40 jl ?lЉTkҴX>Z|{Q~>碅׳{Zvq,D=(Qb4_y}wBCUyqr@gȣ*+iWaߓ5X%wU"*g>Fo=:ܜ{G̚x+J(jG/Iur(/N*5?\V^֗3?J_izf5b-sƕs_(Ǭlhn2rh qZv&Mr$JaAB{ ) F"36yBXT`;ufJ] I$UX (, PG,:e9de7GvَnJI.W:DǣÚ ={W:chr]]7`M)o{_$Mo'tCl;gz-; m蚾lMT&vl O|4C5jiftj{vS!]:H <S8t`c3XVN,QXcZ>۾шp<}{lVYBbmJyG``xy,otyW;,/-Gjj  Ntnc lq0S<{©3m#c0 …3)+afG_'@3r M(eO| fxX W>v9"{K7y}.`(s!<׎#4{m *P w\BFj=H8Fo.|3YӪ2 = &Zczs[hre q>J٠Who*h l Z S2ʫ.S$ ևlPO˵" ~BOZcac"{[3Gq}Ha(#xKEI^ݫ/J1u :p"?۟ӧE2wl}v>m0Y!zJlwaIP5k:#g7GȤP긟6 5]mJ~AD(n^ݮMO|NqgCɳ"5oh6=xS2:V_`M|?!--~ C]]]DwmQ?-&>.[2!mJ9G"lD|GΌ0"SvN l沣|}IQKuHұh$wll^5n]YQ'(~.x"v{-#v_n;pgf71ڻۆCyv/źr3PSmHhi϶#Ub0-OF7J}K;_aB6Mg,gr`fWqMpZac_ѯ0΍*;~݀ݹPkSiD5,4 g9];}XDpЖ/Rg/Rj@C]Dׅ#hazK0mKYcj5߶RƋwz\X?%~ _C[=ZtÜ?F:êiѷB: ]_f{NCӉ<2qs~Hk9h%/9>(3MR ٢7d plP{Oʳ0]2㐦"sS}لbG9vc}vrEb܊q+JV/ RQƺe?ݴ>*p]\W:eQ i$}}5%zIR FYh"ǟt7y,_5szF!haptsM3GgAhBK] FC0WthBW`|iF9y(AN_ӵGY8>CS$xK1>Mi{ /6calĤz݊[pY"K9v<,r@lڟ٬NkH;ƞN #Sn%10J[S8 Na+Im\dIۚ b> Š'al,_69^̻70HaPk N#x$϶J 5d~COvKFl{SDbNMaB/3(O =I+`Ip }f v.OBR?%eM[0]c/ .\?]%=)@/ ZO&JETVi]ugJvy?'sBi8G0p+~y8Z{3O6XwԈYbPY ȏ8$:6'Qξ8?_=.;S!CNyT LwO%F$8΅@xF{צC"m|/ݶu F+[_=Ol7'1k;xW( [ }+Ñ 6wKH7 ylk[ɘ"zU*g;tٕ@0N ԗj(<@8 1@tURmc ]wy`V4-ŧCLQobd_')t؄q8fRk0'% R9 oT !YpS5GuK3X;[`>YO,Ms}iڕfc(PYfhc;?7u84s}JYlU7aN.m9>!/G*} [H)ᄊ1<_qf p54Q~_Ŷ6|L~]z;QqjuDb;Ğvx#3mnc/J{O~Vݱ.prSPj.'b(̃|!]* O۱jUL_%W~&D.;̮doeA-e=-Ctc>\9lp_z(K{)\8B ݙH>st_wv"[Uƃr*цj_}r[_ȅЃ=+B]5t䫇ȝ;? &g>wWΪ G;<*ҲZ3AB꤉[f66<}1Jip&Rw'jb*&M0rGv9n7NdD\6׼MS˶1XtB; d&}drz*S30F6C.6-iVe`G6Ilʭ׵aJw]u=T#f'![d`.@ƿz"S4óL&=0RIdGmTJ06sEڞ"A8L\LnLj9?2~}<<&{.Ӹu챛Ca0Ƅ/ ^GYƠb$AuD, Lb4u]x&(L4#/?6ƹe\H&,x\@8.ĴA-zo(WD;+\!~6A&rbF$`YÜB /m?@_%`K`AIk`G&fy]{h?HNY߷7_dst,RYIFy;O5lfL/SOq7R)up''0 `W#΢ߑ9Yl&{&. 
ߚ2;DpRbmFI0hlp_Xy'au=4?Q#,@ 5vtA<"8oa8'A5Lރmh\/6*q_3kbuFnùS&Tc_c߅Qv ~WC1PpBUGPevźRS(k2i~.*'S߽A- 7w ɻ(!se>`ta)I5M؞h2bH);Չ_DH3< ݶ=O|mOD"+B<07~g4GermPnF,wz'8Ya&bPe}y#!̫j×>GY7\Wm!`UlzOLc",غ[-o`UqhB&g%ϖ,h&rFf%-Hyt+/s;3ŒDȾ v&W.Зt*-\Ƕ~N=CdaT)cdMzO"8f$>)$ d.w#JVjpЦ(63W{&ugA-&ôĩS OwLeѵ*3ŹX>0c gN&G:f9aNeS)T"3(Ľe锆ở*&pO̳j@5j;-šӓg`ڕ*m,Az1\Ρ.9M/1ݰ8 */m񜁧;xP_Ƭ2J%4H =//^<$S"NKR7NCDx;^un0f;*9@\晽t,9 `<ou,)v*Up.f(u3  9,>\S ͪKs~4L?]]@fi,Bgթ\dbJԑ:]hm k~/6nν# v\0w++49=mTƖ{*܊ݩw!o {0YS<2jϋnPLg9T^Y6F{ԯ茣9`'#Əzb຺?gÂm@uJ!r& sz CR1g豥&&ksυC]̶hJK_:PJZ5-?t\2X2+&Bc4_9O9ł8}&w=eZԯM4g/?َhvydQ!Y+cʖ*K $y4u'eb^?{Xr=xGdPثVN~H['a-0b/8&S]\ֈV|v=xMWXņcS*W%]"ۤmTj-EB'LYYlކM%b])[m$N dT5k]÷1qrF\(uKٽJMG:^zZ@.oCu8ZW1}טh7(,mi Muq{+UTI5wl_E(b~oYO}B헱H1_VJ6r4ɗ!pCm"i 3YDqnc5{vei'<F!Fh|''i/|+be \/t'K'pϊ,}(U98eqKm(~z | f90ہ(Q VMݖpwjc׀ FyύjESH>v, ՍST ؖ{eC.`D&!8 7nbmJfAs!peu^:Y?N<@͸ D3aR0F,ugDmUs_W T.pµ蹫X˼6,î?N<ABӦ EQϪ[.g\b?BW!|+ jBo0i~T&RٹbP|ͦrR>)uHfI®Jm6GVR[vyGd)c6JVJcy5HbK}F)ĠF)WFzf8 )`1Dow]"(/F&[az t Vol$Tlz>wc30rM)ڇt _lu6)2J~ꤨQDܹcRC#4s\2nCU +bo@ɬi_,hUǬ%n"cT88I[i*r3f:*K?Wi=BdP'#R< vz'#}^_u)Yr/@<6GS˟Sx2̳yKQ+N/3!ul\d,<ݟHg'HХNF1z/kX,3C89&}mAҐ:=^2l/oQO8YDwߵ?20@3K3C97 ȒR1t8\-qDL5AT= WfK|-j#]vX(Ti½Yg?^TOyŧmOfK 1%eڜ2\$WsaDǙt[ A#Un3z~}v=de˘j p;-jr^X*RYBszpo\΢T*]f7KuA($!cA{e?qsB e-eeCs6C+Z-K7D.D$gRM\]2/G)$};eE(ve4l{(33ìzr"LeqM9VPuϼng%#NٰڈOn-coh0Xo>SrmN&oja^d-;Pq֎Y]#vtDH'_g♌9K[ K3庝Gbpy˘`c=n] UyUaMQ6Ғ Z}~ˆ$!bL;{+`bf,Ne⛪8 5DU^q(\&:LN(R9Ʊ71tw\AYh0鱨x^;GWtV& :WP`U89zŋ]XcNΔ'ڵmP[ ӗ 4{T?.*i pu~EX8eQhTKtRj|yO}iAUQ6+}*=Ye:d(x;$i\SVۥ89zf2#Y٥k3 .GSGK_a2 `eC2pjre$խ^}RY":a,8[2BPQ\y?[A\ԘFik}z>_OJ Py~vӃ0j9{aaNΆfZŦێŔY#6dABfd UEHNF'cZ`0A*g惭7!yfQWo-&MfvNP&{i5[Zv |r!娪fox:S6d5<.'CZ|Vs$&t?WmG52˅Culd-(DFf; \ O\ J4 ۛ٢h9;UK|QGjZ/Z>J ]S=` N8VʱǛ2AjPcE(1X;s3i е 7x 3{~ {?$2{)h62Js5M88}ŸG]68gޏUG3 5|ج`ƅ$_1H;oobaɚRAwxHS=XObwbakÔnVBܼc4'|X&жc}Afi2 aқ u&nf~.*90\悯ݰ]#9sk؂H?ǝ4R ˗*Saa ꉘ^3-[^#ձƬ2ԡ DT\l^ܷH[8|[?D|$pd`^pC=:~(aݡ cE~x0vtZKpx* Φ̎Pj5傍n=EK=_m]~5#5Y pL),[QG;Y۵{19޳d Zg}CJOi3w )8Q AT4Ѿ<^P< #mρ/r"zs֔bFjŏkL3ޱ٦HX#սV{#'˅}qo^0y"i}=5Ov4P&{YlCUM?^Ī>]-TʜC!ws*01WWlqşE>m(Q_waN-){5=Z8Ύxm$̍[Lp1omE3ڻ IPa OPݠYW:֝5|yU 1A|Ni‘P9+lqWQ&v,O!jj}F vm;DQxE&>Q)"'.ݻvXt.IʂXr~PpVךOmŔ\ E?ڡ[/˞arr^Eӟϝٕ @MЩl,eXL|#\Pm#A!H P|j8oO'gst1?g׫~{GkvV!6^pWC=z&țUͭCe*a⛅vVeEz[/{%nSc W7Y䐊V1{08V4/:-~^ϿkkGT9! 
DY Cd1o] ٓnQQG@U ML r8C\eX+O0 gѿ5 5j^eҬhT`K;$ eGouVUj#|ǽ6=>U$4r򁇏~~̗Q8}Nq)Ұc;#^gC+|08Ғ 셖ǿ>k 낲w> OˎgۑDѿL)kU $#ggHΫW:7#o8 ÕlyrM7%#N@HX$JRETQ-#R'r̲~ Z qLa:*_؃(DW(N9o1NO=#X$yΝlAΖxc}Qӿ$"wqHq铆qɟ^[uan6^zɑtGvءF{(c)FT#5Б ՛ؑ=jۉg0և#@}9ԯ_ 74@Kjs*uzױ/\n0P8`MhΞBqh~y#ބ4H'bbuC GrAe,ı8 GQ'~ًK ߀];~T4[CܡDEݑ՘+28E 2H@Q0ƟHX?tRk`E5Rfv~i_wi:'y# |al3lҝpjvs P~\#S^@4/6Ua;o[XȢA:,4F#KpRK5TPgHq H& `߽k<4~ 0>_ߴGk&EUC j1LRM\4Ek8p ܷSmXVIh7 AMQE:DG8]yĮ_D-3\فiFA!N>е:'#s"*{:r6wϰQdeF€n|+ c[SDqD9n\T5Eer}% e=nx=Z*$ԇQ"jBGiXB]fhrIQI>ƸyQ^˄Q0 㠃.} ~/ZҖPfNب_| ^E*'F{7n/¨aI9Ud5ࣣuĴLg))d FE!ND_ ɭudF%H ={fFf8vݭðm9(5+um 7@7MzͳBIaƒԫG"N0C,Z,ٝ \Xg9{5V"Ye~k-n&߬V^%%!u]dMF_Dݭ[V6Oeɔ_WoIV輏ZP,.mMJ ":,}ҼjJAD+K+W ;ewVô0~9~Ì7-mg϶O&;DuPZRggȹ]Q,Dj0la#26Wa^StiRM 29[gsLdbs\Lnka /=h{K_*·Oc[HFVqo-~!:#{v&,hGRtb~R; 5p`U~39,tF[?ET|E[Kv8t)o.dvXYP>r%L0 ᅂuƫ_ʰ%Z'۰M&LީFHٯRgPɹǯFJJ[s?H\ṍґu㳿pp'[7PV$:]V/PjQdf)u ՈxޔA yŔ`A9= &ajs69)m/0 =W#mn osk-)˨q3*>60ߤDU[ӌp4EC+>m^/&62~U?'SSYOiɒy5tj Sa/|OO9>5 qMcg#1'L[2()&n0|oОc_x!:W *_4qF=jB|4A3>~5#QI9Tmwnץ݋/T?LV+z^Wt/ٌHn\-i{ޚ4c1K[^}4mc}PZ:ɺA`oQiC}ڽ*c.}نQ hXۚE™x"wzp+aض}MBs:j!ݨL(msX=Rxx%6#ߖhvXG%%Bfcw~T(g֞{tA4S~(G*oWWkV N/o!',U/:_>34VffǛ9KP-{6b;װT|m/b@=7ۄv 0Y{ʇKC~yvxAbǫܹ+m&k&2Jӗ;wŌ7sCn8ӭG$LmA4R ۏ7he5oK;Ӝl7Pa ~iMP /<:VZn\-,ۦ/bTC5^{$u{v~#iȈ- go8>&ZP1-9Qcz)x Rذ\!^Ijיό񓍌]tyz*Pf8Oԟܩ??0xiO,ԣN$Hu..05O09E\z>e8-q<}-E$C{)٢{%'i釢ݖGvTQ{t)!amv&FR]Z8##ap.LߙQk`5G{9hj,zNef{I|پ3}OxѢ/D{NvkF6@&,,DH]c CndƎ~F@Ka[Y6I4"砅lEx7`/]R۶qnE~'U'yւ^Hn`) +ށ *EBI/rԌ.S6hr{kc|AC9Oge?S< (!P\ƚK;exZnÂӏY)1/'ģ3jB%Z.37#8J x^YcWlmv)Ϋ N.&OU5} dq ym~ɜc7Y-W~pGw*{|wnZF^ r(5)Z] '#Eh[-I/ 1OjZ#&};?}5|Ù.q*:Y8ŦD/2i|_8UoT ((kWH[sxh5}c_5dUM-Y#оAf[rsMSbhLV2uJu<[XWw믺n oz 6EMF^ A}mvʡA*3i,¬#6=6s 8M#,gΨwz;Qe7Kd&],-/ *oTXAN8G_qo,oZP[N!R^Ee~,UXF9_H_,id"vN6-ib3\FZ5edPy#ϔo/7Z(LMa.Ez[їXuKROrƧ];7"b%͘6?!.740J|?MlLn3 b*/>~Mb L&Ԛ-z0b&\hMҼ(_iZq!J0ɟ'DŽrz $gN%vyN&nAyscE{nP>z⒭b^m cGydKi>U\O=h0ny:˩E|c|q]e!GcQ%Y'ϔ9j'.̍[8x0oƲ\`)ad#ͷ/ ist4~-\JArX> "?qّNzvYc\֊`&WӒNPtF%;rFU3~C PP#DN7zI$NsvR ȟl#[ëSAcn_S )9Ep%MMN뢈%"#πw[TOृӠVc:uqj9k <"[ف 3 ي[-q\fV&`k`qՇnu`fԽ8])3mmu%zu%Iv3ddԅ/fٜU:\pCE@>f,G(?ͫf6V.I>^ JQI0IJ=Pbf/500p(pX\/yAP_Q4YSZ.~dJ ¾9pY>v>0 Z2e2_so[+"HS Fݮ? ,9U)p$a-P ĂRJ]֩Wdg9m1(3 mk0,~Ю&@f}i?t"Vo|W\TX|+|O1_IX'oq:=c^N tɍ`m;&jtɛ]pCy}ԗyw[utq9M@`K<ڟ;qNRi\࠱[ M 3-_bCg2c=(w}O OO<u3X77xQvqWw%ËC: ژ-_S6eH#+^,mig( DZ?=-"e @^Ҳ9Y+Q,1lP»_0]N4Nd.w1Hda~M"?MtDꠓJ?j]C^E67#֏)h-=dquDԍs$H?Ҍ{U'EoU7 \aOF;esQoiLZv\:mx8|b3ګjZhl= ( / `a| Oڒڕ^Q]L­m0$o PfQAr/% (;3F*guM1,YaO `NYO`I.LkἽo ryR ק ` ֐!$ml[J8p}W,1#qx'rp[dA+~"j{] hpƏWsMUx %\$6}BxHk{(U-3~[^q2Ltm*gɦ-U36  $=},n4hzz^T90 {U={][~ gyr݌Cer@otO#>-Ca?AKs43CL q 'WݠѪѓCdHWlU!GFqA8W}f: Lʱ1o f@pM(â LƀXM<9=+9ȁۋ5B+ V7rivM+ hpva8v6݀RCnDzF}vW>U|qSL*YiՋ=V}pGXX|tU֣6᪶DP/=#NPV}oGzy Σh r˲a76z[P=Gc [Gz,n[HVVlcVhj6 UG IxY;Xe ؐbiy=x4q~VP 3K '_[,S>N}~0>>6؆[7`|sx!$ ǬIxc]X.'l> ׬ ƿ dm>S9AdzuAʹͨf:l7:#alA78NO2y@W vn׊3IevVbFU{ٺlZ~P|*zɏݝ ;l{j|v M"-6G K j"ȴ_Z|86\Q.׽ ؝8ظT3*>Ttgkߒ 4(ꯆq|yԜ("ۑ׸@n_gY6kf:qEp~$Tt.ѝ+D7E j2:,Nx*xח1)+%M3h-;]t:33~>DŽe;2040 uDwl5{5>պCh.~J-wٹxyJ=[`(W1@~hEAf{CS-B^]ݜa[=eC_BOQa*k6۱8(ߡ +$vTȠD12Rdڐ*pȎXWr~lS#e;JS=C=pRɫ|su}\@ h2ܷº1X5c بvwm4Oa)u͚֯SBU|1Qu%>5FдCu5fQv4<Llz75eiP\Mv{Ug*ʓcOFCbCoژAGq&VsqD<4 hEa7$4=Es |FLph8+JHϵ\,M7Z'w)?\YI7eh/eJ,M4cq)88iS֍EkAn@*2? _/E?PDx,65FwVJA珂Q85\`k'|W4c}1chnIzY2i*sM_CUx`:8QEs~él2NUl=Lbp<2sSdAjU=$oaYߚ"(Be"aݬ_%.hbPWv'b8Ry"6b?eMC:HbD\(AWF(zi%F^,ӷC?@8$2 ;rJ0 'LN{O`r6wܳduʂbDm~Zfyf'"[\ů"R8N5R~g[U:gq֖rA=e\ 6#-0c( :l4CF<05{K3fޮ,c7+x} 6h,vݓq/>!jSոIԾ]mܿj{us<>S#۱qe(&a ԼDT֨8F*z(LᶹOvo:,E[|5ѐV/ڬ)Dk̳ob0e C-EhҔ4a! 
oAM `&6D,̶adx%۵0N#Zh-/@ئl{9,(?8cb^iZ'#\bW08ET/)A=VC?Zf͗tv޶fxlʹNDgۘ~.0:]5aCSvӣάrN`.χ}sUWdov W2(t>h;EWcĽЃSHdԙ7Q?ؒ;\%/5uH/y[fR2h\w2䤈&|;iNӌ8] }Yeky WoQNpf%Q pAf?V#Z)\7/5a/r}emܪkW.M6)~ V G [v@cua5"x*'s=oJhz0\,4o aq'* d5HiL:Q{|XmD(ͯҊ'ǿkJ?ֹ 7ʻi/S)$֏C]Bo,Aqbo{]cNRq{2#w炽6bG"nc.>(; 9yo.(qNxapfYyg=ks%H&h`1e&M;Jl8+3TbQJ_ZLKq_/<;<`,G Ĥ]Kp=Y*{mw^}"]Ԕ r^*`M 㲝Hs $ӳp^ k? O,)m'cvjjMl!"ȄV dfn69FX19eֹC(+b;P'¤:+`Ņ•( }; Z? ch=oׯݘJwQ}!hls6Uu2LkI^x?U x)kVCXV$Z1Gʛp.~-ңLa ǔ(ԱE}~/9xP=A}O#/_:N,Yo§DDbd8!D~ZhL7X=P06)ϟُH^vlRTZs .^ԫ_}xaT؄2 AN *:j߰g(0v6 -B̞MD%8-GU#cuG{އgT厲zWcW : ,$P%CS-0ƪxR# PL^*vRM} ;w}lUv ,qQl'ז]ȵ8nȉHT>`6BxbXYl|#JWiSP: ~'Ry]*3ɀ6}>8 QMɥ'O8ދfY}Uoʶ&+)w!*uiq1*4Ԗdd->J\|t({nb=W`h2Pm؁ȴ["n̢7ZCOPJUq({__u ]+o7U:ZxW~[+ KK13> G>pG?&-! Nb1ZK5Ƃ&3ܺ.1J Q )xu̹8=q>'2+W~BZ_^/H`RWAL faRI;-JK_e l@X6c OZȿS{.?.\9-lkwvySy祁"E нO4b&tTLcSˁ$&A?!;[ ו#sn>C^ug8T6cc{;s\6w`b#6F[e̛D h?_dO,R;"DM V<L}ƲtL&#Շu=mg)cP]UqL[:K$6Mf֑m$ԁs3+617%p87\E9CEdHbo Aq/kur&ÀaB,휖P愤E)dK%`! 0 io.&øAͼBQYQr+W2\6UJա:+(!7&;1K*#e[dq%`ax#ΠJFAVVz@4 'ECT*躜w5 Ez-;L4|jM,XPÊJAQC\qܲW͖RS$E4VQoXfb^ENvadoGHJCm֞8X)a5_UF! S3٫Eo(U4(̄ZHm)N7u#sAz/w/2ȂRAOM,.l(ю)b6'{ ԔckXtŰV7mP"VIkX%dѼƺP.$8՘`|Ć <.eu55VTxDHm1xn Rxv*낗A g]ė90vĬ]qS(<>"WhVtdnb- $~`0cęJr7}PXFZ|ImZhcͪąW9::qs2[ |z;'Vi*مA%vk}0Nk^Q˺6} μ &2 -g akfG~Ħbe?W ";h{aDzYuKs;UwqNM0oA$㭻+[";t^+V:΢F@ۥ"5U(\,>LƑeB ,~kgu\fYK'ыٸz|Ry~&υ?!/m)M[؁ qw񎙬P?ܡR.~ok\6-m:,.ғͤ֌i\H%M8-{˭bN5,}#deiYyB8Cge{\ټh,6[Mq#D’idѩ4w2.Uv*HU?-3 P1]%[?¤*4ꮂ9N"vQ ܶkfŪx`tpؐ72K3DU'5גMIkNXDg"7/\|Yv9n^cGEqTdՏx50 {^ruݶ?JyEZc񋲏ӢSk9߶ĔBI#. tRt%?HE`ٯF.lS>nhӮF0%uT:)rld(Fv`[88'rv lfb͒lW'.O}4E0v-,mFmwMcǢ#^jCW3O.cepgmkC~ͭ&|_8:+AV˰ !CjbRfsvls'2!M||2)MtN4zP\kCH~TLaJ,zL[qz 8}c[<5\|{+0괝DQ]֋wflWRk+_sٌti0MmAVY`qc!^kYTFv(\py s0wD+sIŔPo >a z'l3M_bk^q?e+9<(`deKp)t牧LS cg XhaemCKG8SPĔĎ/T07#H̖ҏn_=FMX| )x[&Jn<]> # c*Ovq:r2oe}CQq 嶜g:ɧKBHOɜtK=_ LOQwՈGKh_n}sl7=n@1Tfc+&Rq1 TPvw ufm-k+{7Uwo5?zHs鑃D558LKyKĿ톑Q2+waכ~RCsFKZʾԧQ#q<]?3ʙ}_R<xT9EsHaF0&"5SXV-w? D0mݺ`^~8N ]f5 `VjrA'uQϷvAϚ7?ܣÉ;Wv@'>ׁ6a E w/v@{w6#Qy_p3Doɫ6|N Yjͫ݇1bIW+% >rwAeQvϧh`P=\ ;aruǒţ³ _1h'&3J.q~* N{ ˩OPؿB(yj+46[9GL&}Y~lI~r=63Zq"1%Eb6V9GZJ1jm &' 'C2V؄,A[dRkptxKZu#Gh1Y\ξ X|~DF |rܝ-JͺuR\׭x킍\ߧ;/АD)!Qnfl{|Y}K,2SOc:8akɁDC)#Y5u'F2gC^m@bW<I;9IK΢u$QȬ(d. "-1n& lpl?q'$rLgmrHYMMɭo;b7Dr@_Gk2,LU)KM[0j)83᎕dFh\euYose+ʓp7t;m`c0RRF\ˏttTۧ_F]C9[~oNEuPH_a >i#LKD1eD(5JZ: zJ%U8$Qa8"gJ!ۊVz_~m=}ka ϱ#\k3d^ipd .>qpP{7ЈnaG[>V/ߙb<~iG j .سu=F Ʈwח1&uUQvQ+V xwZ(0y9șھ E+c8j'higG?rY;cftYd/Zؖs`k-gKS 7(0tn]gR|2Gܰ#;YwX~}|0AyD{K\[JŶUc6X#j_#3 Z+$(V ?!WnXM| 6'ϫ5b5/M9}|\D~/A'FQZF;Qmz="XnFӫ,tR{FjTLqsvQgayJ1R>q6Lp*u~ be`TcTTE < qjZ+;n4OdsaT눒08M_#v%Dc mKթ #N|1̜Q׎T6(JiۤlPhOaZ/݉|WGilL&` E I{O0.FN$wBRX}W{GPLTn%P'4\o${&Z:&Xk$7ųcK*% oxm'ad't&azY߾v,~qވohN)<-oveM fG[2HʇAÖ KX7bFNTN|7L ݟN.M2;?2,IHcSN HrIg)sbLAiۜV-gd(i#1y y5r.+ա`z~||۳x:D}MI!LnV)yd) {K6xkjB-"V4EMFR:8rYuVJۢAۙa %E :LŸ536 rf {"^ #MY󻃚s9oNFe<78W.`~%6.&m罣# l޴Uhy[/k]!'^+ܩ8slP`tޅe+ }hjw섦Qdp$B2. 
X|ݽ0 @.KC._X{?㱭pPO)dIT$Y}4K!PdITY/8J,ebO4d9™*g*82?ٳҸ'0xŗ_)xFs*e$ ;iҍ S&L!νQ+VOmYRh-~o\Lp7Eug3"d"F;snc;~0,JVlMl6R5JXlѤ dCQ|Pƴ< љUk晌JWQa0[_E{2TbaY0 [ 淟JPf0ޥtڍN: `vK6am s% Ȭx'ԑx=.¶䁌t6k%>p1jc\1dٯ1 &Γx~[Ew8D>%jvZ&v #b@$ @ I28r*)BCmr lД3<lhCNUOۙws&Gsx}՗ΜԼZ"DOz0rZFN*aԫ q$҄͘t-6]' e,ϚBWm$}K[=(Dg/iaR{6J}Vh.I)ba+1+?Om $M_l?t0lH?G_9oAloA04x̙ܽ Y_^C -c5ļ1<%Lķ4g?b{߫USZ/lF_R׋Ad,fճj%zE5Q(v;*!E#S@و?+9 }ú{nF c9o, u<~huQ=;<|vEjot|Th Eڷ~0`?֞!ك v:*?BY bW4EsWR;a"N+L/qiֵhF'"BəMmSo13/å0 E/f-K/ʚ7ؿM~z uN`"Bۍ  :L`q>5)LrEf~6Bq,̡w3~x 8]d]6~Gu7r+,8-pijb0jc->e(Df0UNʹa=h*lѰ.:¸x,f ۵ @w&CRF e/bfxT|;-O/'RVva)itG-|rhñFb0YFFf(k_k jP.(7g/FբSJ?12CzaA|Yn t*Giq!PJ|&Mm,8F5=aE+Ȅq\0d% ˫CX2o|{"|Ptn_ethQ/׺L8yػ<,G!g&ٯ;52N#-1.8q瓷-sBYNQs lOETG,؆C s./!>  k;kw@رq swkwc_3o$X]>-.b I>-GPG'U߱\jcsQ&8q*s3v*ۏ}z⦅ZW8BnuSmw|>F:xY7"X0bnuf=F|>~8=w̘;Sğl!E.[/I5!E: (d*p)4ZnԊ3Q?nJs,O!hv#H~KV 9]akrƸ8ױIKD~%\et/Q(ip~_։`Axd ߲tYf ~D_a’tS[aJTٳcuk5E/Z7c`f& 5 1Zi 'q yȫk&5!U&%e kIz}I8c nwf{k9 bcoϏ fK"g]/?pWm?\h^`HD`9(rƲ`?E3[+LTE(_{,EBm8 гm5)Ub,#G,`$9sv#ةV~+kDlma ig Tm5JԳ_pep2 ~e5Q狳hj^8*Y{]W~" 'ہB@{4a~zVYH>KCT@q uN7qn|KJ*i~rgHVez~ N=kW'~_9o"-&(W|ky F_B:N@6_T FSߍW6jf Y[’Ԏ /$]Ύ!hsP c-X jb3QB\&Qkb6  ̳VzW#)޽`<`-=[bHֶ.0Jmx_mnfK1˜|xP8ھm'ao{,*6c%a,qŘ] or߸bYxmHZlEux8e`5T|m" :kige7WĤSWؓ~0P᧒\JbGc, OsŮ\D)ؔdLyt1D)HGs{BF&E_`^Iԛhl*rAb7(Stү9: d9wa*Gy ;dDqFW5Wah~*ͬy"X~< u? !(C͈f*E>V]T7wgwITjMYD5%zz-]sJP<1 D_p绢F*p[b:X \CK:=3S*flQV;9)au0Q k+ExDoS<7gE0]%!5gu3ls:L&&/b Ƈ6bë$&i@0IiY@QnWhSMɺt[xm5v:BMk3DC0r:ƈxvLoa^'W0'ҤՍOAfa@)PjHTa+iQ;4^ሓsWojs3tM~at;~2l\$ 0p,"{ sL D m`x?̨T/{v)#9^,L}9:OEāfb&DigӖ J.KALU쓁4i( /b^FS2cSOf KGTojcyrI2=P#j Mɴh@ۚJlYDSvc^>\A@Qw)_$)Ӧ J1LVV@i_/)q k'-/hX_CJ(9ݛjϺaqbpacxӧw s3.r}8fBi?s25Qӷ~:4ۗ<+ѪEtLF+:v|M)&FhIsA8ծ`ToS9QcG=&̘yT~NM[nSы [(~m6Dkl1~Ugy048eDN狓;jPT#X޸}Tb}=_x[g,lcl֜knn{TW!ȑH~[;圦;}^&`#O{~5v-{'BfG ]m;6qu`lњ}XoJqJfEJ[:xf2ڔ9^%3::Ĩ;Xc~cKy*g@uPV{u9qh|8"QyːMQj@tZ \jHX)O H3 0|Dٲԥ rl8~ i-{ đܾ3~խ0'w'qôT"5@ BDuar/R fk3K9b=viɢQʝ'.QToQSZVmvW\6f6ے7JmSޫ{M"`Y/*ErmU,nJnQ. "=o c~;D` 8E:BGi}(&E!/cS~u #8-9LGݳwb4 YrN%r{E CGqVε'oHw l Y¤\xƹ 'S`&_% ;(/;guoc֯̅{'RFn`Vuq={eF,lSm: lW &ڿlq5M 4woEW+XuN&%%LHij0 Ɉѻ0JJqX =>hz}0Z!j]}[bFF8 MVW ^:Ir&8! Z[d1Mݜ@/jי2Wc0/sd܉:ufN6jùk=sm=ߴ*HGc#06n'^Qu,8p"2<㼍a~W<)S g,ƒG`MLJy"ĥ[RgI+%vm>a{#رjq|b&mχx^[ lۯлmڱh͊mWU?pvx)Qtm)0llWˬ{tM=ǑyV RsBsT#R2MxbDnq( F?t,!bJ$@dmk26V懽}ϸ9;ǥRP>$. -B=J%QV;&kg (G ԰!+0ʸђXGUaہy}83*5{{|Lxn\2bczB+TkĚ6&=3]6⼰{93X@@8qt΃dQP'jIN"+DTzzl Dj{ݻ͋y+*g7潽k=qYϮLbȚg00G/:j'1 F3g$,Ors&J%aF;DZ'xb bdU)sL@U^X%L!!@7l0}<pi5@ZvWcuI$30DA[ muc1=8f4"ìV525 I]9?'hgN|`Ի)ixE%HHcЦ<.@¦tDZ(C\GկsH'i|)l990 0zܔW iMֽ !#mɋ6w̘>;YbOߎ:| ]'3&fp>[[ƕy*O,BwPR6 }؃v+*ez!VjǑ@mqn=khk&Pgʗ`#+08SgmGMLw(p|:ױ*Z`B+马\P쬙lsL:VI_آſerPuֵ>ԛADgDKgXe%]??ޤ7fpn e5cH 5a] 6NJM!^'_%gB#9ܶ͝H棟݁&ܢW ӌ_#Ş*n c$NLD8 T*zwuslhXYkn"8#eP$i ´NF[*=dҥ/aHyR^VB&NbngOdsljq6dC%_a7Eչ^u6z°bsrSݻW=4<Գw|‹̾kgL\?rn 0Mn>_;?\^ADg:DA_.Mhy)"p#z0Mмž2JAom//3V ug>TXdwhA*F98DCC,p j<UY5YɓWu W,W | Q-Ydx0+._Xz+@YFe[A&3}kN䁩t/Ʊ{]El)*ՇAt 7x/pUoi\fqV|>HeRA -8ș8l%NQ]WB#ڞamz~~B@4x-Y{,O7Il,}EOqm}dPy6Pg[DWm[:%\F;4R"ǣF f cp>_\wު;-&qoÇ׷ $\id}%ùȞVl+kٮBRK=j  cj9^㏃'?,N "D}bwR_# mh1܊,bU-RJ߱o`w/> †Nřl?Uik%V $*N%<-!:@IAWwtc!J~,"" iفZ4r %i@~Ņcw0<ɰF* όߊ]c#vN7bx{ZGQfL-fsqSKe}a;ճN E2{&־b7|n0ɷ^Ш\ܚl+4 +H$$^lڒVpܡrbBW(iMmy xvCH6J.q~@'4s*0OR8Ѫ䚦Y|0uo8}k`mۅ 71B3 n#oo@A$JWvN< ]^2m,$b_ k3] 6/8VGR%bC<d_=6 Ap}-GCìэ5" GO{X--CqkK冇UkfLK 9a h בֿW0umNma iYN~ vҏՃΖGAڹJ}RĩHp~kzƖ]֕1đd:. ᭝.i.sKZί&n="~;U=ZEz'ldP`~>)5y1[ X[q ch[_ "}זYvYɚ7-E3fXIS82 G~fuʗJ^j&5bv S|;wx{͑PQ|Y1FPm,Y;brӦ Ud&od#׾i?uc{?C7I-oV-æ7;N)dpnhó -9Dn7rICmdr/qDhnQ5Xe%e';=´{D+]͟ cY _r}I+h0tTW:q{WWW ~M uB μ3a}c8fr3¾J P0Cr[;=Mԏ0 tv`@~V55Mp~y Aݒk3&Wカ"ܚ$. 
̋,Gj>cdzLy2"V:1Tzظj{7F`%N>zЖOvrpDXmκ@߾%/m39zS 6Ͼ46<%%j*a?Q2L芆{Q]_4?FFzJ-8.OK_>IY ϰ{4XRTr2Ss5eh b[`4keG$US,C??i[giá3>M@<<' 4*݌Lڭ(̊`5:Q)9?QAp|L6u{=6iUNOչ(cώbl|ggԽ`@/W/ e[FRBDSZ`Oe e&}Ɂ3Pgs6)LwW׏S%4n^FģN$GNA1lu'+Qpa7ܼXN5Qddf}]Y~yY7w]S}e-PobBdyjzHp6GFH<^)Ni\QSdEPmJ4Z}]~Γ0*0#d𰧕%`G:T)r?3 5%jyKr)_W糲[@(!hq7d3y̓VH l9"0Zb  K'%#:VэN[ʿn74־MiqSWW]d>T 鶬eQ)O[ObɶV srgC^RͳaRV+J"ޒ*Oj@(EQ*($U=TS;j>iYxzs: ,L㚼jŰRLMPz(2{!_M;e?:/Z5OnPV,[G0)ԪQC"Sm!/AM8?}7;^M>iJj{ݦ02,y͚` ߔċSV;khNg@O\5}0Fkw5\}2ZME.W.B%el\Of c%ڮTiQa6k NLQT|Qb*4 P_}Hm%𗺭ˍHNd6jEoWyɫ'ol&wTzk7Aj%: lP4/L !DʲD ԥ|<:*& GaJKnLguE$B H2!6l-ƞ^5\yCV TYFwѠO2cS%mvH qn>M@'" >_QI6 y$|w|-$]ыu_T3ipB0fÕu/#4^̲.ɱMoGuM'@XRwCM뇌φg{ÈR+GvbK*VzjF^pm-LFp(!>J5)]HY[=$( eR'A(y;{U\uKQx0>˙~[.$rrK2G*mTN=I ٶK9Ǵ؇ܾ`ࠥdP8r+zs -^x)igHH'ZyS^[C0e@!gEE}mGPէDcÆ7d~Ptӎ6''oء*]{h zf+nܡL 'i\Zg~21~~¹39Z4t/Wv:R6VAT è$]lG8 Iuy͑;Ff Do!1ӉUl>ÕNeϤj{`&F;ԬJ2JMi>oc*Ʋ&q0WZ*XI,_儌CG30|=Rs QCqA\+;?hڷlsa|>0~[fHjel-D iم,C3 s2hMeܥR/>+(0uP!Jٸ El":!╽:zD"N}u.a2;:XN+8QC4.ʁ}'NVX *ْ# yf7GL3{j1M7ĵH]'Wvd;w@p =+$YjD*Le7LeD?|2 >pՂgӽ!#=PHclWe!  _? ~;P+@؅(D}6_QPt*2mdqa5 W9):^OͶVy=+Վ\ɈC>&)3+^Ci]'mʾZ7׾{ T6f: a67D9ˈ5Gr:PWxvr _JEU۠)\2-3镎՞ڏ8jAR@"c\#e[-.*`q޳wGO|Z'Y8unKVM_a ;7ȽC`-{wBxȻ 6wz!aWlݕk$X&苲Is.S=]PB1 s"Ψ+~n#0&7PHC`&d諒2͐(gGھӓjg)6[bۓD:&,jQ,e(&!.l `p(Oju摩[Ux qe,cG{7+LWUD47* |23̥,;7Ѭ $5&g$ F'{J/5rڊݿb۲CeI*NkfA#5+;C O ZVg-C#u4uX8Rv0؁Vi/7ߪ-ݩk}4Z㓷I517RDsb@BD/&+*2Q]%m{[d mY, y% ,ίSε ؅IU.M$4<;L>jS Բgn| b7I0k<3q{ ``+<(Ћx/0*rK IL'Ճe\462OmmaFQiA]?62 ]$K+7a3بx;,9Z4pRw Pw 2&3PWyナŲpQ'*Q ?#z8 Y%y* YlH60M-aQOq0G}K|aE S3ƾ1jliH"ѐ &1ɀ1`۟j.0tB&E~YOc%M*HLJ9oX!1°哫 Ail֎24?8~Udϔ`q3Śz\xP\|Tb4B@Z8飋{2 Lv[`8 Wr>4*^'/-C}>z dtg7T2N=!dZp7/v\=Y_fXK+,6-ˊhߞ2߭_VyRby$Mq\zUPlt7&r?&TMH.?-$唇 +}^ت~ ts-5* 3@Q?>h3;!nDy͡=ݣB͘YQօ3(:+) /uL4w'뼭eς%E_a_QSmz%gu`=4_bZݘ[ €djs=&rɤeCiLu/}ТrH7) @S""ŗMH&1%0#K`;K`,?EQKUHSk(*ыm;ȥdrZɸ5b熌X /8Pc [||:q]1@п5g-0_ΉloYL*9ja>?hcqyZl|)Ӟ[ad&ٽ3P,H/@ pUF[)Z훍3!ꋐ:CTV *(P\L͑iseA@n5,v:ŐsX5Jyr乢z4muwLJAr-mo l0",'T$tDlqƾ7htHS ިCyW s3wh@>2y]=]G]IW e x~8ëo+Wp+}sR^7|~1o|iǎ]R@w( qRj:s@q^A|0s)X uVЉ9cy-.m%UF#h`K?">I0LeCF%X+Vo]|QACԫblu1RZ&a*\J[][w%+Q# H.sS)r3 m,2ɲ;%Sg.}ޚ5CXdծ=DJ.e=tsvɎ:v%!=D$J{{gV14Wrkw2{Ym%sE\ LA">H%6sHc# ]H r%c~8]v lXV2[kbh|:lRPIy},ʩd_|ɶ#ah5 u k="h&PnMm+VFP)Ѱ2(HhVO6naAϤ #3:A!VuUanc9הGFCVw@dciK 4ɒSeǼwʄ/gI)t"h+@t˚ FkA8z2Ȯ;ԃY~盒5;aќ~M7~p[abz*qغ?A~=4sr Dt;cHs;~`<4츋Ӗ*t~ڟگZ ?a֘|Ȕ 7djjNLEP ze8gܜ'=1yȧ%xV6;t(aJ r!:@Q<<8_^+gUhD"ðQ0sA&zJ\BS~jd 9WTYymXW$MiL)@! ADXo=Hmun{UĻo(UU~aڣ93V,00y+Tջx 4ͫ8YfGY[yZL_2Z%8!+9d}03UU+n FbwHl!B@+5&!VmsRY&1qY տ6[Zӷۋ5xkz$Ps:.?0Roe:$ nm(P(]cΜN3[fgP Wlsm)GnH5yϛjڑU ~N}`Ĕ ^1/@=]ǻ7x4Kt>|pXMr}_ 7 #q2yR?R:/8)UwVMeMٯ f"fZrx'C @;oy5B^cUhN!._'k|ǧ@d~s$uoF\ϕK"*4OuP[ Ʈ+VL @ daa(xŅ`@W9#=Ҧ H打M~Tp9n#.Jr͂(_<k˵X?H3-bĒ0厢z7AQ}{qcÓVNm@;"Xm/aPwl ˔pM1QdQ\]..vbnq#58D bBO8GvqqcKF?#5}N dqURox#;H\>COvg5_pXV~ mڜZ)ccOq j+ h.>U_,+ -Ů5jtj; J;P7W=<wDދoP8]$ 5p†̙= _)%b U3v/= } C5O7A#fxvDnjgcVUm r5\el[ {\[QP,tGGG0H5׶`{Og&xd/O:RWf9OY[<=Hi~d-jki+nu[.kZcB,/Ca'os{,l`j8 g[򑘰Mt@ d<(˚nS>R4럎>uŌW// 0X]2ܳr0Q2oщtڱg'tM "WuqpJ=?Drl/.ڡb$dEM^mg.-a[4`͂P\v͖ q$>fZ͎/0rby9L=Ƙ\|ƼEm#3! 
hqOрo6Jf]?ROLaaב?p$ KOd 4X$s-ɿAwGf%SN :I<>A7IB7)OMA@z$} jSmEuC;qDTi(JBI9wO3U: sBwO [߭'sTB~PSa ImtF߯7!uZj-:zʹ"EӌLo׬qY L^Gp(}&i5,߂p&[a"/f岞V6"U<[:*q(.KĻwt(P-5/Œ{UeNNybF2jzg{2 ykhlOo2 vs,`r 6b[/!7QrRX,H+!W_n0(&*kcKĬD4 %]xu9x!wf@T-q=fI)S&Nu[n+Ff] `/o<0کFöYXV0PʭzȌtYm[,Mr%tSe G'!_ܠ^/9s˽`C9} ɈT {I=kǃ8';$DZS\%,/\~`@I^/|N*B_gM!slŷ*Nݖ肖Kf*@PJ[oGէf[N౹f'/$ʞ0TPy!{G,ՏoZp3=f<;vzTsd~_:a=%?JZJϥ˶E:*~|I?O|u'Kn}.}7iμ)-;P% d%qX2Ⱥ a׸`<*b)xDbw"n+B~rʐiȗW=V\"NJs/q-%'͑<[^⇭j2 z}ܥ_ٖN)`8Z8s 902 QfueŎ=A#nTU8oʬ߻6F!vY7kuW;}K.d,_5sJmPf-ΜQڸ*kD9%ֆRRnw_צI8>oU}SnYmyjnFb[YՄS{Qˍc&zG1qm{ކa%]|恆,*=Bt`u{zs;RLk>0%ν> "bq驶HVP}ƀ nifr:g>a(0׵3&TH-5aa,ݷz 8Q$yXF+@8Sݨ2d9'ӳt65B?ۢrpu5|l)z}lm. ˮ-8Y@q!05L&NV0U=kS6xMX D6p؟O= l'u87Hu7N+ҸGS^Ҕ?uݼ`27XCET-F$ʣgL )ٞxC&XØ Mb;pMu^W[NN~T7"Xy3tO:%Й21D֍s@@ ʭr c-tqTf#B|fcǣeSQҹmiJ:1b]@dž^LQ30N[x KZ/jh7Q; oM}m1c@^+E]&яpyc/LfI<Д7^펞TR˖?kHZ ]i`#:w~z3_ښR)8ЛW%$l1(??gT,>mlU+VʚMa8LU9(ˏ UJOK!n,qwK|Ok +WJ'f$o Eb}B.SBNp2r%=H۰-pEݧ5~ A1^iDfv7^'&-ι}#%>n5z<_} {|4fa O]_"gsaҷÞAdܻMA'Ʊ{kºyd? tmheZM\,6.s^+Qoen/A[ rXEF< Nc!2Qtpۥaov5jWg'Rmdu͕ht\U,ZIu3V0wb(vl' !^ Y"0 irAr-Gjy^)jG . pF ѭ5QD~!Y+ǹWB5`F׻l9j_P`5_PჇe ai} H:VinU͒O-b.$/#u$TK+@VKT{sPrP砦rPE:I!5=3Eאeje("'U4wr^݆O 9>f69DE#s@ؽȋ_xcdcfah ĕڸmMeC7X\l^ iJYDoljU\}= C!{$[yH5 lUT?l6r*EeV),?\] X] E `?u\|n_ѽ:;%lb< 0UAx1]"U-7vҿA>">ϣaא01Mp&bqlʃ~&űφ1we?S7G0}M ]m:4SUNM E-R-Sٞ_Qw۷޷}apʀ5\ ͼt :!'o Sֿ}r|MDڞ#?>N˜KJd1ȤpMajKpӎ ;=Z+=-+ 1Y^xǔՃT37 65$cبUaUbx%h#ISQIߺa˰FVND"$]BN /ѨV/Ǹx1Э` 0ѺSp#&I'~`%%UP|c|jo^k+e؈1(UI\pfm(.D. UC}2:rN=m{;}[yAb*[3JO6Z&D[0e*ŴC%@Dl@Q}:GɈ;A]>'׻V5Q5lKGTZ҂ؽk$ i-Jv m@ZϛY`'L4yZHLΗSjFl+qFxb|j_?HkȲF!=A~vhzsdȑ"RW& "Q@˶m4_g<ymAz$DXתT$^^'r2+(xu{"s٧0#6Q0QaQ#aR58?p f;&_/0<37k-`"J^(j.0"*X( (~=7`r=<njb/7^;s1Tx>ZN?-FTrJL6UQ;cfcm} R}ݰ B-[l؆ϋ`ک)֢&usHl'_/sG2_,-|`eRE#._GZo0°m[,)7k16*.+.ۺoq  (W-3+ +xܝeM*qZKm,;QkMQVX~ģ8h~ʼ+5jC19NQ9yR;]{tu:"M?f`QH)yQv:I8x,nlv۔oy~a٘g.dب$*CPSl\YWSlS|5t"x3pe|ƇK*Re^c,qD^IA,"&9kH?P0_|2tG:'8<; U )oˤޞ'i6L :׳*f#KN$>1 I#Yoz#lwόOxٯtwL}1[S ,JsA6IʷA6`6 Ex2|&2MXL 鷋'$#4B6&hV}{u_#-06MLdQ?5XN]:u46qur~ &g#T`;P vep,%5OW^S+$nLy ů T/WR¬8V%:sم_4橚 _; n>~(t IkbeNy}Th` Ẹ5mŁnvCQI1W3CEgIFGu,/qMs! eS0$SL;,;I>@l&)jLX5!@&yXa( Nx >c ӵ$(iW c+]"bŤCY OH`#O6Tc }v:JS%d8+* ۊs{ DNmF:k.iC&F֖u,mz&u橨lk%u^Sq)}9 ?*LV9:~+WᤅD^73(t5hS ud/?iv}[n0;y*7S{*Ȣ%WGr hX&呚VOr~ILL k0a\`=bhX%1JsH'\fobk>1ȺABVGcc>"2,YuVY!m*(՞/xʷtٯٽ;> sf}9LWroo'y0*ebdBBBAu%-8uyiΉ-A_^qwK б?Ut[v4Kn_DS觖wh/>< e^nE_B`ȩdkHJk] MX=Q\Ug*c Ԩj1[J6` ë[nlGhT_ל|ZZ2ΩĄ339#)ftg]? j,JM-3Ɛr*ۃ0˲F.pI!AXI9 ;Fam/X菹ʉKhcl!TU7/IEfi/<. ڙwVU_d!R_ks 执 &;CPc )qPa]Xb]C+qN55z B>-ӆoz#pY{&`xñݶs\Z `̏&M?DŽ"3Ǡg횢tޗ97CyƚuK 0YԘ[Ռ4ͭ*qPВ% dc+Ӗfu{MWݖƒ&\ak8v*]e{[6J\W67ms!un?e{51@B(-HR;q@J3CWy[:%w'b$WDB+P45m(#&A90F32fq19_)Cڇ1+nJ=74 {aTP1Rg6I|cO}'h QgUlo4- u;_=ʹd2*[’ Nw6ߵio&!Ӡ6̤S;fb~sy*0_}Sjn،<b]̩bN{{͟S(.~7fxc_C|muJbh ۇԉ=|EFZo*ךu&J =_lXˉ6܎'6hwU%Hy݌Ww"c5Bx>۹/I0Q8}C0 2V»<ffn?E_ϴ;zmiG |6!W|ڪۋ&k"c3^4$ L'tpm*WH  Dqs2]-:ZӮ^~tn|ܽ׈V/ ϱy7c4!s%O ]AyHoh{x)?J3B{R+ܜ(E5R(%]N r%;߰u w0P=kBvHwJ'GDbi$aE;hI,بD IZ>J"S{FGTŷNa/-P CS:hͅIk]Z9,+ qN&0C!,fnM(~t['%v8bMP+ 0mM&h3VAңJ& ; ,')˻OkXn!_P%@ !&K; ]p80c՛BBMV`ޣRM~=yhLѤ@;6m[Ur2}ELhTn<<0]`ɐ7\ET`&Ml3>k mTvPhm{xME8sxccup*qS~őKyCv//*864:bpq;޳2` Ofo/X6zb'yTobJY cI( .K) t!v\\|t@tjN#0t;n^c榴q% rɅYڨ~{qc^|-5u8r>UtϴW6$4^Q8egPjXqm5UkPTauxɊpxFnX>*:ѽb xLNf>؝M}yCRnjrca,#.MߍnJ.'>8`VVu+պ] 9^PadƤ[ 劦C|{ O子84\TO!@TԿbve:5ixz]蜬RFӠܦN_ oϜ;{ )# i4? 
dipy-0.13.0/dipy/data/files/S0_10slices.nii.gz000066600000000000000000005710041317371701200206360ustar00rootroot00000000000000[binary gzip-compressed NIfTI data omitted]
.>B?$coq'ZO,Κq3"yc<9ƴs.տVNh۸~ɚX7\祟Z|մ}A~oxQjrbtљG|x.ayxeAyqeB\h.x&!tl,PY\ ,lnL}~@۝Gqkv u΢7;bF!װs\Gf ZP#r~6CR h 0|-\v%|#|m\c`?h8ˏP~V٭@7`]]XT(ES5ɓgҕ鋱!f>iG\;=׳1/nq̔4m^ożQ##,@$Wc771> ,Fd2x a[!_FCRǃVw)>) Cn-?W~|X`|sxvظRŋc'9A HtYjZƾ2Z A=1yHaUל'7XCSaS9/6XK 3x8Ԗt:ۂzذҽ֋ avFBba s8ɹ rr*}8:MΝl͊׈}7x $_EV1-ȡ̬3iV\A0篑Akn1ЋL/X&eFM\D!sVQi Z:Yqj0MJsرSa8!Cl1s(OhҦ6wI#Isc5k`::5ƥX^*X?MM~ ئ^źFUszTb#a'Zf̛ZSr( 9R,$?T$D$xH_#^ ^@_$ќ_YbOG>.Ccew6g^n v,9bvW%>5naퟱzBz?yuj|FoX#ȱk&͗ȞI\/EHaaCxY#:ۀ`}#f8t$32-I6/W TS6MOA3`']-& ,%pLj..+y>n릏9m͓H=T~-x%bwrs sae|#LĿo7x/#w {['<\=tw6 ®O:ކV8KŜьiɍOǔל0#r՞[+dH*g%hBjU Yy?û^B+z Pw'}z~.N\T_}h`(LX*F 8 Ix2L*D#3e%K!lsn(5UF [AfVփ༩'OԾ':ڵ"OdݍY~}} YJjߠwԒGkJ;WZ:^7"/E:Ф](Jy꿑7J| __pjc%dQk F|c12L`%5ÀL3ٕ4.hD̠ߒgi~Yh}Xm"ZffP'J |TF8y {ٿw3쇰WAϑ.rvG:/s&$,LBo㝜0SZ|x- 4˝drŲ`F%]׽4Zn\S?@;Okshy/+ ЎʄtECqf8=Jp/тG_+}`>vI8t##!1N5GߢBdF3៣Q *4M-%=+ۨ2'Cz++3%Eiv\iȜuClDDe1?ppXHuǯڵ4!77D7%wm89.a/( +̎MxV,5y=6e#kdՆs~"7eLϕMAT9f̬˭ ץ}ѻ0>L|[ueBy~x8J=D-a$s8<2b]xh2BYސjqrjx]>/Ʀx&EFMX27%svQzmD*ӋʵrʺO7nF*߮= ت݇f/J_QuT`HŇ5_4HwNvػ_]V|ħҿ2AW0g39+Dp[7oӚ0@F V5Yn ֻn#-hD@GYTa1KGavrS[SQk]O!χNJQ-xwqifQ~-Wz-4f_:+_\,Ql Fxd;fnOܙ֚Y!7 ':eZ$35k>P~ ݱLI0w&9SJcv:1 cG=',kg°ҿ7Nq8%*o2{Pǻ'ĉoMϕ.Zr _V8-~'0|w0O(UNo%v^ooA\+W5geZzJQW Q̥ͯh DD`}!144RIoG*1>lxYޭ;amQY!nQn*^dq zp)dsXtvxPqT`jb'2gKӫs\嚑^߮B%k,8`x(VO>`3zsq#U5 kK;"s={qŏMGŗ`{`Aֿ~q ԏ<]zYtXb:Z聓6PI68OZaC]6% ֛VCWlzkrrϾZ)l bI_bO5WN3KpES-QKz.UpH$h`.s7Dxܱ@w7V^R)Q$p2TE8(4/?sRWyD(dXk\] '2]_/޹s\wfXާV8Ls$mJk{Ou|OI*S?/ȼJ<4\' ѭ^%g{դX0d)lVQ4nätx|"Āfl,'WȂsNmN ވ7vRCyVi q*UO}_sV_(klLrNNhU/bـLx)' Vo;+b`SMbUSm/2BbE9 #yD 9 2缏 (3ZKW/a؞uS+7=KƝs}E=]IL>ȴH82ݜf϶91z%?[InQo]Y=IlP/Y^֔fТU}>Wl+ִYì6 9k0o-cgF "x7X-umZSA{c<|1>+/Y_ZNOkNx@&x+Wo!bQGJ}lsWUoNBs^zfω ;|Yʿ>0/4eu'[Qy]35qvLZm>jziflnM0cLzfx1ZL[T]ZlccX+GKs< _^/uU&k]Y1|]K-]\lQ Oc[w(#c@iXϵ=Hiķ@`Gϔ~_z8'~6kSShٶx0N'sWՔ9*Mn迵@r\)lTˠג ] =˪r[{=Jl(+.Ū=~ZŶdVz& D?zwao0N'0{Ɏkq ȟF(+hH[+¹%"e32dGΌɈ\21Yxk'qk7 _Ab/mpkz4PxH09^cof| U0>"7/QWa.0͘8ـJz(0cWɌAmq9̭l PB2NP+J|YN5+JA ޚ1rMzd,U5i6mgatGyD5: 0q#סTgJ-0 >I ?pCaH`䎌0ŇD=!GLə Kx% ,=ԖK-tOunO+fof{NYPfEdA\/ ( kGKT <B`X1óy ;.V487H?Hhp7# F+'"[iC'`l)θ 0g#] "veEbyA@ПhJQq4 | 0=PLc;{ V^ .So3 FhF与pJtybS6m>{ o4N(-?#mTci;=ȟ-z`3-u&SKkv?e}ԓ!caI0'e;~8XS$M@[G~Bz>5Kt5ѭ/7a庨;oAF3UWG~\ڞ\ӗ.YaUL^+Ȼ[]dK8XaG@W2܌r* _ݗiw?p$Ξɳ*ŃŔJKD2>&Vgqktuiytk1 KKW\G,eX/CoDo7ѩxZV䇶h*̎5 FC]!3YBzYvք7[O}DS.Y_dL!!#[ _(؇mnd>`u>wQ|,[Ek5+˟]Q[K*.Jw:9lh8/7`ڿ^ r昧(/f!Ǵ9V k͒8ιBahP蘘jwNX.??owVE ;] Y$XY9zȬ)t`1Ҝ*gռ\HR"<Ǘ˩8HA8)43'lw~НT_ us]lSp?Z5%6xNYGgӪ5$m%\CErxv 'i؇g[KGVaFS?]^@ZVHOY VPkO_"ӫǹR/i}@bv8rluGHK] rdG;x{UWh؂nxxm8/ҭieXTUL@0+Dnv q1#J|8xk^/ mSs/׭EcS3w8Rp >ʃy/+ZfXoae]/_dLQ[SR`}ٰvdg?/+JLWϝʯ;<_[ۚc}@mJ/7*v-yz q8: (@+\Y3y\F9" 5ձK3@*F삚f|eZљ;&\,R{x36~=ygȯJÕ^Wc;cwǴt G}RʯKmtL\Lໃ`3\ٚQIb k{Y 0`WefĽWܓX|[c5rwʆj롧I;J/P"^[>dX'w}pG>Dr6!=;8 (c#ijIΈ{^o^H[ζY>cWV*ڥ.Rp=뤽2XWg\d#2@btKUڻ?'ѵϙՈV6(/ZZeCUmַvOEQF)wJT?j8R b[kk:5ODQ+3{-](q$ 9`S;d.%f?ΚLȽF;`d6:kzx^tZtڃ1 % 2:h8hΊFrύz<*9 Xv-AKo._.DC:8{1g{^㕫έuXuD vKr=_}f\W3l{[9.qzF*{ qeMrlPrE,\;#`", frv`t)yVH.&e3b mlzUGeն!=5WV,+z_ij`\8fy͖ + b؇ޟlP/JUwm0{dUK%>)qo\۩_18J6 Ѓyڎz5Ojzۚcwyw3ΪgNBbP![X爾xk'2 hY@/F/QgYWi( F%)9 g0"iY娮-Fz MɐiԖ>z]y1%^HIꬕN d]6jn+rLۼrkSݲ25by'vso(Q2c˵4A(cƚ17LUV3V'>5 Q %3kv-9 "[( W m~xY!J2knj׫3252-ވmLg81Iyd{-'+soʮ,޼}TXU ccZץ+ᙳ3#vtY۝gkaǢQn᜛_LnJGb:\ʅVtXsT9,2L^gŧ9[}=Ѓ9r>9B A~<#l~k1_yH]Tuso~9ozkl]Bsw"Y6%NqEBUDW9_βQ- @j+jf?b w2 ^lQR3B'$ZqGFj)z?;Xπ1RYOjh׭m6'3|uVE(dbcmt?cXgY۞:9Z mSя19W5iWOWa`5|(RgSebxLAwj4Q ߽e2LQ% oʠ~xc!3cW ]֩#8(;+^k˭ѓ#e60We_][<+ >'cZR2^eH^i՟v!c+B[F~Ց|+E9JAo1 }qgTI0,uH9,ܴYlP }R+?޾Ij{GTd[KiQcoyOGG3ѩ碲f56Ӫ'7Яm5=s7X Ua~Q5C᜺Yۤxw7ޚC[Ŗ}=LQaVyoG]#O19眃Q-i=.c; !~/# >wX}ukuY0˙ސpX:8cjͲ)Be-񉾡쥇)%LIOhh6I?Ί^;[i+^; 6Q"ZPm^G 
*vg(F^Dj5_-RRFVF9;~f[[TأʬDD>C܉2e]eG3=]!ȹ>X9%U&.q\qwL|iB+\ܶjͼm]]|x׺8fbewZ<Ա{Ժ89h\((^Anۋ^Fr]:>cԐ߬R'lˊ'ײJGtQb9zOEjgzQ*{zΙgl@"6ݳA IneTs;laδ L$ݴ9Qe b95PU2b}X%#h|zX$&ilYl%P?, C'Tc+q6F>(|Du ,2xYD{}zmGgn]־ζR[+ bDC uj1f{rМǐ19")7NYXmR6q) u)=8?QcrU*UқVFeZSh wcX 5frzm~V2jg# N{dQyq2o/wTr]XQ˯^[$d FHL.eLmwf,[V7-)U?H96o_q(3n//rc9B/h^\Џ17>!iS8bZ4 ׋r0əq4Jsu0*JvKV*ֹ<`e=Iga p?Eky-J!/xg?} .iQyx_٩2[q~zoo]]Gߊ|l%p3Bso)pgӝhcдjgȆX=Jp^+8&-jzf(uЍ8E%T`N</:m^9c,|sW@Ҿl~ӟ~@-fm=zFq-z}gE\-1dVθw;qǨ1xysoBYq)3>˘Q9_qZ5ƋSÂ*XF5ֱg%Rfh#pMY!WhLІTηBfWYz!kwnޘkX}6kdz`2OZxFaɓxq_ޥ==R|pB"Ҩ4S&sLbh"*jݵf/!ʅEvH17hMBrL.3άqߟjO2ּm*tuծ'{ cl?56(7iVw;B 1|XnrU&ӹVېI"#~T8;?|:k Yȯݖ(m۱t1чi gN^iZs=6O԰9Gh^#d6[H.3"BfOdRZi8R|T[d˛ݶaLvzLAH pԔ~.+Y ]}>HO2s:gTT֍k_1~r#c**ۼF[_ѿ2.y?<ޤUn`xv ĝ]:p7M R>~V߇/}!?Ie`yz\< 7~9sdԖ:lz-^Y/5Nks}Rb[[%h[콕2 މWL}Ej*ێ^Wڰ5=k3;|}67A/btd'5؄/#O-zqu}@ƲD%LاNג1 ?% =V漅G${vXy|#hdN6Wdw>@yNfG1Xe!~=.s+Fgy&t<%e{~>Ǝ7ٟO9h9F]]yY6;5Či t_?y,yfJiITVL?H\QꭕNdmLI'<)LA+!l;vDGoOEbG#KwcH9H g ; a_̓h{E#Wc1Kr}I|חn_*fW$T05a>aHys"4㥗w$Wр]Km461ok3v/Y b?7#yf!{?oþ |NBJF2+Vc=ւ;VPS%]$BkC+>ʃ7tmVMͱUscF&>YՕ,I1HYE\lj|Gkvx]/nZM:m~RZ'k0'E@ГA/[7ޅΑ*wU"2?\3*E&OvOIy_!ð^*ĉa^*0|6;obVJx"JeO}q_B\ 5\W|K HR:1mx/,gk#B y|`(p젶!l`(t>OIɳͶL?zv25Tb#b+lFO+V3/[`|M xF1Z0d<Fzb`aiu][DV>~ά]ߚ5Lkw/h7?c1EoDhʹTĕ" Oz-a?ijla7h}tCxu =FaDnb?'î#05xY93-$,A|?r߭!N9z^Q!6Ϫn+Mh2'FIEG*7n4]r>0>~@ga9`[ ,otj祟 KSKI  Vc=KyWp*ݔt(Ob%C#@2ʀ$ZOBTE䴙HCM;}X"1em" 8Ҵӂ(,dح-o~5xZNC8~Z!0B2Iюp_@th.l"B0? ljE-J|d61ƽ=Z#\1^"k+.';$3XkyWVʔ+sdO$"3#y&y^K?gWG740ϻ^ay&dn!kloAFa)ăc?w$u%< 1/ 97Y@՟5478bE3!ï=rF;崫ỗD.ND4d݇/-{=ڀv áx v ~?8p[)<Ex|VX<^Ǽ<ћ_ 91t+,@523.e/8Sɘ B >fܕ#ܟLoWc5s|W̒Hz--1m}W=rroz}vg.%l[; <;W3/&xr8;(x$~ @Z?{𙣑tF>~N|< VW89ksb\*@4bS<5Վ'ODQR'\D;vc)sbtlj/!n c[56(6""!jO6.+Zt+V3W=*bO_:%}р]oҫ9/a?: Ӓ.od)|/%(p0:44 {/ 2B{ɜ0TvϮ /Oa>`aE(VCQ"DZ1_]q]OFj^%u1;O؃miNTb᫾SB]rwi~l*<O$Yg.ͷ=񃽘"=N;HDžlKvK,V4#dff`m_E݂^/ϵ'1jSЌ|ODFM֙و6=Pw<1ͳD4)'0~Md$ yC{2گX]@r3Ӄ}H"LB&H'iG4= ?*ty!gglLpX͜d"w,p_k.&wՕX}LvFQzpirՎM\Opʈ|ܖ́sg `&Tibl2 Kz`U1(snDۣEQ!*2%Y!74%v꣜p{)y,ׁؓi+uW1aڦCD6~2Z;hG׃ec7-_E419_JYN,B^SzWbU(WNQh9 cmg0y?#8,xCӐFx\ᙦ_`[zÑG `oFw`>R3u71u9*hYHJzk~*Dy"7-±X_5 "uKIH.AIXO2$tB7g0% Q̇lkCBCZtn>N4/iIYֱs6$ּ,66ŮݍHfv N?:xxgdKVtN6C٪0a V,r$b%x6ώklTֵuZ;㊼.ڿ'i =QXէ!k ڬ܊ܺFjRt kFq翖1r5r4r[r'p *15H@_Nx.ĒMFgw\]Os6p왬`?Puc{9ϙ>+ӻn;949}+ssgxĎOd 7XXcPF:XS1,tZ;c1_A]k}cieĝ 9JȻQyH{.!hu3Uk;ɣ x3쀲B/6/G<͵-}ɿDŽa:ϥ C>ԕF ~橛#E:}1H|WֳYuVًWOav^z fKS52:kG8vLNsp ٚgxp6T8%}8P85DF'eh+ׄ_Cc'" xY"K32'mX8 cV }F8lпLrv4jmؗ+c վ bd_u敝+ ZPu{wʸùRVz VEx<$-ݳKU5> ~'C2kzg=VgY5XEe'%K:L O\^7.\(YvSplveVL1Ӧi\Cma?G{hUeUUDQjRuܣmrB%[ZKg&cӬbGزI[-ݓa_G-[u+o91E › ۰rzҫ^ Кt86n\72b3~wE^<:zHA;#.G`;ݞ&ϓ_GXdg A;c'똃G ]eb..̹bspbcMl2^A_\ [A`W9^Cx @wz6uqqM1qz,F;f}U,s x?rbdDZܕlxO|% ds aZ"oc:ґNY*| Xj$`2(M9=+ )5Oocn~ݧ:aG77Gd͖vA\ Ԝ΀M&創Cvpn{1x,3aYgIIH,pr8mV ,c=wױ |xBU@zR.tFX [D u+n gh,|_p rCzCKfdb89Q;~ů2"F:B6W:j!gZ/~B*2pNpk ?йpGne3­dOdLQ뻔GK+Ñh2d;aHN6Pdȶ7DrJ@0S *$uF%;oZ?6Jm%ЮdƯ_A+Z Xx_T:8oSDֹ[ 8}J>s r'LxPIec (e-N( 5?q _ '>ߵ~<6ty @-ڐ庡X{ +旰;d+f1ء:Ј}AbTvunbbcQ-ѐϽ v 5 ?1/`ݼqVO bHNЩaGN:-8|v%C=8.1ւ yGr=p_S ]ϑڎ &9Xxzf'/8:ZcTwPֲj_}_Y(C;\ةYsD vuO۬v1rZ,oQ_lN-:t}*\=Y(pv'^g| ȶ= ftI~؞ $.ns |*W C|hR݈xIzEOP%3é[n,Su0y!'[aMyo1a: X\?DZP/=;#kߛ=^K/P؍YQV0ՓyA=@\4(1M:qwSj;DBmݨ7y5ݖ/µJo! 
=R7x ~PP꥜y)zų7j Pw 5.臋A,F R.ȓ8=B!Dձ1r͛轛k˻FMz5t$`19B䞅??,*)/Qo-ZHlK̳yo!!A7JʼdiX~^1^~Oxg}9ޢ9eӚ^ZM*fֲxwJZg-SE2,a`o\M -uENShaf"ǫFO/rJpwō>sOOۃQwoI,{~: _ ed~-oqʙfQYf`(hfĿ]5|J;{5|}Gj)5f</'#i kohl['[^4rC\,nW(bUӨoikB~Xmmy/UNB^duFҘ_ej0 \^zÈ2M&m6G,9F\hO-.o&PQFFS3Vwʋ sB;t|$V5%D_17*Z+|js~Fb=5`q#ĆURT򚷷(p\὚u+q%}vq'D.UJEG8ŧH~Sg,a1.s|%ǨҨVzh5Rn \)QB%Og5/jzݕW&1U;?ϥTX=ܙ"WI3wNi8,RQ-T:\;\I t AK-,LZ])l'6t-C G v9<r5gLUKm!!B^s%r*bW {;B*ޖT(X}~W]j{U<īR?#=f^ ֿD'֏ .CB^yxa'[i6XE<5^+SZ6jBUPVg#d jZgsRaJ8PHռчQB3f+'k)5KJh>=?ŒSh4ځPN8&.xH+І'!TG*!STu޹\yջZ"-D01V$#5&qd瑺ʊzHR%dF ģ-Eԋ`x$b#ힽJI+9 "8W׈_mV%x 뽀++FZWZ{YuEHFbsS@6@Uu\7FFNk.GXF< īS_/0 y&"ڹsiO8f*eN)ד:xx%9[Zsx46eST漏`8st;3{UߪARđ#PRbᑮCN=[i;= 2W|&L %2|+WC*a "?Fʢr#W$kdMBd2lUΦwt>=@{A.dI36¾3> <=8F}<˷K^kXN{cΨk"ƂF|^|_kc9S[}TŮ\7 'l{s6U|:$~ˀEQ_Cb@%Wpkv'; SNT,j?v72pI-S {+pXATˤ~8[تT7*OPqE7U}|u)L[;yorQAn^sv?A:[PG8jxM yL2ߒ?];W_F~3ԓ>jhM`VB&P ɥAM!= MP~_N0DI a43Eq)U{.x/tw]fTdNNP̸q@CNCgI.zQs>$P\2E'ph:)BJ*΢`6H̞㾢#k^+8utUT\>gd߄s\Qurfjx/"pdV-&.fvhY(2ᖗ7SvfݧyyL5} Pk#'Mw5|CRgWOWjtCވ+[y^>0}FO>!yӔ;ױ#k0XKfw3+ !_z{+ f+Q1Lnx :h5h?R]5 chٛ3GC]qYc%sU|~2k˽^d%d)ΣY~Jz3Sq}6cp1kV"V~UO6%yrɉYA:g=5yF&yk7Tpsym{|;j;C,oc'"}>AuDNzVx&;kꁉQG_4EH[Gմ//r?;kىޣ24.Ĝ\7&n+IGW\j'*}F.!n߹܊]X- @i˟9hc' IO5눘T\i#czWrl]أ{`YY>B?X<ܾu9[9^WOBQWWh¿QwoAЁT8קe4W]&}.Ti7񄾂y04s.BU7;3&rVmU&A[%ߊoy3Ub^^,k&1k +* R.#Ŝ iwg?k*ir8v9LYZ’Wwg&jLt:,v5_@H>ǖCc_ m=ʪ֕;|8 J>rQyrNz iV9GRG <;z8k+B_ysuGG~oCcNkS) CF_2{Zt6dN̥䫪̫Ż(E]Fa2,jW cYBzU\~HH*J+G g+B"Bs +5=}XTcXUD)Qi?y\)ݡB|`<;GCDi>B3t\}yeWտSg8\CV:g-b'k,:qyhy@,Z8}ziwy53g_aOZռ#>&Võٱ~1jAE np aPn50bP;:W& >veU8ߚ[F4Bk}Z~)7'.O,"ץ?rwH\P;+qgOC`,`|mo1bUim@H] xY'$>/l6r@ ;kAVBr]])b{<+dʈE\{`1 [p@fӯu.+}+H[>Mz\,MU kMkܩeh=i"Pʉe4xB_8O>g\!9ikE3,u|ϥ6`ꕼucGUBba\'qרlBjP3paO\ *029kC&OVjZZ|kpL 9nwυSk>kۓ&P\[_U x|5AuEϪ|淔Vd~p{ϚRʜ֙ uF+ΦnPۈ7Jh>L(kGV^ǐ;|F 7s-cg{>QOq,wا/f=qnVo"ͨDSĀlmTj^=5uގ'Cq {xX#LUr|P}Q0+3ʴ%GӂΤ{ s2T_ ? ӡF-ub?Ph-ńkRhzmsS1%_V^ SW_&?Dnipp_V+I'%S+'ʊ8j)9\] 枛!`JxO朣;`:Rx;&*xn_΅v]Ώ:q|RQjtarwFa: ۋɕPK2F_k6CT ߾)Ui/p*DuXwj<"ůMןtDޣӏV+O5dͧ2,0txlB짋uͤaYbzhTمqYJKyM-EF琧4Iܹ2}PWy8W9X3T 񿾹3n=HF;w!d/{sbtl=;=kUMȢvgwD_Qs@\zR/afO7`g09ryZ>| ;'81baܓaA:g hoW" ۔S<Wo&3ӎ3!?LEzG[х9ws ptSYnSw*/w/aUʇiWrHgz]ڜO渊'߿d8pay5eT;P8=fqRG{gZHfq:6Cٙ1u'Кՙsν%C(JQQAJTAEbP51Dǀ8HTbhnA,Y*vm}rus=|N||z@|!/N~ܞ[LlG E Eɟdߵ4-38={w_^5}zvx״tXv!4H}cf/1^i<_֍u~Eڬ Oqpf77;>s<'gYz`Ra[ LeBJS ??6?OctJnX<%^InZ?<1 g]2uOUAm_6]1pxp|׺% Y~Znl-XW4#U2p&#Uܟ͎oxDosPd@u͂s_;44/47%N6L'g>ӯ[3W)bc<$4VNhu.*Y5ü9K3e]_ϙNl7Y=v6})Ԯ>~--Ot+>gZD{׻v ^f|N9I_l&DM(xD_Tbb \_w3hs>Ⱦ7 w䴹M4{ޙBѱNG~yAYXf V*OB~/~x̽~x]v<ɉ>p2gxM}RzW :,ODQD&G\e̩)gn<\Uу&81w<?DkKeiuޘI4vzpk歿{V zzjX\Jq ]NáRi|>!Mb[?9QqKܭUlhc^h%wѸ>#RuiξlMٳn#I9w[ ;hQmOɩ?dΘVˊl =9 udwD ox~X-rCϩ J(=|'?~sv<^EWiENw |c7b921vmahX@Z`zaFӒd~˧9&ej'j[AT%縨BEqQzm[" r*,$V1?MоxhE!y㟅Ϋ6s$-pYybSz]?4 A[MWU?}[яl)1eH=4h x8>n$YWC~a &{ޝh5f׮ʢ:QF\.E_hٙyN\eZZK~Zg-.yYy9&? 
dipy-0.13.0/dipy/data/files/ascm_out_test.nii.gz000066400000000000000000015620251317371701200215260ustar00rootroot00000000000000
[binary gzip-compressed NIfTI test data; content is not representable as text and is omitted]
q)DN ۟,DyO2̰JU*7!gˤ a>HŅ垗rS uY~=`VNymNy~ԨZ!َ-uh㺾 Id$\Ɵv~OriC[< IK Uы@%f;Wo |ż_Z!e'vt9W(CQyUE}=g:k*)k\ ) $/66NIw$mv=9]\D?U%$3y oի>HJ3kÖX&]`n^h 3ˬ̀OZ7Bg)33eDV /TsrԷ4T 8 vj5Js3)EOw?bE9|{'NPX9sv.÷!V)~dM52y*׿4E*^Lس2S<'Qs}F Gڭ| #:WudFO_j橪' U8xS(&ڏ.+}[@ܕܹxNjM/⛹ ЋX= 9#CuwN+oĺp?B1O[%Uֻy(@&kG8Z>`ejt| ] y#W0h^(ByVu}'/|Tr2-etSs03oVȨhe1K0:e4gnfrJr-Ar[pk[В `[*pw#M7sӀ^yw$('HOfx2{ˇ.ۘS>Y9ETiK!gwDEp;X)FbNjyrvĽ .EC# x9DcdA5 ̊п MdƩ`O>`5ni_޲.6kd$yIžݙĽZibHHY[f<Ї,9 ҇~{ERDaǕVmp)}X˦H]DqzkwDep,Dc3Nx +IXDJIQ]t?GQg7PeVȉpɕu2 o `S)p ȋ->E7^!|cߛٟC Le$6^1磈{ᱱдaq0v \]hv+7\#c"zNՕpuI>z!xCY1w?14/yMtrr S_fJg{N;B6 1X㿝,I~?ш]OŸ =Sl?insNVXo;Jl:, u[o9awspMSkM ֬* Pt3̖Eq6B9É~dyGh =Uƀ5hgI{CҡDa_^=bTᣊHl owȂ!"|d0 RcR3f%vݵfS4Wc1BT#~ƳSן:/d=M5ԏ6]sԚj'05K$ ;zY}-8 &iAY-Њů;~:*O~2u 䧦_E̟^_39*J';Eg0mbAÁ!<`2l&bNz*D΋n4eZ{BU Ó8_gYs.ۨbU38AJL"jBog=.cxI6$c rV !AjaD?NճnM?`EEk0ZRag GVq#׈ze mvAn"犪p-x:2dCx\Oܤ񂇮Hg2s[=|]3 'Un ݴjv0Kyx3տd/fo t>EK_?9KLGhccP wtG:ȧD ?cpw!v?Bל-D*DΎBD.@hm}C{oNxwnS,!`=>P ewΙ&[ mかHz[M= ^2!a5Sj̟ޟ׈>麤0)e3hZe^bm`.2V˷R10߻~5Lf,tfyVUkj^̶+Ӷ#:&5/ PqKE#%qdsw>߶Ek,+e:d: D+P,H@PK4+ Ahr V`y)pS+;Xchi-}lATH<5{V3jn+*zehh $K6GZ}yȿr?>wޝa]4%'Uak`NR nU]8e~'_l7zw3"&۩OgcVNq_5Ngyu9p?h6ت}xkANRo#3qW\$jQ"[xH\뫹^% a"b!};usϡ461V yObE[9 l p!?E4\ ūi`nAl/o`5 /J/+L&0oa9X_Uajy{m2T(T,!up":mt72*6)6>F>'Wa{)O>NXfSomŸNA´Jǒ?DPѹ|̏7 f aǏ~]:?+WGNL J•6w7&לs]| zG{Qjb&]ygV˫̡5|$ 8Va%dJPo[MU[F%DSn|"Ygє'vO쀯眊u.0h0zx/9hOCT.Aթ3ps}/㋂:yr|x NJ -e׼i:6ڨ#HKo*.#02|=`x,UPw9o_eYGV?z4޼LjEZ#e>Qa$>4n16I&mB ot hj0~n>&|HcH#FREJ& y 2& QiMǾbsPPPUx2#r'&i0soHMT|~n·2T;yw;Xȇ9ǣwow'NN$m`\%G8U0'|6nu0\#w.@!P}03ӯUqL@0B K'zRi(+7 3zv{̉DdIM0Q>5TJ_Oz£ C% ?|f>E23%8.{zu%F_j%=Ju{P!_<֮&u8xՑ 5c=8U2w&d OQg;džJɈDtL_޿X>R$/?b>F=eea9i 4]2oY"l![kivˮdcnWUZq9xSk WXضcо;&Ee9)$[1V@*43KVV6װ2f ˱[1{ S1%_zyܬCG? m?IR`$O\l5!H-E}K}TY/q"|E[$;Six{Ou.jE?IqZd.˘ٯ58ۗtix\ʀ|a\?&h&a. ^C"}nyL>uCravd"RG d ] |u9?H-8TK䢞!$m6-AO-gV}Η6̓g+*&x .[@}}/sڳ'lo"[ 궋PûMtL6XuXܜ^\ze/rug-S[`s +1;P翵z 08 |~3&2G`\,WְT9/* Ә]c#0u!y ir i߆:IΩ譍9L=j?YYyF_U]\?,ʔ5>4Ws$Aⴼ*{uWtlJ䕩%[/m9- 0~2be&ގlP9cQ9By7,_"Of`7R|>'Tp߶_`k `ڦtDc;đvC˸fk/`jH9*"Ke#XsqG{ /\GvϫFy?ß?#rg!geCٕρ&EMSXdPC}r"ά0ӓ:~CKϭ iZM#+3g2n_! 
oӳ28sjZAQp=Ub}+9^LY5T^zTY ΰpM;@^㏋\o@y*~v~ ͫ\J݌үBf0QIp2>f|E n?X4=A('wDN 6?q9XNUD 8$|eb{r̠ f' 48II;\3/dY oмDU۞&QVwE^ $ꖅ*G {e1J=t6A+/(])>L\O{w!Ë_ph^Ǖ{#MRY4zG}<`-7M{fHw&˱si?𼼛C1$0Y'/DMͺ#V#f NôɡRag.n{̺- r9ɄOja-&BiV|s~ɽ|of M/bu71O>Rg!f6_\muL@A{$cC -@Ӊ0j,m ?2.6 מ3(vUJ- >/#AL珪#Mx<Dl=aհdӯh_[DWofr D ܹѠ5 DeNF~{P~6XZ 3ZI֜/5>8TYX-Ćʂ\<7U>C~_M`&*bن.lX@ݬ}Iz`Dߘhsf`=Lpϭ#RD밾MlsOVA c~KlFs`VS06hكuMMfGԟrxa |M[W@0R\\}8b ǿȾe"r |jvxqu(ܯ l=x̾!RG]Hˑ#3]xlE;૬D{-I eY:2~!s_P ZLSwp''OL_=s9x]Ӄgn5b9|Ϻ(kۏΜV`Y[r% EN-qo~֍{3#f F'n2_]ۑEJzPljhρL,&_+Tr]#k?Rj\DZ-*s9g |Ȅ[P*o7}*5e$X E}kГq!|?npxY$t+jU59zVn}gA[0"ij?I SjÒp\B$ #/\9 ܒhL?v#t!O]:tN>֮[UZ5v~wS>vdyWy:3>v*A> oד_8BJI F٧aˊ;Gwt\^eCnb:n8B_DݶP% U-Yf |qk^_xm^дZ4kzvLp1޷Sh*P  uR9 Q[`N->o:R6?b]?s~$^#[!;>Eu/=Y:+8d}3N"욞F3/__ם}KO0\p=Q/}T@hk#VdwGzK١e͞腈f:3РAv/=0 Cgc>{ ] $r8ke%^~"A;{B4s$Kwso/,=t?n;ݰ 77g/LozF(fjZvljOu7>g1= \1xV (+z]*P wif;&]^zE'vy݇?4˯> ER!I"C)CʹvR!$<<LJ"C$cs"S!$("Jyg{o_nGXXk_~FCml Tq=a\EW1I+i>m-FrRN^3 O|ZɍbaxM]Ո<6"уa-X^X7%1U3<@u5*@K |vMڄ4_i"?9J{Y}C|+yKs/ }@8t>^0]__nZ nrQfBxA5/))FF*'nO4tA^|ծ?_} K&c0(Ԥn?M9%D<5{R`؂@R[Llt1>-1i6azuh& }lHy+BzG=)gpv5{(`/so,9ʉsoc |ZIzZ{i#/QYuO2S#8vU:`9h{?7=CpXiG,BN*/u O?"ե[νw" G=i4?)B=xj[eb}Mr/GoU"jDwfq-ƛeLd%gi^7z~2-hW|LDC%̔ȩT]ix.閸8~L;3 C˷HκgUk.v}X gU:S"#awD|qu} 4ψÁ}= oO.?sM{+~SpC+Z2|O٪ytf\ #'!z'ˇ!OL)_<9!(a2nh)ɺO~ɁCQU^@/+s؇L(3LM.prIgN+wwkL^8`dm=7zE޿q{>~xٯ(㻔56s9Jz;I,uD|6g$*c w;(egaI?JӤkw~FyqD=#Dw>w'Jh9J4d7*}|SQ,QbO#Pٹ \7n/~(Ʈ-\gÀ7/TȮM# ډqsVG'Lv=&wGy,{*7WSfc⢘=Z*UM2b12] Y5讻/S&nu=.ŷȦK!o4 t;XmIc7\k !C6[4/ V'#" հx 9 §B`AMݠ З; "}LfJ="-긃gheY LR $p l Vno#=n`kb$@*ʼ_?\0Kf'/>|>הt=)+8nC+zZ1ER.y%h7F4a["vyejطON#55k$͒41Kv}ĠRX~{ܲG׍ /X na͎k4WtVo@4~N^RMTzUxu4q{z0#qrV; ݊^ O/"xEE^4k@{4sMBS.WYRf$!wp4ovɰ&V=~ F n(ӸCQZL+38E}n fѣ-Ov6~Sm@ JEM[Z8?:fhn]S4(A l])[:/SKKv>iaSm{1yMuHMfls0[F'`K^AΛ o"hԠ-t%n{*whvU|a]O7-^(78L߿Y:ﺸS*]8FqԪ#?̬sM`;ع!/KjVsKNvRu8ӰM2 Dxb| 1nH2UP|kWY;ݔM]7i,Wʼn%57}  EKmR եS%Qs 98SkgBqRh}G*fƤtRqc!Vؕ[ER7WyWХ{}j"3:n05ƖM#>Zŋ85M=A%+3 dp{R}#>/0x$TV-P< 7@?bH->NqIS@g.;f}EJ 򘋆Z6zCfI[ߌ)aGM_ ē7S,"o) SYهuޒ|]-O(k*zgD Y{Me T8is Su#\:WܕsKSvKC yS_ :Qv8=ff @Ҽѡ8I!wy0u7RmpL}s ps2FuKDIG:GX8"aDuޙ Iw?yAݭ8"~<~kIu;ĺ‰.B{a$q<@4\M]&Y?p_W\O|V=}9cEA}}T˕a$JMbA(7jˋHt͙ ,Xz5Tآ%_(܂N +݂8:.>_kz`$[}4[#)_ wТ$GiPqn>$ԛA1=Pu?y\Ny-[Uecl;o(lETzqj+ߘ1_,koV\Ub߹Aч+3xm#*KzybqK`j*nS{Z?3{vwnAQ rfvbUSx==$^01d'PC ,3mO?<⼎nO.: Lvt fz(ݨc$;g='ݐpz4* uz;9)#5}1>dMjByM Z<l*֟or };o;j~Y\@2|(?o*]ptZGML-<𘣡%mN>;ZBX}gv(+JM[sZ rM7.K-2|ou}>aB[k)P@nl{ BSw(ʾ'iF{'6>+Ӯ' x%Ĉ{|#}07^^|XV{ !kLE7z?TC]ھePΩOJ=P_};f|A,AZOuѶiKJBu|LwW ɹ._{Smp䌹,Fo$n DB^Lۏ*4{~}9EnDoquީg7RN^zZ >&b8B셙ONŊ"kwR=|SԴoc8:INۨ0^zG[ :N%_bBpWAۧؼӟ_nJ;lſr[Ʈ!?VR&7PD&tid3g QMyY(Ո]Ǒiޞ3J(i/v=_EZ#=_}}P-en]6:5_xmaG_I{^bG޷昁qt%]67ǛpZ*g!Ixn/6,SQb W(ySes QZXE\>a)yN~ 'Zdܼ!:T;K%6py5],sks%61BFf= "">.,v}|b mPxՎ27Et\EzQ  \FoX2_UWiMI!Elwz)8ȣ'7E*Z q 7|$?"hڈ؍YMX<)W$ܗt2 468{5q _ΪfQ``06䂝Pޝ"wD紇ܸ!Db->dqK[А-W d Kթ^W7'i*QItUL}/~wWs{HGJOQKBhԼeYRO[ݧ>w^8"Հ[~ NM[.!=g4P| HofOuv苤H(ЛdR1.AO zm[%gD:2y_L=X7H4636_}?Jn+a;65x5sc8s8ts o֞çպL&@$!XC(!sdG4*J4Ek4*GMcz$ޏ;!uy|"poofض[|=HvcC-Ï^!fPюh0tG WpE杛xd 9TqrGdFnJ8S*aR8k@7BV+l,R"r+T?,>_E>҅PUi2 a bQPRRR+rIm3Þi|%_Q>wyshXcĎcmײSc{veTOmu8t &L" d}v^9qp7htݎ OھϏ6&/V,'63MPWSa덋"4}R46cE/z`v.4^4Dnb|9^ #D)k>3q*eMz{n&39F$[FΫGy4M9`"2/:$ijӷXO=4C v}jxt9δh,n8$V$J\Hǹ^mO×M؜Ϛbܨu<2>_D"ϴGxc!ܝS-Ǎ '{vU!âqt~Xa3W4@-8:ҜlCޜJMHnuDr3z1 {/K0? ]3w (`KѶU__>tJQiҖ(Snȧq%[WJǓds!1QGGXA_Y#SQ1i K#:tHfZ<9Uj H;6Jm>5Nlu%4j a(r`/^fc/ .IBWosw82ȑH/C^r{8]Xn%jڔMo. 
JD'FWEuf uɇIDLS㦜寒?7}E=3+eMOh)S˝xhDۮ12OI9p`@*eŜF #oM>t/;~^t&0+(p)t~lRPf5R[[=cW+[Cu4%CNH%IN, j4WHPVΨYj^84rbۻz4+p]u^VE5Xu_;=LK_5&+h`z.dl}~&%r!dG>t-b_z(8ُKfM_kO_?$<-[Ô?ԣ?3)yAJQAsuKWW# ݷ<;ku>62aF8YN+ T_2vLƠ[m+:_RQm +]U Ne)LנuZPaϘ%H~i%uMe/puؼs#OWOd7sOG_2fqF:e<E_tsM?nMT;KY"Kd'7dž[RÅلӋ eGIW䶡"h)O ah?63ȼ).6GWh[ Fq7^v߈NK"ДR=*TѠ,3lޣ,;~HW@i#W[E# W$QLg}]9lvcQ o㳊?~״Dú1sz$mA}8UE,cz;M8!(3gx*G?zJj- $8GS sIM&7,)m{`5jI!C?52we>~=?:cUu_k4q9s"~qۿ[42$’սr'ODtbpU@y BJvBBE6r 1oֈF.ߍK~A߱g `mZFYKN>s/V+#I^xj5A{t*ŇK.&:E.+4y!ܥ Pu_ڦ*V[_~>e5V$&}:V$xc;3Tcwsͬ2Wƅ(ב?H};S ϋ# H udVq-~I'Bj-[:w8THmw[^dCrhwhпAIѸhU,J3&jaR`۹,~2&Fv][j o+dɫ٥v9ti)rzOyMH2w/ʪł,g(i!:gZ1PqA4LXDr/:6Xm5>Ui "Nl8^cڿa i641FѣuFԛV.ϠC).R_O <#`RHmK VDe7:2X+~/|1ַfA{?jevgfsƌ[z%u?`qjTHiS9FYS}i xA,|ꔤ?"%J{S ^Nxe+{hLu?CHŦ*ѥQRg~ᷞD;Mo& 3/@zNqX8+jŘlMEj">7ܹ;XɬJ͹и\2,~>b`uF/(g`y)͗-$o{}^U-Fͻ j@ϻE%>ERO.e<@7tj8C@PD x,0;?|\Sǰ]z1˛O]wE|_cKV9 ;c+H|\g˕EP{uц3h䳰R6YfxEǽvQx@/Q[u4j('T3 wGUlQ%7psdj pU@pef7kY@P?S#jHy:ac =d}qH8ϲ.Aa^~@f.zbŸӍ$&Dꝳ~> ءDx?ý)NLBU9m%Thf7*Nn;a;u@fXv{W ,8cGC_#'r3~GиCOQC^wQnȗ*<# W+֯e2qXF h7ŬU3XG9!p,cW%!TҮӭYMg3a!Q.-ϻ=.l rC/a .,Ea˫4QpEܳc2$DsI>Qn'xkBs'K~߷Njz֝rWk (in4p$j /u~[Hlc,Lz*8VlY6Nz>s훝U"G~~A44E#* F3I%YW!c'DH+.$!vq!\q?7HaADݜ_5uRՊ %<1:ILѬoFa1\h)9<w`u㥁*~Zrg(D;?Ң[εbWїuk d'C־<73+ȜamX~rppyÇݩ"rQdC#-$i<3kC'*ih\^$ (:lđwgWmV1kNLm_lY1*5D`MFQ\qK+)?ύ*+"*hl|xz_Vd9fۆy7srn*>۬Z*/8$7cb.ńR/hjtUsiwFf8iG׊9~K&Ɨ q2;mBgId7!Plyij]2w nqG3xՙ5%dV'2֭DO˺D~c5:ٜ r.LjvOڀ30DtF#Z ]G7y~TҀ@Iy򓞳m0<>Z#/5bSR|?^vN;vW *w墴Z0_B%yOެe{`] 9=xM;HGpo8| 3_@~gOP: n9g@g^3kxaopxb12"W=a]mm/T>J=^7sv8GVP.o?,3[ עLeA@IQ;h(^If}nj6CEɵw& ӓښxAd"EʙNH3zKWBj]kSS_j#ge׭?Σ?~V1yI幈I[?MGsD S WX:T+papя`-P'H lGfhd? 7q"o y-Ĭmo d@?yKJQ'n b?l[Yϯ&Ans8vF_ Вf3^J$m WK߾a f) hr=:f S3Vr_-G9LpIU{5k7 9ow]K(R?BS;Hс_6'].\C_㥪g OKel1%r'Ѓ*Rè-<ܜ [僇bGyT@IO"־;D$\OӰNp t> !W#ҟVoiq4Gg&mD ;Iap㮎b-u}09o SL@QΝ23c>c^ey֗ EԒspG9ɻ [ݺ? 'l n0ƓsJ=NܻmҎy6rId^#"]*\0qAUBwx͌Os⏟ޖϥy}άOk/ﲣ)\.u-3 b|݃GS:~^2H~\]"poי]L*Dn޻?EʮwwsbxܮdLpUXA3@%K[DmQ7e=m &2n|Ak_<1}3Wb2ŭ!{x23߲Օkf:kpxOʶzWdyed>(盯؃ՋGvЧ#t=ȱ|cgV\U"y!.g[ў6k} ~۱d\LhOԫ5mrmskerԃ :K[1L!x -rM~b^m6N=f[ČfZB;y-,nUcJYȧoT5[٩uM#uNon.AwpZg*Zb)r$/^zu`C*$zݛ/{ J&=x/m}8˶%O7 [׭<0O#:GQڽw>BrV^dʣͼNYB_(]Ζ2)q5Hbɺ w l"x8 !))G!S6>keE0WmTPp8;B5 xa #] D~ߎխ;<Ղ:3Ȇ?eIA%9uYB'Cgyxבt!j1jњ`ճ۾N@ܹ^7;4tnj")qU.M#ҧ͓D.s07wSd;M} KZw-_kj&{+X e}Byjw2Ĉd2;<0AQdfMA ⠱Oz %}@?'|-_vM"mSvOlA21,QـxseY[lM=+jT],sȱK@h8hv8\c"jzI qpQxUMiX9P:Q QXgKsS5"'nH=+ldVnp+fw^ڰ?W"rBu>t戫-\?lx ىKfi$S)ҩM-oMH\|m0E?v('$笞/-/":|%}٤.>Үte!5&otHhBK޻ׄhQIߎƣz˥ 3ӑ GRrhh䀋Y} >I'[EخOp5Yɵ9_7@cIɊ,kpV⴪ gu]=pO j\pLk]ާ˩0%(v0";eէ=Ju9-tW~*!Ǖڏ V Lmk^/9u2B w"#үl҄dƤ^ɭ(uu3f ’bޠD>\Ҟga9/  f d">q=cKom6؀[x ({ݒK鐰+@W?|,EeZ;3.o"Be.B7uvSXwMBsRvqzdv[yǝx}Iei4Mjr$j : t!桖;x͋ xg8gJ8*x^t*3GWSP}}qZJJ#G5ߘ iM*" w\P'c?[<3a!wӅ7)Kd>uKdo 11iwQLr,*!S$v:-z$#:M3*uWZWg{X,П^W& :K4I5bJ)VtSy-XT(`A}m]Ү8tU> ?-G8Z}J=XFB[7C~{Hz h[œ ?.q5I18R^pWMwTe)fjn/F<=3I`]V=,˿d/D1S%!淢#5"j T\_T:=P|p~Dkzr֚c?6| :k &(7@bM7nBLI<4!ydHJ%S$N2O JJ"ֳǵWeQֺ{_y35Ri_dwr $E6Sqhw+ɜb+P sF"L]B!VUda~'vq?#m@E :  x+n%o+ۂ>ck1:{`&kck-0=3 Ov[Bw:l5 s)xہYK077i}S@"Y0`kM)uƓLyHتTgC;۵<'L䎘:kUOBݳc;8(SG23S{(W쀙g%qB*gwQ5s}M ::C贳3(j Ӽ:ՎeŊ֪\L=q*5đxd8.E'"3oduL9g# 6*"sMB5ݔ0iɠ#~4tz`2нnېkj6.Ӣm!Jב6UYG'w{S7N(|oOZ:S(`a W[? ُP`DI q/?+q8V] ۾OiĈ$ !_?B 9&B- t:>HK nI 8.ߎ#A4vǧwa0_]L)\e)7ЉO< =+a&Ih=Eg[nY?֓oQ9-N;- DNSƈB<}gs}I;ƻps9sr?01MkD[<&Wș-*y`tk~+c,R6Z;й`Ȳz' bdy)$7R>Ь{us\`ID~vP Έeڈ$IwBj0k!GV˰{v% D֩y;G*0RNʦ wD ~P:+Oa&3UN\&44Ďnm!P,(fj O⾜wr>1R{ r7Ȼu/g*z60@^ei). 
b=[p-iՄzhԛcMg%b+X8euZt?QTs%-Tf (=yXVb%՛Vrр`WN.&Szï_C+[ޯ"woyzۣmDgN<+o(4FΐgS8\҈:Ldz6XrW ~cwOe;b0hK7x s<eA+m'jo7[ ڀ޵Ձ'7c :oӊ8q$y@@gQ~8kuBVU3!G0rq+Hș$Bt&BpoE}&sŗ޿> .z2J&ʵՉ*m׶L[\u$\8QZ߸w1V5͐Sl0IxG|P%GxCJgHHiL'kuHM{@hG3I6!_}&32] o(51{'Or6m}[jkT0|(p><xƔ*|јMG/եYM{t>/&g NsuL֡ o{<{G|~‡Q =dcC3vWy%nٚo:'<9m17ݗUҵ9 gvu뿼ŒwK_7ԕ_ {v><^f5kr!άNe>2 u!qt [d5(t&& 5DcAcbf\8nZ/6=qYBk\AǽB``sH&aPɗ/m "S>>o=t͑Jnh+osJ·~+Xγ &AG m/n/8=@g]qI)mMt?WOiʡu5qe[\.S]4S1%nghObmØm/ҔکsԊ10¬@/F5d{A޾ \֘9? l}-<)$rMhC!{N,М ڻ{u@w7Z 2ulfLFE2LمB ss7y ps'pAO>2hehVV/B  ԣ"o|63t0!ZۮTX՞}i)dj#}WuNHDTp4 Q(ok&'f݈eZ zĦ>\:ɧ?&l~n esBId8q=8CnSmjΕ` Ϲ/G`B{n Z&[gjZC,׹)eaCJL=pYT,@d f:]Fc`&"~V—z3xO(cQ]ZCUhφz ̇_bkyCr!Cv{]6O i*Ք=x]]`"/ 8F٩S]KZ#c_;mS}'rOZATxY,~{BwCTy~؟t{|M3Qvv4DbǹMސzE7/7jwl(4dzdفJkMOμNg, soY~xPZ{6nm?:IѽfWcA[^|!k&P'{fۥkrC};xo[ \hct&pcJPhcbb~,P}-o.Y,Er!*f2ӘRgħ KBgt$Ww#Ç6ۆ9F8lĻHgڀx :?0:`|M-G y2N%}#^Jp {{Š^˩_x. ] lD.v8_<#q>Հ84 'LA J_%8DJӚRgh^R/u%1ڏPaK,ѷm=,U[!\Nw\I&Kɵ$@Yx8/Z Фs=0|Fv{P+3DĺJvHSLZ?^}?zVXAʤP˾o+*')ɇl;|qbl?lYt#VwoX vFCYTte@r,{=)P?;p]锑8G5U!Q(\2/iѰK]0&szN`6V&Wq{.8-1ոQOD+:kIK94dN Sy^Sevۦ*+z5u+8<ǂDC(2R[p gh̆b=3#zcU=I1?ЇkO׳dZL`:/ 𤜸lvz!Q6B:Im=w 0=æZБX$+-7L:o_S_,UJDN%;,(r3 9.3n|ՅwoQ. l7ؒnjŮfB3Mӹ߮<s| {\xعQ%h:&> Gp_zzp,G~o4#=cJ7?%rXZQI+\W—g O(%*QFƭ~X[/ʥِ/E_y3TYn9֙~jC~Njt$LZKFɫ^"RZ8}ϻd3w$/5`$u&ؖfJٛ ǵKvu ]`X U{= yf&9BIͅ~CaUTjșKX? "#;0F+!D}J%8^%quޭJJ#`|KZEĩ 3S/;q vwfcXDf7U-8";5i8BG/wlu=u͙+kOHdqfn"X{ TiR-Z^(d0&ۛ9Z2~)"'[Ay RW e=9V{ .JK]'U\aBwUp͝+sP3bKDF'^h70t9!E+zi%(0ng 8 i?7$Ra0 hYSkUzk|vM]8J%̊\0ǵ6JO_=I-]$OI͆yLokh`Dq-9sT.:uը/b>q `/Ll>jF5փ[9!JvzeLC|z{qPR #n!=@Nlw 4Ič| #R .=DӇb+{F)psr,%~6c(zl9A,Lcvsjma]I*~x šy6!8}3Uٮ9ذNJw*8p)5 ,q ͤ'ߌȍrͺ(n}D~i^T o;ދqypVEXN 젩WQ½ϽxKlVƒ֜sTVj75cW!6c&3ytSs[(IvwO3 csGRl;i;1Jxz|KP6"Hens'Eԑ;agV'[ aӤ{q'ݺ ?^I=sw!;ĹaqAm~h#Z_{4*x.$fS;9xY<)CҋxkݺWOkkҳ?(+<w1U-4> mw+TDUݖ=^JPG94HC;[w0lo%#԰0H7ZMCo+ަT#ڬWIPVwƉn{z-95 5?9u 2rS֥nmb:o1Q }q=ܞ߉9%o{M*NpRFr/Oo%goC {qb0@ TtP-igEv^3ynW1n_/I2/q'_NJ/ vG2Q2}5F簋R_#}G<=3;3%egBręX|/ǘkC2z|=Pj2{.g8]搉=F?AU`uD WSdpld^Xg{!5PR_}:u Qrh UvDү/#wF'Tm l_I++}0*˦gQ6HG|2bҮzQ8tvT[%`lu $g5%'/ <d~j$r5Do7YC_?9b8x͋9&sфq_si@эwoa;܊JJQWѓ$mWP2CJjz6defѱjse߂ ʍ ~Wlg_=Il''iA.ɉ _T; >[ul3a7@^>ϪE& &Po99*mhK U$UoIkA{{ԉ}yBo(,N/9t MqWD7O8"}ne祉)*@ E]-}O JZM!ܮL YMGϤ%5s둶o稳{3P(ؾe+/8ٶ5Oh~Cye #WX#eŖ{{.|)Y$F{U>vžSE߸ ӐHˏh>l6jCzİ8LA[|fuj滏{1/FWt#lR(:~="*c[pOED).RQ___I*, i_E1>\tOUOsTdjT;;nDB,!Izg*n!_Xck㱋-8SE `MwbI*PC|x\/BtyF zrSqSo+@TBݵz{z6?͊8lDvzRIs<8=tsҵj{Wy0Ul AM$GLr1m7کeWN7u*􋯕VPSJu\lFΨ?j@uv?i{YJ n$LUab{xHFjGYi8Tgu_u\/ G0Mou$v:(raU o 1AzP'dU<"d+A_xPBw_-3"<8jDaM~\r;@ bZ_NQŮ.st-͛Gw^]i|wv j Wo4S HkA#ѓWqAix\uD/?)k)V*:C_?:߄1m-Ǯ^RAX.1i|M˒P{AvRhiCe=wC~/=C'覟40_jj%0~`h氢.jli$}72MظBRL)w/~sGFQH4nVZËTu;^ӘֿAg>IrpqOG7Æ:V4b"'U7ĮñRB+tli ؾ 4_Ͱ .(W߆ A]p|ˍޚW6 H>+Nhv0|Byy'c ɥ;A:nig). &BCQ<-VL[}o價l y 8Jvj,Qt_= 1Z纎B56`Fɏ$ yͱ}h >TQNQܑR2Xֆ~(Le̫W_]]c(ݦ"m!|p^CR$7Mn\}r$j Æ[裡A"™(8u b|1(__z{lhoi~u~NR乯qa ؇CB 2p~$@|`n{eO9}<NKu<~QG0R'"wPHJ_&l`n;H詻 ka="sK&YVFuO_m(oU϶⦸ݶ|Ct?].hpYyKUB-"$߆3f^Q^KvBIyK6[ٵ׸¤nzNȯ5)=){-Q=h{%bFm7O9C4}oA%c%.jncf݁@qE!,%F))s@!vt0wC&/nOV7{o$h e k'qmleN%ȽIS6ۂ߶QD_9λWaIw쪻{ `0~IGRҸ6t^.h(('&捯 p/D03RV+1gO;Pl" vZ $;}'nV_ a\l uǟ~"|X7y[BN )I#ϐZR +UIMo3'nk$(qlBn6wLoiiaF*u^˘ ~4y˯)h_乍wgxzߑ3 /}߾[ yM.sf?n0~%,( V|0~xE)*e2 %\F my I,)kGb^"M_5BQjwN YSU? 
ڃ+oX2?!~]p/&]'az= G"~3z<3ĜhLyKl4#3o4Azixv#Heew_Xk60n"}7y 9i+þj[e|m>,~w26ns~H\12jó血SS X[:8 \ru /[*S?SJ]O0c߂O.Ϊ(co.V%)JӪ`RU#g>оyړN$n';iS=~Ds˛@C3%DGfv9Ts,HZ?;͐ 3eEj(Os}Q%ԺMHm@b95PAiuv Qrrzjmpuuxd?r{D~-T3&L?0q~טFy#(?73CHb{6e?w;ÂxiqF$i5 ~6`O[;P<pƈ%?tN H?YW :e/_g<%FUΡO bS:yiJbÀ,/XݶxO*8uRQ)H?ae /U<>)Xķ mwڞp#+́K(u덶s] C C m&6LEC 1J}nB9$)!Z}]׽K;~8J/did^P];дN͏ȝd[kVl O%yНS4a Y{e OJ0Sz'}ah; c i?o+e; ᭮PTp?1(={| -kڗW'S/bw+H ?$v 4\(W/['ҝ6;BIO9e|-n3ye{ݚu#dB׉F4ܾNBӃh!eft %OAgU)UJo] |#4NU G'}]^RL g[|E,g (YZ=ONfo %2)] ZLVYLg!l+wM؊rckPi1Q\bŌ9>֗bA|zK3m[mNДQ ''[SzR_'.ȴ Jr?6ܽ~:_C{CCjdi8Kt$r˃uִD;>֤W> ]jG]!GĹuMxԲ1jZAGQ]1{sc#H ?I_n,oZf|kS>P}JC YNZf?ѧi # =ޢ3ttYI#}-ʔ]*8 SipaI 4i+۞'1Uen>&WH*=ɓx>(5 wQL'LIW.2>}&|X>mp#~dPGJǂ9qVRP'G]?ѰpP- k\c%C['frssBZ]d̹ ʳo}F\ ~59)8Bbl<#;֐홣Od ˆE'8Ns%]JN~Z&&Ydyd o|7cm ESXNDW TEzHoNdẇ/{jUܧF:74V'3vgt #?naC-Ukh4#gK|Fӣ wf3%qitP G>qQgql/H{i Fo آᲭ-l}S-[/2D?OS5s\uC{( 'rB 8."TD@KoVu^.eX{ӗ֐cR>Q͎߬} yX9܎ߩ˜k j-=Dtr5κ-Op2.#rQ47 : z&ޜ^ 똉_u cў'nn%| ʪOm@]kˉ'(GѸvȌl)YS^e$rqilo"3~83XUO?JBLw6VUԮRڽL>uuh@} %L& |\s׃m{xakOmP;܃-?~Lp\՝' |{(ZeG &KEV7|3E9G\Tg܍. He Co[:soUmѱƈs 7.W䒠wDһhcK5/_ưׁsOԓ%-؋M !cX $2 ӂjz3ybˏ t:?<͔\AS?+-D@dҎo;Y&WDT}Sxlc)Д^FC2_?4ϺwӁxU{[hI5bzsވ,3M5n\i+nf!{.x#ǷpiLI">زykI;+N?qUׅu=mg0[_yÂ4;Z+wcs r5Sذbtq>Hй \{,?{cܮgb*J|. ͖\,>LfXVK:bH&@ɇ)RަdJb~%׆vSd7HU{BfE?/tޭq (wk8fa0xp‹:_Ӽ0ji29)=d.Qڊc,y73ǵkjn#a(}pJZzz7{?*Pzm\SO]Ŕܱ=o+q\LJok)w{M:ړ?R]ncr@9^oy>4M\Wak=f}`]9Ot} E5-6qfhk;oclT uUgs3Ƚ8q oZ+ uqTײH:#-ZlG[߀CDCc̲j'߲$_ M?&/‚\ *:}w3%W7eq!; AӺV߈y<0qu]ٸhHؙ}kB3Vy4zmcA=/:YZu/: ґdp9Ӈc+0epr$|u8t-ʵeZhJ ט 1x}+xڹ;L3IQ9CL)z9vdiy[sw I4MǸ_춷 ףN-iӱ6;(p:"2i FGC.k݃׉ ?,bN-H>yއO\6J8 >.q){xY\!/4[)LZ7=Խ./hțiJˏnpB}n|s٪qO_pqBo]V v뽩`>4OǢMi^goʺuynid?Xx`*ТnoFmi6 qL4chgO|@'޲mq?iw) a?#FJZ7}D=@1܁짉/y\6/um+x7b` 3VSra+79@p2tGޗ|@Zw;l#hBw}p&-D2iT99M?~}aϵrW[ײ7i _qAA]KS|=,öi7X:N $9}6?Xr&=[0gXNԒc1ߖqǕeL~KSuVld6^}OM(\C[ُEûY^bڗ.9isRD}B.".+d%,4BI𓍵;GL~Kx_ł8HLX flOC<ܗ)ntn级c5~"ji8ke_ ;}/?R+NJOfƲ#>v+FoF+PS~']物NKYOR7v]X=ߩ:£]goڂ~:>/x}P.U}8hL,.Q%.9-hq+XΦ!ɶtYt zZr %/.a=gF:X[:gGv ~>h?0GDoLR|F:?.np]NpdMq8!9E$>ѿ)BW]Kkp^F&k0_𙈀+Y`BVi>+"<>b)BA4ͅs r? 蠢oXH "gsișbb4pڻn0Wu!_;nL뗈\ozb:ND teb:iO쐀托mݶ8R^sx\;NuVnxUyT_-C:ە~X㘚nR?~ůIYJq\y4C0%j;8:>hVNgO!vD"u*~"ʔ/޷,Gc},yU8:),7NN;[` {MpFx$[G'w`P{⓴c0w7m"%##b*CJo L;(6`,1Fɟ7K%8X'wO֓C6s4x4|bmkb+bf#@tЋMD](.;rLdP+ieP 鴦[KX'}Qf[5[zn.ܟn ;n>у4 m-J ɴ5LCf%>dF/۰+H$&Ddx9qZfAɟN\ZsU~=R}?l@rx ;u9Jwdۂ|rsIN-_nk+.HQҥC0:\ϕ"@XgR;^_"MMzU\׏ 1"O[/Y=k4mͅۺL#;o@u 99/M'& Gz{˰&)̟xnlM{nҸLE>P{ 5@D n!G!aNDŽsh#-;(#<Ѿq5~Aߞ 6Ukجs?ͻ:l=sQv;3.&WĥoB.a7+7h,KA5 sLx=ϯ<3D]IRIu|L1nZn)K[#Y9mr"8}%*^mgrYo6Ж-vĸx'!&DYJnpx}F7Ie Pu{^؃@go=Z߻\sC_tx%u }!GՉgk~'/>k sqWpv7}3e31;o.z 7`aGrkHt!s7య,k7I*INݵg=' ou?VuA]Ly2|;+D}]@xy %Z}}ᖫ?wPe!9n5yב3~IOkJU)v0;[wbR1lP%W]k mcp ~Mc6QLuʢa? J"m^L9LnΖnRN.GK2&)cV N_2sw2W.G= Ny&iXdQ~l."盬_qvIL׼D y #Zx'ℴ´C9K?:@ic`[4 W!ygk#W?LL<_\Q˒R OI> | ϏZ}uY{=#m x*&K2Ȭ9v"NDs:߯[= ;}`| f0vHQH+ȉ'߷9:fd5P' O̻#Uo: /w7~t#bG+qo_ӵxP׊Ec<^EZ{qޅ3?J{!>\lQFW*GgU Y`C/ɂBtc^*.+t\9F $Zߦ#%緽-ݚbڳ]3(YIeOcn]$j}7~i'6CK&ޞ ~b3FG2?i?~IWU6z 4TN|{HsuI ]xiAC_]a4qH2t';/>Yo+tj᫕32XYOu3^As^yͷFXs'aj'"{Ec†]#5nI7/BR.Uf.LRrn*Dƙo;‹KW>E({$w;Bp4w2m#:'J}*n1ΚeO ?:@tGk>9yae딏369n9~V$~_G"EҪF7U,KJޫ@7J ĭW|'v;GYezEDڻwQv9=lE~ԙ2@Û ö0{Щd Vŕwd6 +'9DSv+"&7]"5wJ/aבQPRPZb- yM9M~-~\qǟ} X,۴NiLwz@Y!P.˵⧁r_oIR^2NDЫ<HjCmRHmd~? 
%lp|Uq???z݈n9pVg+7%kICݢdO7ӭc 4\;eG}W*d=w:Ҭ/smA_\`QPu+]t˟]ʍY4Kvw ,NE=U}UڢAfw"Nzwd%@m+cRP4eI{ޔϠzݴݔ2Ĝe,uJ )?uNv8D=Ak~k3NrEIX'+0WGʹO`"3f?֯upg:A0WJ'0qY-MwOo&) nr=,a0\uX+|cC_nA}4|:sl'?R^ⶱ"`OMpsA^%d0Lekf?aq!*@:pU[.9ݧoW"73g1H9ې_?>H]Fu٤I]ISX7a3)iK$)d$,Jqqt>,3 G\#RvEC#g3e ͠\ -FHlf>ZSF"r9Zjv^#c7H&xx}"k&_s #rj;`ڣX8zG&r%cȾ2;&o *ȮF;z|Rz }^&X rt;#ʔ&8wotsI?<}^ÆJUyI,֘Hmx^VLT-rGBjmِiFlt跚 $\E]uq՜;l*(FZ Rwx!f]XRiv:uJO0f0FW;C3vDnC;v¿g̨MϪ%HE-=.;,v;&PIx$-"-Q;l|,pM?dvhCOx"*k*KpGlEgt'U w۠P]?|@}p򟷤O_?`I#DZe#K:IHףlHL#9O#%ĝbT7*B ӗqHbft걎ݍ]X::o%a,Ƚt!g2$i'^?%AMU>w#hG^<Gv<8 :Z:#;BwKC}ġZ%&w6Lم>43*>fA;:`4}lQ8̯, l jDmvQ-)U"f9]]B?"HWO_1廗㘹+dUhΘmǗK$S匎ܫ~RW}_c$l4k)< 7gݗHO(vGk_uh9{o z[kZMo%Jf hVP}A9eϾQv\ΪMeu +hmtmvWutn!&~ڻ֣KdtyS$JCmC(x)EL%~ιǿ/ȞE[:*$w.ӫƠ˘-?޷-uwq/V#ogY_?VE47pOï$, wE ǚ&%?"4:[N\ӮQǘκgQBmi369:N[wOF_w Nnx4.[Qߣ[1,3]xiF MUC(4<nݎ{γ1xo}Y6``ח 𙒴`TӨQ_yBي]m(:&yF= 2RWk,nwUP*l~4#z, ,:=djm{WLq/*S,y5$E=u> --<5QvD8ؘrdr,LqmӓsyiǩmES$O_?I$40GqYjQvZSoo7?bpj7s <_jzN_XxQ ׾=7T{|Go2cyɯczӇo:[f8Z"5HEvYA=Z9:yb$O%|G uzcr?#wMtI"{M?d68byQ{ }k,|nv\ND&/4TE3-S1 >nw)_0`5]k7c<׏ʨ ݛ]~AW;d]$1 ~ /HF׺\9h굴뻫hccHAlOB+hȟ"tuXb~eLU7y @R2U1,T$zv_?F{Ss 6Ȝu@U7 EoX߱"ɠ9qOˢn>djvGY&AҸܷ#iUWoǞIjE~jDJKq7|;ej4Bt=STGA3Y7q*G6_5XOq^Ͽ Csy9[|4FYw eŸUs{ε Ua`gK(/0mC##qqZÍX5RpRSK3cyۆyy 8M)(u~&!1:kR񝷧R¯O}dsɥxW$t} ɧu{#;R'y>~^,A753D5d/rggܣǪOڿ,׫󆴪ɝzbx.‘H;3K- p!2y7w.1k,H%.rѧ uwd jS)D7DE@ v /D&V5ԤQ(vsx#c^itO޹_s@ftjBQC?<\qٲDWRD=Jk{qWzӉkG PF+eDw5w/Fu4u~z?IZ^xNgq 5Oo.炏*$P܅E"M6XIEbχ}djëĦG.TFp}j½ i;H6*21Km'/'_``M(ч B=B14v3k:k|Q=gu%'bj6ɝ{8,Η_vK CƏtmTҘ TMiXH ׼M-E몏Q̮ӛݮhDl`} '%UZ!RmNa9UD&HM Tv\~@@6lVſs/ ϐtW\2;?|jK s_Ҫ~XܓXr-T8w40o'cBJ hOD8ۨy dc׸ݸ:]|2zeH.XWԚ^5*koa6]/}u!i& ~㦍IvBZfuܸ5gӑ#\+sKW!8,w ʋ쮝d`ZߎeHv}{)~;IrEKw,?~Ӷ(Os|YY0!tW__qU5E&Swrƨ{:D.ѷnBy>/ٲUo(%tNw2?-M1seBQDW};>]B J.[IM:JYA0GV VfUgռ@%\с~PBq_9.$[K_ۜH&#$%瞞__ zgv,%Dy&]]g钓oI#HCL, /đWϩŞպ0j (ʚn5A~BʷO/^w8뽨@`N:ʝ IiS2c pK~ߌF%D9w]L&n% WFΝ]{46'6z#nu9MP{u SR̹}c_!J,AjgU*htNk$,sNu#LbʓBn Sʶnb;{qjtVZR[:x$>hDp;L (ԽX L\~rh~>gT.Ç#*<`Ys3-*]{=iu&%[ qGO=;JEx@5_x[R]TӞxfkKbʼhY6y_)sJmAu~"(?ʖl;[%zWYUo'_cD=.f)SZ+y ^ݿS`xzh%wcW_#ֳ|\CMIVڦe } qZ775V`g/`^!EZKz`8gn|7t&9~1?pq@P p \EO\> /֫_~K2:v%.fppMWALUuľ0I֍r>0R-~'30$ϧx~n'5/^SD6!&4z˖]}rޖǤzCT ֵ\NVJ+;^UROCBrg3LTzOGyE/6q *9\02lE~⶷Fl3*#UWj J '\u^b*ΐޓ"z7B%_$@6~ގY&(z§Ie_#rNPL,mRnƧC]]4yYqM=QO@XsCzGվޛFS; !I$I>  IʐyYmgPlʔHK%(Y!uo}ﺮ~^{YU}s~|߾ ƒ֊ti&sX&Yue'7yu#ꐏŢ1]Iސf]ښo6?I9qDJZa)t!FA8ۦ9"'2OwXM`}WT}LW,soo07Ƿ+$S-Q4+:BNEwaT/9 Nk Z %P (<V|G<6TABSˆ7=W0I-|g_§ʖWL!G9novdlCH}N3:dܰ]A;E`΍Gֹ@>n5z4~MYPnP,\3 )l2if)y%>{m}k03ply?}{ v ?SR ?o2*^yhb! Azv(TeQ&zhx. _VgsOO[Bz-dC41a:j,xM+qUvm_M5ͮ-DnxߗwD{m/kw=V"Gm?C;@ ('rwYj>S\ .g?4 ~Cy1nSD$~6g8bBw2i yd0+xMɾ8F#8GOlYA4 m?/lЯ/5|ˬzupkb$d |џq!0׈ wwϽAv Ib@o<.0=J5G-2db(ܓﴃAmz~mG TYRSk]`ng|r9$\+&HӃ 0c~*7'ڃX-!b~bpJ Iڂ{vh>n w*PN0/~AbdJ,Y484t~e:s;mWMFX/e}9K7W{/:e%=yY$0We nL%mR-roX^mIJ/Ap+ U<ϰͺ‚skX:=}1zX7y!;!@̑u ksV\aSwsp Tyy Z$S]HgHOT5:u4ƟLQd%j3!'փdŶYJ3.x%\7í?qtw#)sGYŧ Z/>4-ѳŨQ7q@˚h2ތٗOr+ eAhD5IpV\ҞFL+Qhʠ"ks5_nYқBRn< v34bmbv 3CѾ.8,B,4Vo㟳pSt#,ĵ<'w*aik̞K -} ަ^_t4yNZKd Ŝ[ofp' Xq/| e Z>Cgžley-^s:FlL݁uZ xu&AJ|#&Su5IaWj(\E؅/Όg_NLm&̲5#H2 Z׆;oM$q_# 7ۥ!-G|sA;|yNOl ?2)m\ V+g5(o2xEZu0Hv%sx%֒ +#x8ry}fp HCf}ӄK%~=jv+q©43ձ[}hIp2_YG YH@)]lA*5'ྜྷ-tvm*LC%g'܋^.E 8,xl#V8?+JiGSfh~FN;\ڣԺ3->~\Kpj+|voxЎ$QƌxpBy,P37Ђ-kxWQV}B7Â(+zoon9\xVXBاW-!~I f.o/*9͉I z8msvLI +gSwS)Kض&5B/(KCB,?M o'-ըMf!_NyoQ T{Iyzz/Ϩ[XxB;qh\Yyfu=6(Q's=a#=I3 Z|*umU>ǬtcJn#j.՘ #fzpb~S nTW a'oxysЍ2<;?ϱ-=no]B^5T-*L@ļ>~vL. 
?T.Trپ8~ICu*[0j=ZoOCI pD~u > /ڙknw!H87+|)?jY7UǷ*|˯U>ԇiׄ u8+#ʒƇ#Vw#ͳU4|vN "rV<] k gVyϯ[yfA޼ְN's5%)<9}!nAc:O'^׎޸?;Sw5T[e`i E+v <R98LYHB\sMsZ o+~ w>߼A UE;[1Kmkʰe⇸}XU!x9]?oa2-\<8x@dzZQT^JK2;Y~۪㓕uT}vx-y`iZ2%];"/5"tuv}nJt'1WB$ENyO9r̯4`â @?;9 3:@pۅTKeC*c ;KiP֌'^JMmVp%0/}$n z-UA<5)AO"17.U,^^Շ:K{0k%i^pi~3dN}p'~BDRޢSlЍקqt FʏMtoF9 \TI׶MrCuoawYjwUnY̲^z*7%9e4B%ee]pJR"/GchwC4v!Պ0Ao|Wt8s ^=k7#8eR[V  }9FpoH8FI[Mn9'o3OTYPwD%6? <޲p X>jwKOR۱7ULm+Xk1^k`C,9z'i ^R=0;ŽH44O@Ty{a+줈>!bQiteg$t+ &N]d%/q5RԹӖI\(f Uˤt̚.Eк3Q+C0E6InK=Nu;Z b9][ks|UIʊKmŬT!M {(*6 _UwCǿLWwb H<{*NyۮQt83m=nNggX= >)}$C4pױ]U>Pሿoň~V~˶2yQl枌z$w|c?m/P] ee*ؤKoV!|{-M Wv|XrRf~Yn <'ozhxnH1ǩL5C5Q-c&F:Q/ ssD-Pg rd/Ǎك@J{h ΟK|͟CnؑK\5 F,`SSVՊׅAnu>3D=Vs.h@3]@Y7*"~HAuS SL7x0xvҁ- |95N;A]p` 5R:l1.v_sH BF$;ja*ziPՎMf6EjtҐ L}ve~9/{4urYe_.ٿ&}0#XyExf0lKnn;/oZ [Bk⬂ !ţU+{j9c`$'Ļ3ϸ_E_59q"O;O+3]~$66,@$4?\ a]ES:DYS$W^qQb*wpH$x3#G`f:͛f+n\';?s) {5P9;RxI[ g V;M?L-4n)&fB.,Sdݮ_uM{#Z *cH pa'n8`P\v2tzX}jV@jў2 / #xu@D0hM; ]Z4@8"r,EM6kOpiL78CՍ ֓lO=C`967 N]+޹)kFC|phDi} `R9o dKٱu,7\ B|Q=I"{N;l79qƋ)P+"UK[v}': ֒hJ~(4t#-t3 OtGCfsoo1їi2K櫟`0A~ccW,W" -Z?U`w~_/lMU-\W  >&2E.ny{+Dz)~ 0wvBUS!Ve# ˋ:A[O>vD.Fre?!@(8SȜ6SLA &8B@ޓH|:OնѬ=9[ .&m㴂n-~(xdGypX4$}W^ 9(X8zC6{(^VbEm{ 池a E|]RApm,(e5fI\xZk1|$?#]ja(dϊdmvD,2]Q:K/_(ĵ}\ŢGI^V9MsE%- &W7"7Ɨ(X)l8у.֟~:;V3|a󿴴ŭmm>2w )+y\}AXp?M¦B4p?A ½^u"=Q!Z4' ǂۆCi[Ӷȱ xfg ]=BV07|w(V6ز2׵sTAc/Ȁ g΅EDhv\Gԃ|Z4gD{]XӶp;'iϘ؈>n/}NL/OwGyU_ %y4u0|䚑CÉgmϝ)X\L6D5Ke QOEŠxϦ::O7-Y]^5]x}] K5GںFAf΋P(jy>D)XuseaNpW@P؇_7&GJuhԵhLb%T6} xkfGl+ 7Z=}`5e} ЀWD#j `'.Qr3ELd޳!o6Qk* V;jDw͢ *F tDMG*!IRezaҜO0`qk;s}!d#_?OzsiB}6$ZK|(T'˒TA1>z:M]um5IbG坻" ?;7`)VV.7$@ar&qۀʱӭt+r?3} ۇ'SQTsPHO^OrnIB ?*z}$_^79Yϓ5 10E~שRC(i({־ق̣ ů(sIȶ0":kYRn<}[6#\% "ߋw\څ6>ࣙβ'wZΟ}Ttڤs9ƨ՛cۭXqъOihxqZ3%rUhsQbx*l.7}yQ+Kn{Z6[=Z@rP ]9WUoiO먹ԯJ` 칶2udpLIpYb%C :>i"SOu)e_K?u{wVL K^]Sq__l`9u 2lFupqusڷ~+Fhye%c3>2s>TEyfNG{k<̽qwjƽ_إ쫺卬W U~#yYsB dqdǎӊSd-ZE,KWLJ޳D=)L⏶)߄ӟ9j^ ёLr xތ Z T(Q+ _ oi?{@Ib]ΙX.\-W_kb2^7\߻|#c뚿f@CN+XGT!.%#X4<3"uI;h'q"*\%+5(W1H>u\9%J0 nTK3)ڑpA:G:Cv'\p(R }=F{b4_+Hw_dA lYb fJ|ZU`/8[\WN۹b]&XJzbeT Ynqx_G]/ dDQpo\'dilr t_}\ ? [׷c;하Zt2fu wq<1䗩ʺ䄑'ԗוՉ?O?~W!)e]gM*MB)})bi05:rΆX(1HPA[K*?w)o>3E`?ddU+uBD;ЋEWxSOB9*K)~DL1ndRt 2/|O< P\agdn.r|.OxOhZ'U:z!s =pg D^{׹P{,Q{3Fh>^.+ʯwP',]ᢺi5q81ʵ;x[i\hےWܾ~@qoiDrl bMs~X7)ڒO\rW$,颹8>R?* Y(lbۓ{8;.JrMl^=[مk RL~2Z]JR1^X8ʏٌ'NeKt^GaT6y@2!&dݏs NA1?y*eD{-[kOo~$񻁬 %O=K~sB Eɽ@{HL&NBWa q~\DݭEPuLLP/P3W sVЋH LW؍O `q XEҟ,q٦)W,=BIV_A-⿧jnp8Sa,e޶{Ht83GҫEZ۰l/{%c9t~f3Sn\nGEl$l ~pJ=쀣u6 v90zJ8x !vܵ\ F@js!ÐnkJ^uw+\NyaTm@B1>wuWD:˹{P8 X'} p;Zq]q/qz[L>ĉq[[}#7$iX| _ѣH_o7giOI޸7,ؽd1!{ b`6gpY!ghxk(0N<]?E3P"LK#Z# m8o L|VA{?nϯ@7xr =""z տy_?` bTT9?MvP 97tѫN$xNE{wsgM%ν}; MaSy@Ї$WghHvu|M Ԍ 4&fddߋ>0"u?)(B=_#x\=[Bj%QȩCͱ6ocE)ח덟Fx-U`3HzFie_}/+KtAŘ/+p}4#+ q79y8YSjbWqkGogiF힆OadVDp La٢?xc%cΕoQ'qR ߰7;sG#7=l|Bqs=m7 (T N+9JY7HTJ/.r >wtw.  =抂ٯ} TeEnjڅ m$#%[hmZo5k>Ǡ 5IJa1dffƩJwn2/pW7z^5kp!UJHdML3 4ڳ1HQΌpU0J00 {<2\e :jIZqGEKt;,R+%="sZ*| Zg m׉3M]Sl|A"QI#>98ceWdY}¾X;m:<ㅔ F#;9pkDѐ{*NJJj4\ߒ?$,ڼL ]ȡ@٦MθڞoKm|8Ɨ|GvVa60a 7K!'$t iZa*EUЄK7{p](q$ؖqàw>kqm+= ԫ6;Db>),m>QOV9yl7@rlNE A`,OX }òqgRn0\~̉d&ccA?Ly Wlz\Qcη.}Ǭ@js+wX:X.NrQγhSOKDj×aC۾G̕cGqCy#qcMyi7S4uՑɫ :4aLܚ؊lݸQuM|Hǯ4u[ej Q98)mM5ZCY݂}&[?A[K}qAF2KR. 
@<) u䶩c;^'G׼΋_r|@%e7d J}tE(%I'~~sg-UZ:~FɆk3#.Q'w_Ho4𻶭cO1wUٟ_Ͽwd~7 بԍjZusk=ZpS} T ^iqf:zsa:A0[X֗dz9t&gHघdc=꼹%8˩?PmQ>r(ٸG$iZG}.I^߭~WCe͛:=1KroV+'PTdqyxc1:ͪwM-/IE$iǶ>Kd V}&i=2HMb|%m7mu8J G:a:~ĺ7b7"|9vaȍT)tD2{W X70J6Ug`˲?/_*;XO %,tk=hכO).ᨍ8JF\} V]-.BfxRNV4+ݱ}zڂ3rbtE'tf!Pͣ(;9wr2Xg^-;VPܪtM!qƺI_;&e5nW%q[TiG{ǎs KzfŽ!CmOiw64$:/rT3z+Sߪ]J`1J2C|8sKIg7\n>[B/_OLؓ%fXgFmp 2wL N1Ԫ A4CcihE*F_zW}\9[3&yY}攔"_i#cSo=޼ꛐvGޮUn؎4VJq*Cu<+vZ-!8Dm}nG(R~@1(w\zGڧR2ܞ.$4.saFɪދ9RhDVF?{9Z25b9m[&G^HJKJU>}f .t=SiY<`+Xv-3EOxyP["'ɏW}sIsCCjxM~!seNR">&"26lcdJa*S&s^+O'뾯ukW:qks+1~!/ ̀DS4aDLFtķռ ȨM ?h<SOph[Efiu2_~޸JO=QBht(ts8oy2%a%PvDTrTnC_g6[4i0sNa54Fk<ؐʊnOP%̓cqU 1f[5_c(@{=H$x^b, N? kxpZ:Ic?ﺠEO|t4[]hI03NQ*X5vXt}O~%j Bg>?$5O[7}ǩ[#kx[F. {%"Y1e|aCKըNrQl0?^W븪%2sZnxAU[Vό}F5zG O:Y+ *=QG Q]Vu42l>vDaV3ک5;}<ӥ'D(e/nv]\ }H~xFW&p _#Usz<(TAתC3N!Nlc.rSȢPe|`~R,;R rIyT+\ixJ)D]-{ FIw]YGo:GT/ō)pF|H0=*;(mqN '}L3$Cs)ӠjGp1jc_H2>{o:pBcx)zFDhԎ6S_"JW|m=X +=1>?y@>RBU^se ukS&wQO>?ɺb ZjU(3^']4cs-n3N}݃UU ^Vxe&_@i'E;e҉1r4 Aq:s$jѱWc ӧyFg9rYyq%8-d␷lyvDP;@iqOhi˭k !Z*`R@QX*ӂ% q44)` 5A*_I>lgǪ5#} Pu+hfX*Y_`҇8/&`]DVq:cT4=e(>~$J~ۂ/zSw,YjPt !H"{ |5u:Q@x±It/{eBƻTQښAuqp BP~J,YNvgul']DpoaNiȋ5nU9gs?DQp^W>:ҍȘɫ7LOIE{O߾dqy8w8q=J /a'L>YBoCe'S)7{%ܭVo{0i.E:@'~Z m~9Y@-ų8sx@f"=x5X% :PlEvc+#d{K^8QTШn@Q]HlSHTqB~WѶׅ$$wZ[r HO׺γW2'n~M 'ГB;ڵ#J^cZ:}wl=Ě˙]vIC|Dţs7|cP2֏PPÛ4͜ [ ?ܗ n~U㇘бc)"&f.xY{ EH-(lqu9q3ڜN xܡEga?1ee>ٝ|Uj eAܷ7|kSFe@bB"V1*bel,۽iUIqb7vUiQwܷ-g9 mPwQDћB*cMThFg,.AP8d~/[N;&B>MoCq7xXs) v1 |K'l-pۺyzwsgB?\(Cl "Um\`8Vz=x1?0KcN*@:h3x5]˴Ó&V1V>'n7,_p=|ǚTolx6,s%;%hiw7mg(g)6<| ׳t43%eH|mr,i1&GFDu_Lj3ʶ]ԂIW~qUb~\KB@"6P=#ku(1~7/]M3yye,W侘.P)hrD+R]>|[ Zi~9g?1{@rS{S}C^<'ṳT0lywNg`x*bCF͠ @?D[JuN$'#Q٦a<{̚(j~1 }{kJ(HTGƆ?2V̳4d:!: (@o=3hOz׉/ɆHj׆xͥ.i`}L.FmE[z+`nn#3X w!5[v?99x{q8)]r<|C`MNz*S9|?QpX~ߢ:m Zk  N8h )fL Ape"ӕ0 F3xNJOb<-geOkɚ(Iݑ'8fhF̠Co)CQ^hKH|ɰDzӷv+.1oHk^܎l^%{kyv\uUltߞJv8\!D.dC=Db[qB;r ._Cdιq `&^3u|4N Jx_]0.G+x!łs+vYCאq&G' A<xh̋-$/ -XPsP { ~l 맴^Z} VΆIg)n W9p_v񤥏/q!TźzzZyЖ]Q+h7#q qW8I) δ&mu&wd(u [O.%^NplY{猏iUMhSf9<"R=ivH6hc.^uW7 5@:yC[n5z(c'*ɼ !tj~WJCF瀍M8?bd Lg&Z}_o^ PJo>9qvZh7'iLZgM}g11)<<AB{!\Zݯ#W8hQKiOgCʐ[0jkQٵp*` ZD+&{BN 4筇xq)';jN?^9:WUbB;XtQt;uBZ9~ (7ht9Aͻh)lWЀƜConBlI?4E~JE\~y/`u#ʽ 5<6W:iikh> jU9BDN~._7 v<ɽJ1VQ~{s|NTZl9mssUv曽cұ԰;T{[+H9f8W`k-gi5}<Gy>< gsݚ[ 駏%-@Q?'|FKmĆϕPU~H$,dF*1Li{sU%~ZU\~ϱ:lI3E4I  mSóDQQz.20%-ƾ}@w"NF v/oj dxI)Zy'b&:JvN>PN=+rƵ2fIx(IkO'cRH,'Z#\6Wos.nBpn=g +Pg`EZWXt&͡v4rW#NgqiSGs9WpO]5SPkW6:\nK[W y#b{7mBWMֿy> Qydv;5+tQ̓ JZFZO̓ )ó,o K+26i\;@G^d%16-ævOBVi8շ3fK'{ɺ9e3mS;jH*nm 1+%Z_y;L\!з//[z kpߏd(+f_T(da=[ƒG%JYCaѡӵ`pFd.}귬 .*]YR`l 7Z_6{e]497N~%^:=․(Kv!ٺZቂnqw_#0y3pODɻLV?}8J*ϔV*^{۞IuJ43`ro%޽1@yzn2O]UMKBO2whP!qcNQjxワ7_>0ܞćShkCݛ5؂@ Oi?ʳ $Gxx+"0Rsj`e{^߹RLZ.5gH*^я`씖٧K`|7oj_w 7rM8hnZZ}O m`@K+}m^158!5& 1t]ō+_uLYnӤc!MB3,~-#HȚCσn7䑸g$F%-i̐dTg=6R5:͇x5 &VuD-G|fzjpU^SOXQ|&rxZܿ*F&[~8 ZuZǑAq1w _ r.ea-m`~Qvs}VA=1bpͩEr: xÓ(H}ubre_W۸ĺ! &G #Gv^{,'U۝[s7zS ɭDU+1[<>'[8YX8fWq`Q;hr*'N+ٔqO"UaAٱU\Rls e.J&3x~"!{M6S@ p>y9t|ASm'Zl6,2p~q5Ν66x3N 4ÜMmpƛgqW^ sn[;~yThxk #eD(m|ƆzGuI 8@T.e)11M=4}Ȱr#4cV% ,|%ԩ?Dp cN'5rF:@x_sĪ{yH}!? n!fwu=OiHDu&ZĵZ%BA14Pڣa(. 
/sAȁ!a /Ua:_pN_dxWjJiaU_bUɶ\ǔDa`^M>Q3w0Exr=Ľ<<(uϏ|FڵDZD'ߜu䴶/ss~yo9f n>@ u]X|_Y1-9rCQW{2a~A8l'x[eED&.E~ʧD`dY3\"݇/ᠴpP=b5%GI?8?:󬾳=Ѯ8>R|[yU%Bxy葄A>H"-^{/1Ww_KXᴒc fI+XPӞ;ސӣ:0-.|jGyOHNMƹ9@VlR Hvv?ap ZmX1) :}hM$>a;έ|^H+W-NCl2 |>=)bV'_H-K;֡/yu?ux &VuGtM5v%ң9LT_ >ygX\;'pWh{)J{),Fuw\Vd2-cO|\=~5[ P4\k-G1Svk8N4'cW{ϩD9(&9[Hvj$+7MŁs9h/+W6i1fW VgP nN6<1!F8^?\9t,_5W'DYg%9B:ۘ@ GjM|C_y9]Otx0ηhN5{Ύꅫlkt]|BL3dzF??',q7 8ӪooD%8sx|w&|\s7d=^p׼q7h]ߛQO&E,D0Q.<η嫑4&?[HRn+ܖLCχ6@_Қm3 `yڙF>y`{7[PnX I*5SL"@6{KO ?(1Bkk Mma Gxc;mOQcpO F_~Sfz5od'YD| J^BD"'pZQG_8w`{ly\D6cS{_?=]VVu``C+TַPbT2Փ7лO;n[3(Q,{^q!ptZJ"pG&{KKzdg4HݝaG(b+ul99;/AA^c=Nf&Q@𰾝=t62Fu\ .`}|dzyB~ 7/dcZ}&=a6?]kHXT;SO\ 'f3j)V/0*_x}uGf~ꁅk@ҏ\D'Qhsuv$j#: .Z'/I3{S_ֳvZwiU'7 vߟ>f׎fw8A@.9 )ܷ' 3e# U E}d&ELdU$2 x:AbUos&ofLdžyL2+u`Kj[<݁W:(!`ҥ@~!#KλǷmmSJ(a7^Uy1d?\zw2,Fs_߼-1oceO)4 ސlI|oLswi›]g4lPDK_Wþ7θ`>܋ !"u+ 󼏙wot+Ȳ6XL5NZThEJҴ磡ݍi0=]?K-36g='Њ5w/_{<2@]&IA>c?_']|yq×oQ9Ze=Ǭ cCkoAtVlk2⼾=Ho]?> i@lv?K0fpewV sj`t'qXb exhu^\/+\oȇ?(`,ݷRU8Os6Z$C|q޼V>ny,KwM7L` 'VMOݖˁf-twû.`hoS9Y_͆-E;_/I3} x@24b>eY="rS"EWI[ODXN.>&u+ [#C#&nGwp=dk |ۙ~6U\x ><>$aNP)o=IqhH|e|D}%Ct%fpҌ DRWDgA{;4kC͙|: MhٿZ //$Rx=yy[͌tbK6]gci1#.ba;l]oבu&#6J9KT^/S$n0y6O+<,=>ǼݓZ8*%|1cwl28o<îWڸbs}C<,*T*p;L=1ߟC9sJ * lJI hzgWݞ mGyr7Y$'r%vIUḠh*]7ڞ DOpa@*J~fy?V5CAnC ! {K* q%ñ7%l?o}ݶ͕( y Qvwр- Hzz2MTR/hyt#qxQp:v~D{3(݆A-Z\=?̈́=j6uc]>Dz_ ˹Av~DoH/v!ֽx2\}c݇큨[WIi ?Uv%jZ.9LS%Cw9w#c=aznZ;X\ "YŘ)R~7-sT"ΛY~ݽ fDKp ~Ϣ0;v){:K9z/WtWGۺ+8htF"c"w0Ii45P)sJ3y$1+t]Ց^+k0QyӛW(&Ҫ_aY%ņkME9O>{#wk"g-f}}]yD΃-":CGzhĀٶn̕==y9{0f,{H:4tGe]cW;8 Y$_s& شVAehwwظ5Ns=hm< w :ã,>uN󢺂7k;QIhG$>у]a<_*E}>V}$04d]>KZu@D i&swWɸK9ߒKSΓFx~%c #>ܪ8eFl;ME=|#kSgXW`H_fv؏h<4I1:CTAپ'DX9Z_W M)ЉpB'Am Tp5rqkHP`dev(MRqUѿ[5N~J$H?eR%^7$S5s^YY]ĸS|YnVT#aSs'Xv#η2S~ JCb޶p wc<뱈{'bʟK/ʞ ʹ.BcS(@pb\5utpd zT*}'="*f |i\}'16 lj ZŘ^Ϥ=i.aywҪV6QRUd٭Hպ*mCBfkWhħCm&.g 6[SP@Y/b~f{]@mӥgD^ 6Í̿H”GxD]>vuʀr!qDy >ܹ|N5.e%hzrHjDE+asσPql+"DZBkȁsp:᷻x 'ߣIvT#8sꍅ6ۛqb6˝b'察/ϷGELGw~:tWxeۇd8;AH@f˪Cx,L)0-VZLK_w fxMJj~Aj3n2CƏw"g>~Gv;T8n)lbQ&5r @tOxrYD ±i# VO dTs߳JB-`۞+Tjvv6~vϴ33潚ɦucI[28hlmOAX3FmJгVk 1x HWm;OV( ูnw~}b eM'Ae+EplO.*jfL |t 9!d_t% _Mz׿Fy'b]3fD5~GUƃ (U9wY^ 7Vӏ=%37SH,aBԹ)L0_΄hCqR)ֶZ]nKn9Kvv_f'2qC2o @gBq}٠苖Η1Oz++%LMӗJ(?}_*w'p5-G1/Q.qk X.>jk29Ex{ܡL}3eY& 1+>:!e)kܾ> +RԜ98&PCrj $.O (PF7m@e^[p@&*y+8왐aK#w$&j%5+8|?O|3?Dhf4=u^vΖ?T2G cMEpW&ykPKl#Y$^B*y[YC4ϬZTS*$Y>PTޟ\v1KP'Y.˧'"̑D+Ͻs0 M~ tͤ݁w10(<aF68J.X`ET5,ly[ޫu鶹7,uqd]l&W4oJQBܫ ^#XUb"8v_smM*B]G{bp5C85 E?K@֎3;{}*o3E)uUK?(u-r.vL;/Zk|mR+֪J@ [MP߶k?$o}86'ٺ8\ܙh4QbB4:xlD;O ݇nn_31xwk*Sq<&׌1OZ{`U6;w'(Q*察~ckj,ymQ;̷O_%c#=Z9 _(ϧ AqtI(sOv7%.`fM2T73Zĺ>Aľ0ΦἐUf.1k(gVv ƎǑV~E8&~yVL-h#~rj2AM?+cKSC}yk$YȞ/:eD9{Q[%65z]qM톽f.RMs;Bk)Ow.:JMK]Ey'p] |VK]s?_37?ZhPv`:Gh_M"s,cNR-]q{[8`YQ OQêI .tagnؓ>80suY?Z{U9ʰ1[Sbat5pU5MH[7d)}e)[ݵGp"7ջ5U!+#С'寣)zZSǹۢ1I)v>1<"%Dg88<׮~2di㾯Rsʟz7)H\ *a#Hҏ <a>-s)e[<pdv⥤\|Үx8wl^~PzG"|8'O넾b^Rr]݅/r|S&Wl|07fM)i5הݯJ*K#EC?Pz\ 91V4L _~Լ4CMUJZYv~Dl]쑗+PHӽhfl x >^8m~h9[eVa7I 9]g3IBF2ߔmFG3c4{ {~1቞G){wm/![A1n\&QߤTڌ?ʔ_;?xC*sQV--Eeh^h//giOh[#pA_پr}rS?GXqM,KN@^m˯\AFiEP~kbhuZ+/5j⒢hcYn,}Ҿ]\WY{]KqQ6qE(ҠT}gQ󩢡0޾?DMG 1*9Au3p`BL{WYիmкyw?\S- [\ /9d@b[`۪'|xGV}z3\V$c9t`5߶SdKeWlwZOH^S]G5xOb+sn G}_T,.`XXHdUaD)/X)*x'-Pz ɛ4m}Pl|sς.X`PC^gB ̓ bl&\OogÒ[$_xZ1l~+ TYþy퓯|aW. q+J9_h=AP(1ixBPlP ]w(~t)@[?ƱIcIS%"ea웚v/Mg`=z@+{V:,#Üs)I&*֔16y`y~?K&;-b #F2Np@,7 ;4 >0h5QyT;p:Qe"CQ+pjÙ-^tT&XlG*qв{4rjQpsϮi)pSi'[ enjj;9M0$tMקOoM16Ŏ{#ɞlDވ=ܵ C?zH/N#] q! 
JEa5YTUVgcm?+Oi*08f6~V^TX ۜfAp1yN{PPv0c_iᏎQ4'JMGXN-,vwo^"\ͤJg๬R-vMVlq7H"vhcxIǪE]2ЂCܓ8$z MuOs&{BA*98yѩKk{]xMp#ؓ;ٍSQtC"ߘGcX>ى(anJQ.s;;V2E)h^c'r+߻mO"FHY)N7n` FyM74 `ίt<~E# _M^s.VG \pI*1 6ZV9WG@#` ω^7P=^_gnQf7Baݧm<@M{nU|Qq.G8>f-Ix$g=G_w]aEFZanDP0=.$rKWԳ]wiŠK2F˥(#XbO"lO'2Y߽ȺiJP7*o EE1SH/%'A^~ΐ`v#\b9it̠*2r $vk8L_*9>&1Ϭ Oq=#CXȳltwJ/9vYn"Wm4nǼ̞y9=˚[/"zV'/94\|t?6Ce'7~xK" O-|maP΢i0:q]2>yŝ:~f6D>,*{(n}_~P5=qu Y-{\ǧ.)=^GNY -:q4$:߄+i'Z2eN#ݗύ~G24_03t;8;p!gP:q۝A#t&Ut5A_F)ܤHE~߲w9C^Ļʾ.P'OY-a }CK2FAoYw}H>j#V{Dqf"Nz%"Z=e4f9 ){DqNvPuT%g\DruKDKuG#A wj9x"؂ctZ_cAr GhD,)kgl@,8;oPk$pLoпPۜ9~G02&>LL-l :l 9f$O'r*ܧX5._{Z>2 h8ʱv|q Np$VCcوy{#!^ģaB ҺdHe;ebSes F$ߨ3:/Bœ6#o߈J:۾9ui'_vKWȤU8&3+g:N`RFi흫k!jqHpTBs >Myg[5JTu)0_LQ3B=`?UX0@sd"PɻSU$/9Y o`nްu8.-%C$]lU_:=RS]aWfF[]&-ZePj{icA~ 5ߨxaZP7YA:. s3;3p㑾ؓ"n7n(5(n[_MAňhF .|] OG9FWUj[˨N`jdD#{V;nH#W\G'aѺAUhdË OlPn@f9K#5j[9B,epZr鴦 5Ls5,-I G:LR1ʸs! U2O-d3/CI k]x|gAWlP- nQ A0u.59o##jޖ`s({„Ru[Dl$MA2#1CNF l>3ܪ0!xᦐp4wr%$8`N]bz'@pV4Nswl4{kfhZ󡪟WM wMBO*UpTAzD2m$:sT^Amś+.p%d#sgo&_i;bGwP4D[8uG$5a 1lks*: js'FDj])qUJ'pk(d}wxSS܆?P:2᳠ JOΫQX85q7<u|mY.\-*jxs=us7|<[h#K 4P(rS}bwNAVIYT]0󅦛OWG )'ÁȣH w$/`~%uP0_HPuf9H"ZRDwY;1~CԆ?^ gCQ`${Z*%s7V|gά>\{}Nlz eMC*_%r"%iU.0훫˔fx}eڱj a [vnz.p݊VR:#d z%f}%n`[̲ ^ {|&}zh]zEc0Jy}$ V'lLVP^$ywkW8KM_"OL嗠iYmHPuTS %Crzp=9*́yYyz`hQ>Sb:DX;(yȢ}=vϹ(*u*"7F=T\-_K0)}oYСpw$[Z~mk5X >{Ln7?55g𕔌iȓf?q=T3kte- 9ӈ1:a` Lmd_񫇋t{Bosm;?9zByMT+h._j ͅ?$VPEq ^;fO}z,VP.hP4 |Ϸfrbᰄ {g={.e!sԔEʎ-lܕ}=\hKh~ٗo>7Ǔ.\n񶂅ܛ2`qڒF<0cG\ש R~j4ԐU~aKzʅ{Q}=Oj S4\äFե;;(ru7J,O?Ӕ5u^|I^FwX%_C =~5ۜ00ws?ܞ%IB_o?Z ){_y脨H0Ypd1nu+U1p49bf5K\Igj/>] u;~qC֭pxeǜ+L`{>+sn6$@ϡ5E >6w*P;be>n(uN=Nr6[F$ԧ *"xfi 1i(sMJJh0D2ȇ6I'>`>GYS×7%s\GVlQx)My~(֢ CS?'ʘ7LhڢHL%2'L "=dt37A|K/ٯ\D*>s/{j> ٺOQGa*]zP_ WEe5NE/xw_^`ga5!hFDʿ:CqIeĴ9 RޢH+vѶ0m3عك>C;V0w) q޽ ϰvMt^86^ "rƤ?JI$MFVlHJE=:_~ j|/ma[xipB l_Xfw+>߆~ HHF˼iA"zƪDE:˜vA]"׻PW 7>]ҝ}*zds\liVK;)9,gT9jSX(Xuؾ/BBw-0\prPxCBsNw%HvEBEwʄ,<>ୄHYyxuv^|+5!}6oC(tO'sYg- bnBЯP}&8wpSV&myQ셚>.%#JU٭e$qQ .+_ > b[G"_pIA 7D/"R˨VgD1+Cz;V4EsOǩ9)q nL/BBYgE/eFꦄU'+j'ea<{VHpzi !Z9R1D[r9}r5ıYȵNC%Iz֙XuW`qIMau6sEr?\Zy1&dpp]񷶍)w{Orc·||6,?`gg=JDަnF}O$ذ'A|&?j'񤆼|{A5~f|הzaMo Bz%jI  S0\Fi(5@pFo<4~g3H[naT.d_a62og ߮'cGapC}&~'̊qy`l4i՞+}Ko`ۜ=Y47; e*2&Zsm!Hwe+~5S^=ocu[DRl]o$B\z)'G#`lgMd<-RlWl*bC^Q"7h}jUj >TzNpqW1VOGVy0ߖ-?5M"X&O7!8375*J ./^3' 9C=cG;Sz%hWO˔d<Ў̢xuUf;Qrڞn  |uat#s鷾 "Ơ&j<1_qKCYB݁}Ntԅ]7S"s%F-.%c+'BOd:؏" 5gGTR|8&Ruk2فWy+P*eM38e}) 9Wva-r2T$Ir^Ij!I&?z*>B5q -ѳPA1;з=l=Gp6VůAcEЏ+mA6}lJm;rXtlj:O3{JDu4+[KإE?>Cpw!+h̹dLѶ=~󃳠O^K7р8'1qAl#"s/oHk.'-϶'qyD-s*u"㿒Cu pDj0$>@$reD0rɗEN+nIfI&>Y +=t>4@[8K>*v}3NNNoL-]~dCLKB"h!u/%Zsg{ep+ZJos1 nJQ)Oh~um+<x]>n4٢-ܿVU΀N_|jUE7aY+~Y-m3vx>"˻0[UX[_I+fNi 8@nsU Y>Hu2z/8ɉ-muEKLIW`~>Nξ[USqZHՇ,*1 '?W&M G= 0.uk[Aq)?>xދO)1hˣTAUԜgmL>f2w= 7=UhCn* !^(te_ rfW`|s[ ~wQW$7'g,Oa9|[%t;)I@ZY-~VL\ 6]?~'P; y=AkfKW:4;L~Lam 8TDۀnpxZ;}ٔxnW wr$rF+idglY&8> JC(#mF|oK36#lW&8Yk³ѢFMGH\-m'kA/)c=9m`4 ?w嬈i+>n!1QO7:ՇϏˋG8}Y;UG 4k#{6ISD܈BF JS/@WTvgnJ %:`,\l6!.D]o-0^7\I5b>`9NμZ^|-V5[8[%:YÎ~5;X I'.E6sNT-T qb`v-t戾wl\Dщs畯_RhBAljvT҉lnio ůU"—( '/›Ghz4^8;ϟ K c>m/$z]p|!w=@+D*kQ}/ߵ8~l H䛖_ *z36`zZ D`W_C@;ܴaj2$x^np擬ugeHB|-ol~Vk8( "?0p'|z9Oh|`g6}/ gḫ,#6AU}D$}'M0I\ۀ8=,U|63',J.('>#'G ɅI qN?/'8a7jmQ'./rsRn/U >n= Už7x{ =, vY)cG a{JN.KfmCN!{a4>=w ./CwTcOK7{4z,4HIBB,'RD\u}.da2 {{aMkMm?үtX׬ Ν{(}S[p>E>{5<{K0,'ͶH.s]5Cd)+eVɐTg="Aɬ݋0-2O7]s;צ8>;1kx2a"[\@mZ]Q?+9V>km괔 P5^"1Ȫ~zl4/74oi[nYe@V.gxɵgKӘ~oqY)8G^aךX3A~CI'q ISOw '(^> 2@x^wn, V'*nl>K2ڜYsz|r Y7^H97hA< k>$i,ĺֲ0Hȇ}$D#:B6T!^*˷Fz4x7>}NMaEype\D8I5_]2yb~xئP |+k;) ڈαRTX-++?ߐ3C$W ^xG=>CQ!co[ }^{!arR ;K[0쎟5-vYkޖl;)p5(Ns%.2y>zjpm}UWh%;X@ 7PK$uu#\;-OBjQDOp"4NxB>󢱲ж?z~A_X$AQAU?Q#$ھWRM|xW8"ͧ tYK Ȁ=)|V)=W8W|W0J\ϑQ6e$)6EBΗ*ȮB䴯mx0޵-}b+ʉ{l 
rSIbq?PJL|ӳ"$IIeO{[ `+ ׋ }s ͮ >;;d>fR%h[al 'ڒoIr&֔g CRwgϜ~J6]J;/)vD쏮y4B!S2GʔLq6 BiɐLQf>kuy{>:~~؏}jcX\$ uzká^U{(}At3?dX Aq(\CY{<:KqM@zu1L G)ްY)*jr,:BZsIIfpـahIjQC9pSt-!hNw5+'?rp :2rH19QBo+hI8{l^ᜱ,3BWf{@5s' ;G JgXT5I/fO{@W?h4vo q4lɫ6_p%j~=3yE{^=Q 1XLBsw@Hݢp6\~9ŭ.A_U|7cbt2)Xu(E2(`w+}@EQ>l%F6/*_gH܎`Ep]j{@xZ:=؏5x^'- QRVicY Ik?[sxiUF jJϱ=,c}nX>Nrb 2$Jg@;biaaOSr?_{d{C]eOxΠq2iA1017{qFn@f[$բoTyWO7z4O1}%wO~[V')D,'D7*$X0EN^‡TC.HIQ(zl3൐?Ѭ~|9G7.syF^Q+Vg^@xR rzK@U_ŞspD[W++>dyE0fd9@Mxb q3n%!2e#\-P$3/Ag_6+ w@tG~|?Y:"Y<%I_ $"w2B.9nH?'zn=q[: "eNXSOyA  ɉ?1^ +@ԳZwƊ"#HJc"Ța%FT|MFܩWM=R;f*{CVqIr[S" :"I8b/t/ab;?#ζÁ6dJ;+ WN0@tZo#s߰?IQ(dy*V>S ׷Av 8pI`|A _Yy.o;[Mq鷦gNڋKӣik:)G1 .cO'%a;'1gU$k{+x%.#0 1^bM89k~Vj_|W츌'B-¡%?j L5 @"ԬIH6[c!blmQȴ! rjD>ݟ{D $}&A#?7J䲙=Ӳ+=! 913*|lʀIg@•(K=$" n[ST:<.QP pza'4p˲4[14ڙVЎMɨS*4C *:srƌ ` 97)jGg y  {wȺRMRYGa)֠z pyzȃf_PD͗nG_ÁU)@@=I} 8{Ǔ83N 7QR86TO hiMNjTsALڭ Q@ZаY%ѨXwEѣD`Fq"U im/GKS'zXgC P 힍}P9U>bӧW l?_w}GYVC] C2p_f}⫒K|Gw{,`,JGPzchEMV*xS,nz3v*AȨWgk =WY_lc8ߨo C+h;Bn xobIS# y{Kdt/4_REd׎UWHP/Ư+ %\5RI~I'௟3Twߊ$(pp PZ)}#ã@dqeFHb p@ż}[Zp2*xE0iP (/,t_P4xkia~4afF=}oaִ[*hG0!} :x?mfm!$RsJzutdiJ_?I?Ax>2Ǘj2]>%N6;ȱkbl=Ae+֚(=6rxi"3C.=ZoqW=:!9),`N:\_NP-OLo1>B .7]8my_4>( fHYZ&(у܈Y!1]3~z )6PPN%v,H_ $z@WR͏Y ƃ\`vg}'e \&xNd.0#YMMo@oFlOBd(q<(2O}gڹE)GR/na9!GBZ,{Nf}:$֏[ 8=l'pںpO_? $g~#x~*#Lz3;†vixTG iE{гU39k~Pi `s x`Mӂ uTb_q6ZO T s*ZUj(B=krKƞ&x*d1|_>crձ+dh/MBNH=SFF2LSϧE*(vV:V֯=qpPEv&9pE94M~"Ak#gǀj&gC-!ލ6v+㛱9P1ӫz- safl7P|8R 2$ڮ'sݔJ+GTyk{qEyTJDu%vQV1‹LRhO}?b}n:䯂3z;]cx*H `nM92O5ۇArYsT#Owcg҈u.]0}!@blo98Tw L:0Ӳyt+| na%E;j^W.E`V-KpXPSJq@t*.O;*(Y7v{"/j((1Xq>|9+L^'$ERRǃW_*?k$LP&Мbb앣P\ͣ3 >&7Mr x:~_9`d{\ҧ p/=}q/9v)?X^ԋetvKtSm14헪F+ggOs JVuj'~ʆ}r=SN MM^r }J[:*n\GZ:G]1ު+ھap)pj8O&N],j``ԾlvoyσDhdlFɬVgeQw(#K„w1.jR%ذ~T. s)AwH+=B8 _n@IU{>TZ {FVיtg߳b>O=i J^lБWY0Ź#j TJS9g!#:3?9eb#Ep"'sDe؝I #]YZk+64rF sp"*ҳI0m m0}O|ޟY~ m*jBb<[h [ˎ)FBDσ ?r ?]efBm GnVP ORPBͯoYU<XeMCJjfDXcyO65 j"&a7ׯ-5)aMݓ'i]@Qnٲ8nZ1Mn"_;{o-(sCW4bN!~n>^%blMS3D؜]rǁkY0*Kܼh%,+ST eT4p;[WF >w/k[E ,۹,R %ҳ%(|f8<FWU]n-Ǒ6>\ǽO_T1=&E^nʲzj~Cesjpu@qw/8<23,% Xw$"{S+h";ql2#;m\{1[9 rЇC8=!u~O$u%Lz m9HLFĨ`A0;1މkc ?ZA nO6>Ed3~ތNlg ֨´N]ngt5Ԕ'?\W +:õ[?oP zfW9?k_k˘f/ѷU^ blUP/(iˏ,Ow0COAe jSɃX(i"9ƽQ+W',gBlK݈ ̯Bg}wuhHuUs`@p8PTV '7˼o?*3)K01Pg- r3pJ7 "宋$Y<u`Lxի&O ^nFkn?ZbpSvȠUsB]>ȫ$F7z5Os>5n? i0 ]W]vW2@JH?Q~ݪe){@y/z@۬O]ؾ{DG 5{):].@u2z76ʛ4|P#w4%גK14Db?@qŭs֐cو%k (uH6B GR^^}`/]<ĥpy m^s">fػ+B,`\^ua"}Gu{l|Ƌ1䭘͖_~Lߐ0 V ;'EGW"H&Û#awmt?*.MY1G(CIy].IJmjÊP"[<ۥhlr^U`'Mv?ڕdZAON?㶁.|ls;E,e5-,Zo s6fG6jtcTi{)n鳅jV)6^>졨8,1ԥS63t OzyouK,qΨgG&ϋ2lFb 3:I,%u÷j4?>7U%k> uE Wݰyybj!8%bN#>FZ`˒&Ϸ?lIq2sOR_E]F8qAh)aI0Ъs^'6Χ &ތ)m$;DŽi |l L`eĸz:GpN ˃:^o}\bx] r\XC $17ɬd1.Vl+?4ۙ?Vg;g%=!󩦫{aNϋ(nNCR߲s*$_ˀ$ci]N_sK֭w̕[ŸاP^x4x/H(ea}RZ b".vH NxPl2\+$FRB6p^=T ;xƍ`Ճŕ"]ᗖѰ`X~ڊ-<5cy \9`+h wN >hs.>X~g0刵ۆNL!~N&o6fF|̓%:^;Ndؚ=nC nĝGI ,+1Kt4 ^ArTVLJr?rtHf ˖ay%T$d2St #SX|\lqûʐ",b;X}O(L A[7۷wT-K0t-_3k᷁VF Nʇ[i nRT) m|v;(g o(R[CCp4o|W%X^rN8gyrEH}$yCs~t&ay rb2E! e{op͝5Mߖ-W[żV2.nF<`q,J6؞2q߳@ y}/Ӑx:|$_>I~|$QWvx?ZJ^D?DpW4Ye-D髃y`+;чɶt Vl~hKWadpY]tB;MSϏY .m2ktVk||7ސd:{*ܤaq|i,be:N\R)|Ҫc)> :M6vpA. 
Yua쥣t1_1n0e vJ-e b_WOwr_mM]6܎#u(p'VA,;{ (f.b<>ArDT,Ot?F-@8<\AR?!՗;Hg|J70m<`s.SmHF_i2zu;W˺V>h}I04hs]2 )?+-+ނK&V$n&㭻£,mJ'Pzѓ ޽.(ϰ(cef Td}I~: W٥IY+vxyINnbzT.}ldXsǗ:rN6҅ʹ~ЏWYp,y3SSS^خٶ}qF&%ivꕾ`C]k4<+-No?Mq'kBz[ɬ6wxM* iy9Gea}ao8Qæ ~Щ¿M<>vy,ƇL tLC{گ+@ !8"Ccc< Ш `X|?&q i,9"/cDOR d4FrZw_/?y I;kpz.)}PE{jb;8hl{5#bbofoGF!*=#9t3TpC;l /Ŏ~&lA㰦;Xz}Y;|D"= [:c@7Oo3 9}y%ΥDŒ߯jJ((ub1}R`yeYpTDN2CxsB:$/h{TSe@r~EuI} >2ߔDDorL)[⟎~, ~r/φ?(C ?au-ԋs{2'@h椨/Ե{!MG,QuY$pMq{]_6܊GXGŕf¦ٸṬq_v1J.٥tN0ӡ'>\=ĸh=Oj I N^rjE۹M~KKOآ Q_#aH|Y&lțDbmC<l}/Py๥8I8ĭrMP:O|߷"Wk,+$VeBx')?*q^Xr e3s0'1/أbSV廇\;nPgf]+yEޫ'3zA>4 >u#|<j:s7p: 9MhmE8É ΰ_AdZ++Js:²V*iW83R ˊ;1ܼN l]~vlD|މ.DǔHgF4|ş_U9|VfgQj2USHGIbW3stÓ}y1@,YO3t"QdKN%o4f]?K5wrE1`c&D NP(YNi`Q{4_8kw\~ѩ3?ĸcF7I '(.c/#84J]7&}4` 2'皆axC$XE3p7`HMY7'C(`?ϼyߓ9eEhwo2`߫I,Ip-Uњ KkVa] F7h!.֠P=Yeޙ" nw-h̥*QpV$ .DɠWߊQ)GzХLm[K?ꔕ#?K^GJci\]>ɛ3-ʹK.iG3U3ug'̃jXfHZ6U3׍d"!(KHc&~OίBHzt ,'u g%6^t]>g- 8t~glϔOd X}7&SR?K6=gnƱA*s}6&㼟 9/bLY+_dlGxb;\=ma'Y~a&*^0ɪ e#]>ۍp5|{^`=$]jf%.GKjAӰD`sw!"hG~U^I-p2$~.s“4_|-Jt T1Cwo]_H)WrBw`7$ew=3$3\s ٞAX}VG^%&V{Y]@gWV s_*M1>97xiwmb\POq,]u|H (O pU,썇u;L%|T緯/d_LJ~ty^X,y^Ja0QPO_C K@+;@`8VoW~>NN}ԉӠU bS;OPT?>;h<Z'D b"% IsȻ2'ifh1{EkjDHid#ԇ7zs7`ZB+12sPhu % q#]o!Kpa@_٩n]|W59 #.Q0Ezw%9bc|`wGظT.,_x\F6VOc;S}p~S |Z_[#Q}rꕧ6\u%9VM_)}w1.DY{tv}7<(Jly+7g"Mb>XC7R3~¾3hq-P6Qoa :w]t_|Rx+},PϹ/{$1fOҦ#m R} GjFwϨ &筱sbǻrWhN9 x={k6M1MǞkoC^p V'`d 9KՆ1NņO7xLg3whsTN˯5)q gO}srsXƌU޺Y+EW?>v͞s+,$`ʍtoa֐+|``ʍKЪ;"OG'0'ΒW"770ypj% q)*VW<)Ok'IǢ;#GڷDH4ŝ:)`#7.,*iG0Zf^ӇB@"oO)p{@Jj~OWz4cϱ2h>6u^ZɢnP VFGJw=%Jp_BAKϰ桤֩f2,XORsu Ч9~03yՎ}Gܴ(xİ<0$ß\\&Wx2% <"iN(~P1uMSzy[$ê\ҧW%jΫ~tEWV_KBQ^`UdU E&h87ؗ}:OHzݡ}/z9ꥌrP9IEIߨ;~ g,~"Ѕמ1go0b#O(VFhxF.쯊+3AAO+f%ky-l 5f,s9$-Lԏ~ H{c'1~uȥ8O(vf%_ўL tPjEa]d"Ki_Ji7p2/f:>&vY = %oS7v8 ˏ?!y+/a1c<.P!)/)/k, &U`5 hbm{zDD'P0*RMENο0hdQ_cAd`BH]esȀֿ{Y- wpn?ޯצ x?{M}ǃNzsB[{0I/]Lt=F&Apaϊ*w.1IfV n]}~f(8s&xaW d-W`AǗig~"5o;XQ% lGofحi%;`mfթWq}hOϛ C@R)|44" 7QOcӆW"N:ub$\' T+X>?P79\O'NT6ygѤy~[}{$EZ(X,z"FQB&R{ww[4c<sMsźBfe\qo/EwwFJ\{5o!oy}Z Clnx|t*hy$m;!%Vw++AU.e=U,D"G,5[5J%Āk(GN<@'0Ci6U=vPaW3:Q;շyz}`k4} 0<< _KM.?j߸R%*s!ZDy]ø ?xV /`K-}Ehl14QR>NsemtrAŸ0eЎh 9? 9P c{]MrCo/Ÿ,[3(2"&0] T{Ta5hEg pѨH UZ%ꮤF*^5GHW.YAVQvak#G ݾ7H{1'f>s`]&8Z?QktS$~++Hþ[x?vVlo0Nz/V'!ׅCaj^%WpP&c9|4!u8ۧݻ*&]՟W>i&>p13}/(2Pce-ϖ#y1n,?Bq&)BnpgurDTݡ-,JI0&Z=ivzC/kYޞ" q9lU,GPݻp ,Kg/ {fR~gQyqDRƸ] ,O!^pls,I'jQXû#{فR4Wo)GDVd\f2pi/8XY{U`W0,[nKOz{)iꮐ9dBƽSWw Q?.)Ox(*yL>n#ױ2߆O,h ,k_ uayDX^ZZ-xXLr=ԟG?J(ݞ=3LՒy-{0G"]6֏d۟)q՜@ݩEIPׅ)}QM1ޛ`c@lvRx| h>;H iY 6w wFP=_%"ljq-6|pZڬqѧWf5pQ,@e?Ƭ=_aX|}n^ptӵn7=b{Kkkbuw-k|V˙! ˷#|לl './=k-זe]aƯ=kn0І1 [M=1NpaނC4.|ʸm7vFc a@/`DŽp.8e% Ve84ql8ZWp%jnb fFpv c9':ɐO;]78TᓃF~'-F͈: ^F z5q4::,S0.J5VHR헱W!Yo1\|AdJ784'H~r8J{BsZb),'N˷bP,QϜ&1wT gHꍥh~=+ e_A*OS/xz6_d.GNVYEzzֹ>(\(5M W1F`])4*rV:8ga4fpd䫜zr3!!$"iWUm$eCa0X7nP:f7mfsߋHt~_1?wKM:'Uv0Ϧ)O6t?_} VC,/5,uZϿ \ӲȂ\`$X}Fw=cCL]M[TDpl }]|Ae8_*N:2[r WX䕿?]t>if +ᐑm6%wܣMRDX?z J'@äWhiw WK# aZ{D?WGI C$g3MTnˤMAGgXݗHhz0Յ] ȉ,IΤ$O\Bf~Ǫ}1}۰%O8oN [Zbv_?Kc|Zύza952i|l{2cNzC5ti0su}?7\Sr$eE^oBܶXrq ܽ/4?DCA5Y[]/fi%8TK,ңQ3knW6dz=:O|̎lXD1OtX[ԵY"7F+Doލ]@7VN_gq*~e=Q!ԼZ=*eF Coɣ~'pvJT5~3z!R>3mSݟ=p$e5. 'iF+aALXT|7,l}O 9=^.>&ca 6`'S. /DA0oMC>ipRwr/}N笴3ӶC<h×zaU Gr'{ zo #JT+~D3uzA0jIdeYO\{ͣ Ye y[- m;;\kwO)s f[vy+xvte W_-  郔jl]EW7\ESwnpݰ;|NIf QwQy>Pxo{o/́OdUnu.rgTmaJiOدΥ3-ig}d MOk30BjEK6uڲi4 EؽٯoR{%T%wO[}nzyK;#. ?gdM|yDe&KgJW M%q mxb2s  8+ 3.= 0dD`+0`(F\1pcu|>/Yzmk_? GEE:TvE7- Ok>e iQQ[~j>WͳA߰1GW{ϕ3.~yB{7\e;#Ё>k o MlJEëB}b=~>qaOBezV|GĹf>Hu Js}o}p}^3 M*>HVDCV51? 
?On L,,5 Ug=D@#SVRO2v+uܰ_HA.2gEDUE˫褳ܡeDexD j=0=Γ`_;C/RyF|z{"Vs9l˟*Rt"*;=`ֶc=Vz}Wdғ$>tZuz(S;\{u2k.{52vk'PL.&9-2f,gIW>g!pЌ7AWf|r˪U񡪕 o8^C/_=~q=-X}0{ QOg9p}\H=(G>A.\MQ^Q5#0k.ƩoS+DE0A;(u`X}||U,M(/cwpϸN xBr6R0Y_*mny;_1y yT:[{47\^p 6nyhx^]| j>WL4󦛉; kٯdQT74@9g ks"%(v) ꢈUl A{o/+۞VIhwQˈ4ʽ`+I >!OqyKXg~χ|!@D۾ [nPm^zS`tw6 qR&@Ɯs NtQw #rEqY-r #G&n5G0IĶVdY6vYFJGJA -7W߳v(_ &1;Oks +:JW2ɤaJ;UJ>?䱼 X.Ɠ"A,/z=&ZZE KUR/S}VBv{Oaz;{#ő0‚ І %[XpB-~ވx2sdyjYo~t;^%,q6p#|+}AeKs\i,:-X@Pvp-pOX-.:)%xBVxӴH|~%ldn =*h,=3Vī)l.Â2|ш2%w9@~R-xs$tÖ:a)tJq6kҠf&~g"$=JXlrʇB:[̸E%KsaFl$:XAugjCfW`(xN5Zrz^gAR~!WOFo Yn{ևY2823BwWE$zgLj-ʩF!R~(+9_zW}%`0I~ wOv=q "sI~X~\F, $~)LŝcJ&>Z)򕴩9=%@QA? 󌵬 FVǩF/r6sWV (Do ;w8" C pgP3:n9# EZ^^\CM TI\'MAŸEuPM2;!j9rq6{"/7;:G@`u,! ,"9m<ϡX BTH #U .<0|yd׳?>ՑGUڠcHRHC]w9 YStDW{r== }6ro φ)7MLQqXKY=⇛1fAZz6!D yvMi|T|`-sS A\QsDj{!u92C|K- +-7I )[-9R(wB[*;/OfN|+A,c3ՉH!o0O0F#u p૦utfcVB0|9hEF:4Fp|hO=]~yfcӨfB(EA-գc7^K'yZ5q1 g6WHt2d~e+gȜ?wM_EIUEڙ%CJKÇ[:gS|)"7 !YD4qWو۝_Hϱ=I훒6G@ d ¹ _%䍉@[y  ]AǛu:oyrsnƷꎮU`@I.vYhNL0 DL]3ؑ Q󵍨CY ꮥ.FU|uRid}yF^ }X1}?3\Rͮ8Jfwm"ydqgڳKq#(/֠߄`k":֋ߚm-ml@Q5CLW}>Ej&ƯL3֗s8wc3t`/@Bx#O{\GqZ|=`mg<{wjRDsU._!Pq|gz8xiP]y_2js'|7K!qP(kjAr. !sʴy] '5GC%{|k[=d^ZJD7*8] r~vjAIsqIPw4ʦ#UJ7]Gx,ZSᤇ|I[e.OqmDBmau]O(g9 K/h6TItNdx D(:jR[^Dlmx r9@rYUR|+l i^]zu_QGcѤSxQ|ۘw%OeĴ i}Ȍ&vWCԯ['Y(d qbjQ|{BL\cL# .!WgvۄGIgZo _q;J"=%r 7MxϞ{o?y˕=:4XFմ$)%d3x֌ ~OF¨CTq 'OC,'l6d%XYz7[5ߟXk@YsW5B׻~rہp,6wuN`}|@WaO7͛*]elָWJZ_ngˆyP㹟 X J^jJӌf{n@hQ1hȪt*2~uۇ1;d c8Tqh)m·]8vo _|N׫>խ+c T?N_^>Y˳D,\ιnnxkfqT,=IRRÃzB͇<}NFզNU$IA~@چT{?}~ f]6O 7i.J۾J^sҪ 3N?ѮyDcyeO\TQ:9_rG g(;{Nɲ4Z h??0RKC |bvT]Ûs+*czC*xOs -7ɦ=Bꁹ_xYQdm~2*y0(69}XcccGl1o)Qw3Ɇ#:Rz$ϐx]Do&RuNJDVDH[5_wc!l^[TDj&%/J6&ӡ9M |CCoCChM%,6Ҧ[~9uW7sǣ*ô(%y(]#׫1l$@{P?1$ a nB:j|L5l>1JA;ޞˑhF0wwP{RBNi943 ,óf+ &VQx=SL_ݶ&X!<{)AS3%@ⵈ ~_tO#4 pKMxr#\ϜWZж_ݺrU)3,,eW(|߀43T%˹.]U8Q+ fADrg-_;R`; x⿜"\rY LڨaiEM9i]c쳤 k&CSS@ߧOGrRV~k!cr51{_:9VůgSq "8lRF gE>Zh:~eĹ m-6ɨ2~rsMR-}7^jnTxMs|̆X> gϷ=rE_n^̂^dZbFuy=؇/<1vS5Džm`09ϙLt=P.RDtճ.׈%SG4u}BQ\/'I=ڇ'R"7?g sİ {iV?Grt^yߣ<>WM .'VoSV䚌8'^O R}p3>LA %י_ 3#!N#2,jMA_I;lpr:)ٟRUqQCc%&oCIg1p?FCb9W*SAfmMy&jJ*bMC@}ı jUh\fkћ?R v< N>Sn8>7 (51鎝h= Sd|jn0wޣWZF)u ߍ\UFqT DN?9ckgGv'#7޵&H-!M|Dr0^x!J޶5}5F`:(qr$=[-4Dc:IkAs!$$@?']NQGQ )N׽"lF]O!35"&n؃ެkgMxe%.~ϻYpN&*u"5<GezQ43s)Z`/|Z(֨jӘY-=njV ?D<"{ھ+bco!$@&J=KBbcI5Li !3,r~vdL=UDzrP|)f j6~c '9s|>m7"?o/*#1&-8P~pВՏC,/(ߍelg 2⚽t\e>bP'3-EU5AtWx{u~06ҙ*~9~Ru s ^@,S)Rq$ mr85aǁO77z{x'Uie?FGmbwHVs`_␼_?q̷%)I..*2tFu^|T\r^8eBp:v.^wc=V%Z}\c1.P'aXW{y`>mO`ScXz" hok&ewXs_g#6$@uӢG>!yp$k2/=|Ef>Dǯnpi;D#vzj&L9@c]bs oV@ kp\B?O1Wm]%Ѩh+u8(TƕxCenŸ bgwu`dz?PQވ cܠh ɮҴq9NsnGмi?o uU| &iy-FI882}:|gWB˵d>Rb$:mQ1tA D#Ī!!|%t8ݟ߮'Zy!GB1[((Qĭ<ǎ!!: _ûGYؓKXyه%0M1ٳ Z:$:3fEV2vؓ쯟CŹp5\zȍ07SV19PGYVh+IVG2Jxq4"J`݋Zn$}Jeg<.F $]!8l$?d<"ӧe=`xoώR4XƋ3(q@rh]mjڙ¹.1*lC]@`Y r!0L|wQD򚚗N5'r.Mo?(}>jx{2=RYc: _߹~7ix PSѿ8(uN7v.qоIG}p|__ܽs4g1LZok` q8".I8h qoPsQXSVH!ID 212{ Qpߟۗ|ե {/d:I-n*DSJ=z^X+*H}qt1M[9"pC5oOmR.qLEte{,t ՜H74K/C7g`쨝iOHr3}w ldlh4_iն󼻼O-!֦ - f2zR %InÞ|@qΓ,,{@ &F &~y#{TF Z48c9lƩ_!zrTkG}^w`>-g  xS>H|!pvHxJjbqc9Tp)蟼Yrt/"kT|8y3̝'7Fp=Cy ?M+}NJGLn@ԯhg o;&E.iVm(mv>AP?->C,輯T?gCԶ1mel{pUm \ZiIkX΂V~FAS{C9P6$9c.@Uhr*u J52;C JlF#-uɨ`ɒb `~=v`žvcpUX^y]b0*҉]ѶZ365\!"w%}p>NJVjao ?ijŢIޣ xUcSFCvA fۣ`-_Hpw~5fQvY~~ٱ+5Yx :u!_t0A0H9w4;'WNil?,$rJHfPUK:Q* laHP+m;4o G-+l 5yZIRdžޖSAy}WPr];oa96T}.p>lEpe8QfWvY‚?VP+C%9m %> j{>pe>ImOuq}_=K75 |Z*]0A=q-^qwWanE öșN;Km VD꽯菾&.ݏX; h~ŵYOyIR!藳,]U|S gE-bp,\i'N[ "Zd,.D^PH#ftHi8Ԗ$}z;BgpKs-cv ۰T)7zloLqz^aKT T܎.x ظQ&'mbSA 0xv~U N;@]/؟ςH~/Y3* Po2 %`@/lūњMv@ ] >&mP#Evfp3)m48qq v4Ur=w 7 sC 8*|Xp(_C5 _:b\YNeOS$/.&+,~B>NY{֓/0uz:Y(`"ҽTj Ϫ\/NV"ia7G;l괡P a9 =p`/X*QSB=`Web>{Rcta-z2La ^np[COy8?} 
;^×A,qشz<)Niơ>uԵdJ5PzI4>3K8\sAn_RcvkMn_S?Z:I@wJ<_#n#.e"1v3kyiϠj >~duk/Aקap*(X_~I0 WBΥPaU ;_6 cٰ|l#^JځZְq7,s d@?It@uַB ̈́W>֓%PGze;.vXں#~b$z6q?Ag>Q H _`Ϯ!Wq]ؾ|__̵*Ož23w~Uy 'c˽X^9я&aNp}wi'VᡖsVE&5:{wCASm\t}Vl^mKNq 0_8!.w~=ti5A6`XCBFf,ALk8'!uq;ܚ[\f5TWb~?'$pzر|nxzpى?ePƃЊYZeR(p+=y7zZ(BvK.֓Xw9 +OPXi:\ʕTLW>lו](z=@K@Y-CIGdɔB33qm"w֣oOy‰}\[xll]IWPy$=R ۻs(;8-iv?Yl:WڮU 1֢xΣZCYS,#ܖ0p+d=]woiYH@Rmv^ѫw8E ] }Ϲ~YKF=ٵDUA78_ۆ,W{ѹ +GJIɱ&ҩe]U'Ckx A'eLaJG,gZtpgij;Ky[wm<u\_$`4)[&8Rm IXCEN(y&fnLSSN~}cG=`MrO(ZqNQ-NĿ6T߷9J PTX>PIH2gYY"!YyB1}ϝ~uk]}{=nU>?yL'VhQ{HoFa%NTCqg AI` ipBxp܀Za]ɐJ&p`&ש$y+"f'@UW' |b5]]6[~cyWyoAv[ó6XEM<#O8X3]zp%C9/Y]IJ7MV^tIs9]OG״[B}WEcj٤B 3ԧJP٫@{uQD:#3".4ՔUƺW,Gz5 ;/m{5Um[]]xuB_]>aǫH%ʞWH_P$B<#v@";ws,$.~EF;M3Gzaϐ&GH6㨔% /N.:B9~ɧ02 DL}xN *w>$|kTZḩx%UujOQk j95/*V\r˭X}2l!0+PnYw{ ߥ?%,<vrt~*lw;lqw^0;}ٽ`v*$bpZaXՑ!UW{v_[> %/0~1^x Am?_>k ).e9Jm . CcgQPe7_}* M5DQL^EnLl5yK@ð$FXgO`9qCOAɨ_x|yoߔ}XAL{^7G^NJ)er60 -Jrx@o!- ^z۟+oZa Yo o9A;[k!  }{9!G>vR4Sq֐]jΔ[^gǔc.ԣiKy'ޠX%5Tewߐ6#.yxFlD$ŎKvwOJixM9UMiE@ב!ǗD_8~x)Kg$ գlYROD,h=FUeg&pqDտ>VSь[P@#hp댇 )n[Q4j/̉c j[66P8 >Oi nzK*Es0"V󉿒DoGq(/ʳƐ]gD:# …^iPҜ+>%{/_?|s%x"۹f4O_ѭvq`#v6'0 Uda7I݆0_@2 =QOs[h2mvֻa R?(;aoѩxf }ۜ84|HKCmi?k:nPan\Eܦ!/SR%/=A/3 $ 7{&+CvMEI0S!2L#_Kȃ$x?s Y) OP\ v IYg2Hsͦ.#6~#s'}дq^jO8x8 7H\] qkj%Ic5zc9 ݧBV:A{5*5i 'ZF_|`y_"3 /9ۯ t J}aGkh QMA7zLS(XXS#3rNާ#a,I>O_A_|~CU:M,_/Lسn@6;% 0kX#n&9%@g&cAx肈eF6qXxzAjD!vL%6@;{%^)ߘM"RBbZtʗ#|X.R7r|L}q݌?ͦ<'3.1M+{nwNB=~S9;,7lǂdl}8?rWnW+5ږᡱ3bZ^0΍i t~xK_ .٭Xg7n }"yJ]!3^tDK my."2A-Yه<׿VHRBz,I|c_m(fsNY| }K7^糕? v/i) `dĝm/8U>솱jҲb=l ]Fe:q`jr&beDPNHsIov|+dOr^vAq͔._X|0V{Hef7_묩SrwԎ8B8Xjxi+0üYӃIʰ{o o~~m+|i&h q4?OYW#؈X}/9KX~ASl]DT- Z]¬ǏțW QF/qٕx z_cख़5xؿA:F<x8%woSu=heʤ۶HUEk|^5 KKYwTzlVw~ (2놨vMѺ^UiIvZkd']şsMv R}k5TXߐ<~F !7pu;9c_)ac |qvpfΔ gxqU $ OHx|;L|$ ( Ϸf^_ ]a-K+Bܠ?pC'(u¹غ83Zc>Qv?ֿyZRe$1Gkő‘MD7} }g Δt<DSx8L)/IGJ&E&p/?Z}DJp$al<#,os@M 3R!SFASKH_$>4{m /񑕿fnBn!=L#*0BvYVb `L9W۴:̌ΤO`׀gVfaiIKuhPg ٥ 2Cq.f8ސjp+4.־aoѳ.W({gﳏ\.?ߟv>u o{tO?züIᓷ_#.n3z5*$).JvU;բӂo(_W?+~Q;O{5cPRN5{}ёk/yU@o '((pyF`E%"x6I 5n5| w[_9u"s)ďuguaԟP6-X>_]g\ rW;B _졆u'ֈ4vpHk),#չsXI_־;g/c wUIǹB;\0kGZ,o0auQ>ueO(ز{4?!%r{(z8 =!pakC=Y3G;Q "vB`犄LĿ:PQ6[}zЛjižhut'e@*=ASjaQEZ_2WRx.`<mu^7Hr|A!딹 J?\paؼ%=B7*"}~HBaK˅˦P"Bү `/Avod$ϣ{k25x9 NC, n?o9 p$=82n bj֍RXwTrV^S5nb xZaum/q+C.G*TEwS{8L}^ah@|P9=Y[9`Ϧ<-Quzۿ>5*u]FSUp7;d#eJ;'h:)~]Lg Yc}`bŐ˦p@Yxsj%dLWk^0w(h3>y`p 2W/5g$逇nI41Z%,%c#4d yY+-?:Rd` $?=yO?wx~_$AE '^n,?6GΙNw .c1O:N$I3< F؈ԍq q"/{\qxy㓥lmy#O m)VG qd`^亖7 qd؏~"]HWs 51y !ok/ Q }˧_%%ƋUv>PEiEA\,`(آ+ .1KFXt<ӵDnEEPv(|N!t$HsU t8p6 F=GG|diň_K 5y P(uo$]H!4"~өn{ GcLόKc#i҆ZnYz@EacY,2p/Xtv:YZnyBtY5 {hZ?ELoN}A$O>\Wp&X{\V8R0ݶuOxWF@L -tXA1>+}/|य[,$ѓ*)9b76y0VyP(;&LW^/&F}^;zMu[`vNZrG+77=4#]AzUODg)es- o[B,F𬓓1:TvXښVhJ6{VYO4JzAf!N0qrU.lwGv,,=G)EH=sp[yu t{oXmO]h06˭ >n\l@4z8"eL Hm%Ĉ̻ ם9PE_󯟵.oSZhڎ,y(rXz< {t.juK}&@^oK3DrF#;۟Ʋvmү}z@x_%H=j ~%68y~)+|veDƮ'Gw}JlI 8/KȰs6+ANg>t$`z7()fd ^o zҷƻ]XԶŧ5@N =2 Lڋ۪sփk.|Mx}I'Ni1@s"a4? 绒(]n ԉ!yϕu߅dz(^?g(6韸Jʞ@PvݟH)Pu"qex2NsPL]z)``p tb)'#dK0ؤn4}:IyR,)#!|3.dj DD")Yk}-kd6eك\@CU9j\*n؆KEѸ5GCVR$ "lӃ " s=K?z>cŸ].X#4}n2 = (hn'ǘͳ%h>سy,U)_AϮDpĂ dkh>tGj!xՏ5ꜿ?C5,@D(!-Gp.q5mP7lPgE Jkr xƪ ^gގܲDRM(Jm%7 a>\7QfxGP+u}r|KwNAk q-NL?"Zv>2{ϵҎ5 {=U~"ݛ:0YC' !,—T'Ő8-me1PԵQAjj'hNrRh50VF6/1zNvseԚwx yh녶 ۓ-GKžHwFbbJ[r?y-%l_1AA!El]yN6'؃(2fWp 9 ߞ(r^Y:͕I ?-Kn<*ܠ7 &HlKTК 0oW3\EёK6W9w8.ţIƾaԣ2Ѯx޾c`8^0 < 8}o6DEi4OFd"ۋлU? J$Uu=yl]x1ZE0. 
:NO/,_6eIZ21.@|՗ɝVig MLsy剔ޑ._uW+$ yq.@uV=ImԔ4}]* QLIqpe$W;P&ִv qSq#n$GO= E f2;|qkXyYbߖuAy~8|U,f?7.dQ3ۗ=-ĩj@ĸd6p\tg}ހ ̞YQDuʫb`ɏ1@Nu$$aDDqi5rë_9\M5nkEx*4w„ta?ם-/ƶZ7C!חjչeYF*:{9dJܦ;6y-s0ck/[y =޺y8O1Hg—Ytq.ܽHhȞ o_Hewyk:pݛ } OJR@pGqs9gb.z"3byryF,o'T"#%=]wyXڝ68F׬G -;׫_LTRI!4])*cZ}e1cIh9#"׿ bc*Sڤ|j +{n t)4wr\ eS U v{ Nf=)G!4e$ϟ̾&(Om*)z ųꅹ-YdjN44}/`$N1E^"q{=l;W]vu,ד9'VC'(̞=M~ԧd?DhXu2l @'Xh'=J +?#gZaLosG380;AM2ܝkŧVA|%?= 4\m=KԘ*>gE{Q/'3Um3,7Xj}~fHdp22e$tWNVs#iO>.vrc'4 "ԧBssnFX!DgxV1 RnɇB~;Z^HDt:J62jqjX=q +a]h\QOs}ESg'WK߱\~'Yp.4O j>&l7@$Jy#~w^lVF!H?{޵y}7;pt?n }e8'~܁v!4%g殎YtFCńP]Eӑ$oH4nQP2"/r dC^aQ%-s`񽊵J8OqѾ60^,eXE1k%2~#!}; .*b &%1U.LQVЭ3]{K*;'u=FCQ[Dj.J#wCYI9F Qq'ø1k$dSq^@^@rMi@%̠&)P{*qU1;=AK* .m"\5p=![pl{9Y Ҕ""y2*=}9P?Ra':g !cfnun'&Mq\=8~Пj83RS\D4wi,Ç}^X Zى#+E]"kd.ѽ~y0c'>4^e9b O6.[$ XٙWZ>(PH z>T, N +F W{p/8ܳbMZΡ}ԉrۘg;aՠb )+xNCc!VAmY*HG&ڨB|n/zK# 2yr_ltB)Y-`Kgt ^1";Gj!O-~*鬱:a͸ yČT/Bo74ElDb,`)E*I&- r3$+ڵ}̫S[pwT§L luϯ=G͋Qh98Nuܾ\3Ds?m# ]ᔤm3y:?H?Hglzd#a)ƍrX>@*jf?IFjB>+_D 4{RW&~w,%4݋LBQd+_X߾otŗ8Oy`2#c*۞P;S W!:g`0.\ت3Vn> Dc=EW[ @ ycݏzBz}38PV Ly_=\:(TzCAmVneI&)}8g>xN4wīSh¾GltnQ2 ҵ.CWdQix7mc*mzw-M۹ 22Îd-; b}:O$4P/CmMܬ N; h^CS-e.kPY;zzn݇L\䜠]w'%tC3L1Q< o悫tjh`fAv՗ 9fYvF Jp=FG 06G24z| @~q߆ "S$Y~ro@u=J@x}ԑBz9%mc}oY"%pl]g&K z bRVFlȤ ȋ4a}gr_PnX4#no;4+e֦3 w$,˾R;dm҉Ta/XmD3CaC@'T4v4ìUXX\&4I|z;5(L~ mάC|2tsyo{RrkTf'6dyo DKEr !ɫ>2% Lr{;lv5%'hLpJ3|a- .,w5N|-Q27fY{IC -ar Gbz; :9a?%@1Δ*- XtQ``z}P|ױtls*8mʛNffɫ矞%.o]@Xx[E?€T`bi#V'%:P64@ G޾&:YD@Z"ĖlnZ SaOhQq&;bpopX4єN|S`þg7`||h'T;|4gus x&Ë)=S8r>%Q&֗Sܯ6ش위fe88"QWxaWK?߷;|Aϗ *D2.Gi<|s{ԬBp\w܎wh|+(˦ttY'pocerx}:uE4buuD#Ӛ|9̿n O̬! +cjyjbRJ X0z'0H? lĞ#/cQ "o9E;F\|Ve)*^X@H/-+sr)AQt7m_~a Xi'x쎚V=ԕ-ͅ\XA{˃)G8@l֑gmŠw:4#2)v$v Q,C.邚5 ECyoc>HVLaSL4KlŐxB%5.ٹS?iPrr:O\RG:LEqޮ3|sӿ8iJ"R\W~~;Ķj+mtPP2WSZRLT|NP쓽"esz)š/Qh}ax7<4xrPUit9!S̈́S;-)_'~ zby_43YA&VOj(tP?d v<5{V<+T[2#X3$H DJ Sf_YAjU7h3Σf0+Tx'x!ߠP~"S /ſwfCO!ܭ;P![3آ۽j :J͠ٯ`{5As<^`zWXqs2 mhuJ +A:YցL!#N>ڛo7Xݩa|0MA.%JFcO llfZs/h7UG' 5-k}yWezgrv)Vx9~u0H2je|Mn<r4±^mjuz%kI4X*W\E-`~՚ ?{BU-ay*iԹBVyiJze -jt/Î1}fcm\[T2灦~N}K%Essd|B:x$;.!2[ %uj( F @l~Q .ΕK:{^_UwvpE.oS>49z0ɦ4@TS{g;~?*YOŒIe_y,߬M]nblHWا YJGP%;n<9{(d{;}Tuj8A - #XYe/s+fjC CNСy ̐LN83@bRK|{HA`W_x(Ka֞_k0fp%5wSD ڀ 9rh&!3JUx!K/J,Pb ĵjw3~&[e&?1n&yg_FD戡7O'H,q/? 6BvɐE `6 sϣGԫʹU ~G[KdEo'^2C#' /vӘӢ5ˑ7)-vR <~WŨKXC6Aw[PiF38Q%zdTQ"tGz{:vKYlu8{jmXI4nt}hh/y‰/[_KěnGqo'~ bZ{5^~꧔A%'XC6,U'-zBϮG~Y'35QW·A̗?7> zߖ;i]Wa 1^{_8XQHtY oy3Hl TM)k YL`JLdL $0HJujN3k9)Dh<0Ჾ{oM}#C2Ӏ$3 qIeS<<$!DQd,C):2 K% BHB^s~sc֪:ϳ_{Cz^w$''|D99CѸ^]^Tw~:?jy*F`Vcaꝸ.}#,*דqu][-qxnuoV<*$ z$Hd.GBȑi]GkM?hC>Pc8d\ǜ^恸J>H 5b~~w=2?a뛌n;CϋҲ= ?->oJ 'h`^{-.0m#= })bUpa=Cڷ$qлwM#PZT./Pn4PbuA.aB(L<t"!M>Q gNʪ_4۞o|b,|#'zy*Q*gXMF֢ǷNmT֠ƴE\d\ZTJS\b?'ꭱjA(Ņ dcT~Np3IQJ ѥh';Jb< Z@ѵ3xIBoaH^#!;J#^l8>o}놺E3}O\W{u;g<įn-C v|'ht: .v"'D.ɰc(WzD:éWwVg-'~Gv?o=vML[ė#>} J4᫢Q-0m-Z NaJ ^P&wq@gbv~ HXtŒJw2$ |h3~k3ҋOC\aMb>reUչv鳇 ~o\Ε+ק= w:uq;Ԓro=)iyCi9:q<_l5z&h {#Lf(##.PUGK"!?ϏAmV?AK*6klBբrwR兗_Y뾣o9u3˥Л#L°D;wYr[,6 \vdV;EA9Cވ8$^gKf\1I_Z(Y^3áéE>!Hf -mI}l<ɶZE+\Z? }< ,=oŻ<1d 9<|92ybRzTD)%?֟޻1gHÏ ,/C1+v46&\ ؾ $Uf-aC-E/ zo~1dя @Yjv}saF bΰx9h`c)yuxUυeo~] NiL)~on/݄]^&*ǹ3v-nk͇<@RI֓$=Tђx,[+Vfj+5<03?1nztWtCHH  { / ߻Avm`hq} $ K3ca1֟~Z˜`f坣㏚Au8/o8o;s-{OqR~=M_q7xJڲyO*OcD.zWdi`}̫v^0+ > ROFA7=qUqp7YcujF[ud[g[KoΞ Նb^KۗKOzl(64^{cAhu7H<q<2>Ǿ KߢR@Z#w1W̄! R]y ODBsI&ȑoVKA$Rl t9Y4BHJ6Q;^!rxUO7Hk\x04e} EUBHk-)B`yR{XcXM#NWb`m5қ&|v+VV0O{lGc//HNLcpqw|r/,lyx%43ܭҠhQ60Fq;1y tbq{{pƵ"A \{IduN^z6_D 'ˑ[{˰!Cd*֏C$_ę@NBS!ntc# h6͜r'v-a/fuGKtq2; ៰S6T'~y.[hketc3X}DZ_;f!և", q"K/5@o͢[Rw? ]ĞjI'0]8Wh/]r |+0̎ka~˯RdC7g`UE@5 2XECsEA|uLaw[,׻:%5f:Npۙ^J)z[}:zR,9\_dG'#mKh!!)Gj uң(]lm0:1L^zqG 7RJm2ow+RU. 
Z,;(n:!qϡPOD6)9CgL;-pI2w hzoi ^ &@J5^P\ijaT]?ѕNP߁-Jiv1NّӲ>~{I"0\Tmm}BN bzzR\Z;|,SF>5櫲?6xboz Wyy2&|PļdQd~0'>7{>[ ?9Edf ^d(Vz=]#3+LiKL"&tF'bI_9η_Rٯ yMf]q%vJ$OdOt׫Y}3~{|  ɂChqS):@,IPG,W5H:}ח(h{?ga ϸLҤ5Ƃ{p3wN'qi3\|㌿8 !S3Y/{unoӧ78[ϏbJ& A}g7||vF慥J"ꝏغv\}Mv+L)̼Ǖۯ:íp^Oxr?p_"'z ;bbCr ncnfzå)?hN8/$v}(x?L {5拞}^ɧ! y-uĄ!Q'sO鉚+q淹CuwL9Ni..“Po$jo d ^"ʡ$֤_l揳;hCe=XSHl&Wº}AEؾKEl}Yc|:;[z*U%?W-/1/kCSbs`>"ga76hļyfACw~yBmSTnA1GHZVݠWKITc96P87G+IOh_ﺵoxHehaf5yQWQd&D-C5_ehfKŨp+U (~syK#>!2z:)gX*Kꖙ(й. q==g߷fK{۬ivX!PfIKG0xX$+*gdb>wW\=1Ě#a!:}y%?/;@҇f \ܘ0浥V@(a7zdkDCnI_#l__cj5nhdza#=/Vav{o+VgtN,use5lތWz܇_TjG9Ӹ iO*3o!> cuC,RǸ?׿vn(nsB_g>4om'_z $KM$9`NUV0l|Z{x#-1e8" B; kE.p~ WOpo#N)rBKBtjՍJWBU:pޠB|" T`R3cFg ~;n-sy{hI^ps:fƥ65w[i~1߱*@,׉?xXxV/!';JF4JPI ~\WJAEZQA:JX Y*h#{krhvjd]w?׻Z7uvy0ߦr=8I`tG)tBUD[P8Q5T:R $L_;}svwY``T_;Ѥkq,| Ņ8a}~]mlc Wk1 PA/>58&CGSR]ɑa9دQ ̪]4V2 ۾! Жltj{{VƧVnw'hHr~ q*6?}q {nt`9Zyv#>a#뻪x\LKBmV)̫YV}tQ  I#v~dxEmpSE!ϝɨ g~y|L R5RƃtqC[f/8mI" i;xɦ 34" 5I`nlWR]tEon U?;s퐥?$plv>PSpO2{H73!2Ȼ{;b"v//;(Ln`Y 5N?Iu[W^#1ZFt<={-L+IvFV5@"뱽B B^f]oMg0Mc˯I; ?ik z9`|A& wx+Tfis2B=-D& JS7Cmygwr&)<p3:d?gV|\ tzm4k|LJs /$) RfrFU1X=;\ཆg-x@PbPa}w)~6hq$mr}>}JeJ^\>YW4:pV«h7 īk4(ygA{F9e])OԐd=&ɅO*QD0RC :9J>@E<7 kU.ݹ; y%^Cߘ'eS7nQ:AKʌ#r1K|.*ds2k>=-f(zfv;ǹNVMcﻯ_rOM  l?ߔ9o6YR|ik$󁶇_#~osĊ,XkP53J Q/-y0xTʟyAcIsWऄ7QO~=|q~qlVK^L&] 1[>V+g!թSGԤ޽{,:ױsY>z9{a|XU8j! W 5}1j毓]lKt_6*^'2K_x7/BducW{Si7L3 0C4-{nكy[Fwxfćy7X#zϽJۯ.x(Th WNT̜0/cҩA{fO0UtzNB8ս {ϡT _DŽu_ǧvǓ]fR\z/͙JH~X_z)mhKe*DIdջW^iQ1@xeh1F{-8oΤ2=&\WFW_0}ܲ=<)vgޒKw |g0uD;&as1MX+3@ U/Akj#o?i X V/nka#'b^6|C` _xH̪.J-!WC2|NmN$=Ĥz$.\݈w8qe v9W˧6O0RLpw$D3:5MzFM.hn$jϵZbFኜ'ocM7XqDEGvh%ryW'nɷpcJ8ō"%f5.t!ҟDHi-829W8|Vs;坠1~jDz4!/|o:'y:qx,ЁnrSEn߁KpReH+Lb{WJH'НSf؈_\~w' Y(r{¨aga~ߕk֛ ?xo@`q{3-æF=z'<Az_^UnnwT9g8K%R I8;i#p?Xy75@}'XҜڲUOr]Q"0~f `;o]#޽ l Ԥ0'x߅I4"'")B\zRjCpIQ oo[y4%AГV0TL8+zb_ŸOB@g&|G\N}f+a+J8Hx_Za<¸ dU*)Zr(_|f^J^{jyЄ7aw/TDror\!aw(c;z9>rty;D /U`h{bn ~gc yr`9E|}v^%?0@gQ~C ]GFI߳i;POID>ʙyN)OlԐ|SpA}>.yq>d`/jK8J`]=5y*LCI\y=chz5~dTzT {7JC:Yc1"ؼW8^͇.v!)gpv}'Fj㎝Drn0?n䨒3p䊦I;#^ؑV}z(^8 g\> Δ󂡧96ݡkq͛Iĭ6T+YxD).'"7*Pc ߡ@;6ڠb2©(*⇿H @1_G_[񂺹#.p?>_uɑQ̽ ϾwjOۺt5#@,Q >yH/E<@vMZ)9h19 ՝zׄ*LFHN2de>1Þ4^Mtnjf_t}Xl"g_?V"7klD01)\ՠ\JQnd pBG}@0UTrZ7-99ޥe (DE6ڇ>~EʲyF^ #/Sށ@lPy .MMۧ4V߂VSSPHA,ޓ~H8#e(YWbh $D.{MGvGBg=ʏLoos<(Tͣ).) )[8i)[`P 5E44=Va:dn=^wph 7k.J?yxCֺ &TMJѸCrôV{)՚t 7&X)յ w}"~ZITV@LoD(8ݗUl(Rx&&ӫ}-ޜ DQu 3= }OP=12 qdu0:ND}Fg V1C2:ey͗}(^(G V]Tp-,jw=If_p̈KI8OYOqEYq쯵\oɽYս1O]w5'omLp"XV?+v*V9#F:!SXsid`% U![d*(ϐfsϏM{DAWЉb  XFdžc4sOj탘=A&F4Yk_ŕ^FIy=ݗYfcce(㟯O]?p!)![%zPO QF*͞-vך tb(;PRr9U9¸Ze:=ej-xk%16X^DB#sm-06t?-Z;/>檀́AI%ϋZCW ^g33 $s_iZMI]Z]Еiw}9_LCUA 8Sq6R.gFq-n5)yQ94ZReBwJIaϏv'b࿜N8bA9jTu cqw[#V)ƏBVUc>ގ(B^'WAϏx?ny{\$5aBݪTWp2 p??s"X]jM-UXZL0UN#dLfSkK8 =܃J.+;3}c@ӹlF` e^p7_H&Ȉ‰ 7/kM,PFj|6\KO`vԹkh+J/Gej᪛ "ĕ_50$DrGwnk}S|\\^2JftU"qi G)Vu iz7^ar}$P?+-@zG~aEd Yˌ <9@zPs1]i8CE NHE3KN྘SfElBB1sszT0Nә8{2t,q9e^اw˃*' 72&#fSE}ƵRU >'HU 3/>:}Hǎ ܉? 
Rn*'?Uͧ˿&ˡ_ſ?/d?/Zlǿ$˰W?_ܿ+MP[?Ws?"5?-=SI޿@)7?xJ?.?̰Ws?KěۿT67!?Z]ҟ?K?6˔ڿ 1?O2ҿ]S?tCr?*GF'-տc[??Y'G?6RAME?5IZoR?o.?M8 ?h3:ῪhP ?қ._w?7j?iP|տ܊?I?64*?386 {?}~G?0a7?^َ?fJzW!B?Q 8x?m?<߷FVa?7>_˿\z?lduJ?=)ҿ{PhѿT4?Ms6>?l/$Ϳҟ?o`ہ.?jc?}?W忡0&q?@i?0g?/^?#1+@g?o.?Km3?aΒJo?eۿ]?2?/CI5k,M^;߿fuJ?@Iy?3d@Ee?!Hh?i濂z?v|R?e"?Y=^{?g;?re8DzՂHbh?'$?^Rۿ!})$?h=?yJfؿ1?,wDp6"#u?C-`L?ې(?>䫚َA?./?t?^ҟ?(!:^?2?{1? w$LyY]?v Կx?Io?i˿ ؿ8xY?*r?n,ÿ 妢yB?N꽏n翪Hl?e+?V &,?EYY4<>)7??p?2?cSxۿ>7<??9.0_߿-`L}? @b?\"ƕ {?g?K ;,c5Ey?JhP?PE ?hAe+9?V]>?d[#?X }:V5 ? D7?SDB޿kdu?MiZ? 8x?_?g!4 濚?<=y?cs?'q5?/abs???QU?erUAME?dȼҿ^?v|Rn?Z 1ӿpj&׿]#˰W?=/?Õ4ͿَA?!:*0?w|Rn*?vM?Z)7?]Q ,N?RUG?e+?SY\'(??T]?ER㿭.^?3z13tf}??/h3\z?k33?*?YkKK?˜?v1Rts?NEZ?Q?PK!H?pԙ[ڿM͙?jP ?7Zɿb2[޿Jv|?v?re?TN*뿳%+o?T4O?G?p뿟P 8?-+٫?Mv (?-9!?iF&ǛhP ?9ӭ?Ġh;⿍.^?~E|俬-St?I?k&Ը?^َ?De?AN?ݷFVa?<Vy ?T4?t̠?Yjc?ϿȦZ0?/^?.mؿ%ԿGo?^#?i}[71ԿeuJ?e'GB2O9?v|R?G~p ?{!뿛'$?N '? 74?1?Kgr返??َA?1?@a2?vebj࿮OgB?Io?$BJYN!妢yB?掗N?R6掿9>)7?zv I쿿?Xa?S~?B<q5?(#1,m?Io?%(V¿yB-`?=#o/S?9ń4߿2?Y2 ?T4O?E?˫WLWe?:xl?v~4?[#?%fUDh;?-`L}?.}=?qGjIhP? 0Sۣ׿;4`?d[#?r8Gҿ0?ܿ}kdu?E[Z?mʿ?oTFrF%I??@Q"U?Fv|Rn?L|?J5m?َA?bnzi::?)7?n,?IPO???clUI?9!H?8p?4U4O?Nb-?d7c?o.^??`9-n4 5? 1?@9?\쿥?g;>?dRnǿPV??t*V0޿瀱uп!H?WsG?pIqдٿJv|?ؗU&L?T4O?6j?#!+9!?sI?xSm?GVa?xz36PN?bl?xw?l>I}Rn*?R^R߿mB?al?t G׿^ؿAME? ??Oҿ+'??RNT&e?FZ?/c?:c*bL}k?{y?O?xn- .l??@Ӗ?շܩ}翰Ws?[Q㿙"ef?˰Ws?%(zBV 1?*2?!jY?c[?߅{ib?o.?Bn3?X{T7j?۞RCBH%h>{?Sn*'?w8m9wnɿmLJ ?O!~?#߿ 妢y?V7e꿚PR?yB-`L?B?S뿾ҟ?$8#j?Nb"3?+9?}yPq?Z?)?g޷FV?ۿI?(7j?Njп e {ZErS]Ö鿹8 ?iP ?1Э?zj0s&뿟P 8??>e?$?hA? 1?)xu?kPCg;>)?z׿uf?wJ?tFտ3Wlܿ˰Ws?A?|^п}kdu?)[~}k)7؆?0&q?ecB?E)P}kd?|D?el%? ? N鿜z?FVa?gms?MgXT?4AI忲z?$˰W?L;hbfڎ0D\]ҟ?Tqi?8r?AME?EN:.ldP??H5Tj?*w0sC?U.ſ$*?3&q5?rVOпg+9?68w?F Yݿ ?j`?<0&?'H?HvK[#?W?Dt?@0&q?^7쿶I$?sS<?N͟D?`lW 8x?XT߿ͯ?o.þ?;˿n1_W7.$?>( ?[ʝ8xY?-Su\??o?nUlc?:y3? L?e+9?rq~濬kYFME?/?)7?<1lCT豚U ?jc?xw?ѽ○ҟ?!(?ofg?}kdu?)i8Is֣?n)7̓?``w?wh\ZOI|?sfyr"/?]p?,z}?WA,psS`S>>>)]jH  .6C1$1FhFhF1F$! !eCre3l~\q\OqL?a?Ta*?LT?2?*2Tvavva   5W5J55 5J5WBWB W B-B  ~q~g0ERggR00R00E0-dBOd-ttg+wbwMb+M+0E0R00E# . .e.PePeezw99$1$F$F1j3Ur]r;];&]r]b@+@U@wUw@bvVQ<fQ1S19NAV A  55 ==tR=Rq~y~yv~q~v~~/fQ/Q\q&H&&&&33&H +3@+3+8#bM@M+@+M8MZ8MboZMo|||o|Zohuu>S`>`ShhP.CePC]PrPerXeC6XCX6KOdq:\G\:Ott[F[[F$VAVAcccc  C. e.C. U33_Rt_tl_*2*ivTiTG\iGi\~nl _(_J_J__=(==_(=R=R_=#EZ#8#ZE#OqdOqd  +@ + @MbbZM8Z8|ZEgEg#8M88#m6XmXmmeC..C C! !CXCzXCezeezzXXrzrwrzmwrormjorjjH3H&H3H]&Hj]HwwUjU  ^I^^^'<'^I^<^ffS>u>S>VxxxxcxAcxVAkVkIkk4I4kIk4VV444 ,AA,c,NcN,W BW5 5WlWl((=K>`u`YD{DY"fQDf{{vn{v{{    -B  B - \~^kI7*LdBdBd-dd.&]HH]]! !6.  +#+ g|gtRgtgREZgE|gZ;&.P;.&;H;]H;P]XzezmzmmzXmXKmK`m`erzezydWyllyWy%G2%:G%2dBWBdOr[}}[Nc$NccPeP.eP.@U@Uiv~i~vWW - qdqyydM8oM8oMoog|EgRg0R0gEZZ|gg||ZEZ#EZ8#Zo8Z|oZzzjj]]4V4I^)> ,, WyyyyddyByWBJ5lJl5JJ({f{fYY{Y{Ynx?\~~\\::\%T?2T?T\G%/}}pp}[}phF[}h[h}uh'<'I''I^<'^ D"?vTv?a?*a**?Y7"7Yns{nG~\   F9[<^IYfDYLnY7L7YD*"7"*  "  "  ;]&;&;P;];;P! 
& &.!.%]rr]jj,AcN,c,NN99[$[99N9$9pNp[ppp9[pN9bb@bMbb@  {Y{ff{{{n{Yn##88  zumuus^s<^<sQsQss)6mK6K6K)Km_J=_t(_=J_(__tnnaa?a:OdO-Od-O-O:iT2Gi2Tixxk^kk2xVkkVIV4IShu>S`Su`hSFS1F1S>  /Df/Q/fD/ avaL7n*LaL*7LLnaLu}ksnkljlknmkjGi~~i2G2i2T2iGO:q:\qG%%%:%G\:%\)1>)!!)66)K)>K4, Np[9N[N9,9$,1$F$9F<Q^fQD'44'I'<I''/7D/"7Q/D/Q<"//''/< %-::-O-BO%-%- JlWJ_l,4AV4V+ ++M+ @+bMb+@'<'<4''I4I`K>`u)`>K`)u``7LYaLa7L7Y%:%:G\iG\2%G%G27"7 " /<<Q/Quzuzpu}kpsupmpkm2??T2TO-:O-:)1$$1)AN,4A,VA4pcNcpcxcVxcAVAcN $ ,$,0#  -5B5- B5W5JW} , A A ,   /  *L*?*aLa*?**iivsQs^s^ssfsQf  ##     R=0_=RJ=_ D//DffDDY" "D "7YD"YivvTiT((5 ( (0=(05(J(=Jxp}pxs"7"7xsx{s{}x}xPKx|@[V0D0D vertices.npyPKx|@#B("(" ZDfaces.npyPKqfdipy-0.13.0/dipy/data/files/fib0.pkl.gz000066400000000000000000002734111317371701200175010ustar00rootroot00000000000000d#Mtest0.pkl[i[_2tw5()&! bDD QAڻGA|{{?HwjصޕӗF,N}?F8L>UkQƫMa)k|<97֡n6EmdgǬU:J}<ӸWJrZ.^J(R-(uqjJIϊ"%;gVJmXcLkWxR?~!CpD{ϵ}e5ܰ=wj76 }v}}m~7=^< H "~.=.YcnϚgV߿1c/Fy570?qk_yls6!Μ;zCt%]4{o;삺W?Dn5on &<Yz9ԏs]y,aoε} 3o%]]Vs ; A5"ǡ׽Ơ~nZnӭx=\veÞŞ_dV-@21XvLV?ߴ썂rӻ-y~i㮋+m߳~<֓R+=(纥˥S!JQo[PEZ r͢t>{._qB'V,,@P?1za6؎mJ#+kv'VsXTvBٵu5{Jb_vSE\mxCMq= @ŊjafKOYQ_q%;(qp?jHdZς٦u}2`w&vQM[V6kvp11m `#f,.NxbhoPer>YNXtK%cYBEΓX.ݺ! [3eTa}X= j~ zXW|xO1|1y.,Bߓ꥝Qϙ9TPE `~mp!9 VȍSY%=o!8KM`yK5+'\29: A;_x^M@>㄃8Qgݪ^>:l N$.+;5 . P}zC74}!4H̻ g$"Zv C@g:K>{'|;Pl!]8%,6`"PNI:FΪ?#">Z{+#>I&.H /Bm] jggˊ=]D5J`M]0bUrGLGtp3"먦@&ߡ"؝ H߹:7țGsM=c8qa~d)%F. 0s"rPձïbN1"')"Q՛0\,$;g.Dcl]$?j FɿqU!ejcsF ~jdq.ڵ\aQݞPsbb-oP&r \QT' S0R@йFNX1GГGeܯ!/>d O" ۀSq9(rW4˖?WI\tEZj ''̡U\omw9" K.ZtN,yגc୏/ ŵ`XKCۂd`ww-FQ1[%Ae Ts>ň4Tы N)unJL4NVd`B8ƫP!V Yc ~E.F\yP%ODj E@)l+QOyp*rG`P؆(Ћ/q'T}S+R=+fB-#\hBCM`b=M(ڒqjDrtfn*1F ̟SBjcR~XEW;UC$RK(o?ÃLa5p % 2Yʹ(K-w2$kvjcrU a/1eBx-t|a&R"Za!-`Z^ɼ+%-PC῀wAeH(2EyjeNJx֥?Y#.O*LwY>tAs١]91Cx"FE@<'M'v#耒pM sZI t>D0p^s  %7N"T[pFy~!ƼvhI`r{ȡ:ޠ0;֨ 28V=p"k-"pe!N:rkPͳ#K2ꆂ0SȢv~I|*mI9d:Hk/׮>ҷH.p!ꀃjЕg&A~2Ԑ 3p{kԆkuMDAwPȣyn흊蹫\6WII3TιDx\#eȘN%~x|}4Q# "R;_^KHj(t6 ޝvXHeś NIW =Y1J+omiqb̊]90xt- fٰrŐHFbaqYThɗʉgN2洂'kJ,nrJmPTtUW34E2R'?0 MyѥSpH$*vuڌGhij.YҘf(I]Zr-5t5nF/qi=qvG495m eg4Y[ʇbR [@-dz4z8"wڧ9C 1NW"GaWA̘֩KRߋ(xOBWtMcnyDB(&|!E uTeMwΣz2A~K5 D*\g1 0tZmJA_֗HyMar"FI'6LNPD5zZNh P$]DjWzӅqPo< HpȐ&UT@R89?CjmYWqs0"(0`L~Uխs}:TWWg_s-S ~hq-J NA:nJAOՊg Xda,ؗr֌x97#z xr Jwf^3ƃ# Y2LmMuoiŦ"0CǗ_XTMk<ꉼY\j_ʬ{ϝoE3l8N<}i?"Y j$h."8o6\~s+~͜ 3eLOW cze/~;S0ǒ}n#q_H$—0Or0!A0V9SmEx-3QkDfo"G?jjb&X'/HުBzNPZE4{}ծ,Auu4OC#,;,QGu`%j,i>_NzhMMb BA) 8``NNw9[XC5I~6e?{.-۶G0sI?Mچ?0s$~+H6OX)M"؆c\,!OEj9JI|Պ|(pe2p;FųOIL;g^~_e_o1Ĵ -LA1E:E!( ce_?n濄[l/s-NVZ>QWLm#;R&RpsV.%m63AR訃 :+$*7{:GZK;;0)ŀ>ӺA_*b>Ȥ,K?Zy5D~.Wԃ*VD"n0śxUN2nqO%r^McՔ(WPWOy @٣ XA~%2t&#FAGK=Ѥ{Mn脴'GB ^Y}ȱ?FyqUۜn{9m:߸]iO?AQ9dٓ&GMU@;{z  yM!V[ ы>#"6Ř,2|kA TTN{l4F*DbΪ&NE:9hz4ht^ bP&vFejzrVh?JF~DŽ:ffbq}al`(: #DV] ✼}5>Jbw `_YDUU8.=]ei'V(|?uitCϐ[L1cǍf@qbWǩX!8u ` re(KQ>ˬŌ~Xfn 9«*8dڅMa 1VL! $>%Yv@*kVvmn== r]}c25U1;t7_7zy Jzc׼ 6OZ!g&lzY/mKg0ʼ-q+sV|H4 iTsLR{_1r]jguGQ&>\BJ'"A.0,pH) \+mR Yx~V_cF^eXe6.>XōΦa{K>zPڟwӓrMHV}o']҉k2zZ0[ޏX.>/X|+_YvQtVTJ%A3l`PxT/aXe 2`4ϛ7'/qs/{ ._WgXɑxÄ)*FޏRTW.a|ru~H2D\\ ?g ۑ=(R/]<{Q$C?zgԊ)Lt"P˂w`#eߔ\|B*lB+n<v^z珥ҋ_! H'̸ #~;!f.&=6wV,Zdfۦ. 
peW7$'8] #ZYӫ-_5Ir//IF4-H/Ui_mg7U`vu\>la̒%[aV;r W@%P(3*F)M0ue & %dMBkIr~]IL7uu^o {ٴMo?*Fϻ׫: DaRt_K[>ɫ<]{P#bw66ԥONⱴOtMߥ/?xA1{l8# &ʙ3mcAG DfgQ6ʦ3Pv?I0k;bqN-[942̪`G [PV>ɴc[S!tDŽ;ju@ߺ14nn*/1L# CzW=,"ѧT;Lq6*ZSaʀqEqNI!}i} 7[_tfd\2$,VC~Ԏobh"/,ikf>)|v<krJ;#T@쟭!&L_ @ ӂbC:yϿ klҬTO9TQ,lMj?,gA:gѨ}SAZ ;Woux|F!ϋ5z7 ,ɍz5D12!<54b4y%8+5_Yvb9aAѯa8L wvuU^>{_ؚVg{uY@ϓH!Oo5WNGL"F谄su4`ՆM9E؅TqHsU00O|@쬒s]3lsv:ėV#[ϏX3[m-[ jiPaAUׁ@5F,|<.Ȗ+DÛv}0jdV{׼@-V$<[؃gUDcM2ӽI@I{ :k1Ź?n_D&2C\lgZAe/Y%tTڊDO6|Vx ./^ P*pY M2g õp놟oGRk8y+M;Iq@3.ǯ4iO/adufQa}/2Fv ;)b۷SDQE|eKNRrWyR8sh7DzϗZgȸXѴqQ}~ylD\v#w2`T,$Pэƈ^ɳ:9A$tu۪#1-5iFނS늝d'NoHdf?"ͺXL,9?XI 6˒Qd{WPhK)ͱ9}e%eAh>.kӤ0>|+ sܼDNoVZ(ݰ-a!=du},Y>.. '#([؜-{25%AlUoQ(*9ÚVJI& .aE<La$k-MĐ"EJ}Ȳ밷7+)k4Ց01|ʒ ǽt,Qǘ& n;]8G(iÚ A. :A|x!L%{rK bIe}GɾrD#ǟo)Ԅ{5>jCWGo؎UhD^*JAS%=!oX\DXfE 7<H.`&-&3~Y/fR=ح$aIY6)j6OD^=xxF=4_&8iDnM Zd|V !%PVtEj +x2F : r tdQru)dK+|࣋LF9QVbjhQֵ?%4v)kl3m2rt|ny7IQ}7Ć8DRm%u{NXaዃv׋O7'od ((2Ta[-'ilb.DQz{p,WeOXy F {ڽ~pnqŶ匴Zb HD9Z+8B*dctI6TBU(is4/+B?ǓyϞDclfD aNPTe:' x FɓT݇ViAّ"vhuN~kĝz&픲/"$$è߄(ئk1$# MI *֝lV҆ V/5dpKM!$\:*ج%/q6|* ŋKfeЂJ9v7HA]wvyIۓ/8Pm,ER#|x>E $eU62o5Q}J˨h7I~E؆bD^ ht;rmHZBzY3$;L1h⫛rSlkM#4YȊޤR5lYyL R=ʘhr[<کQU/0.J>\jer$]?OYxJTM2OåCƅXKz69ǫ8'r c ӝK5Np/6[r*<.#Dz`Լulҿ{v'mZI&]V(|̪ݝ=+VwU_6.ʚ¢N# ؂W.XpJHOv68)7Aߛʅ†rxW[U bu9!笒T5WM1Hy 2Pd)ѳ&dt_v'JU4pfqW՗?ODn<6-sŬDEwH*&~v[yJ|b"-]un?'Jw6ݬLeAwv<&ÔZH `1ĵ['{g۬yI*F"nؙ37oGSU XrGgF`'$Jau&;*ʭ#K\C:eTE6ol^V#Á@5WǚeȰ/^jZYdeF H*u(}3Sl.yB>|oSoup)5\kPDjZWQ]EѪ:G 1ss9G37w8eJOS>VGR|n^\/DZ~ۦ_B9O_eŢ,t!0[l=u5`(=z SE`K9r5:1)ݴ3 i;O#x3ށ]@Op2g02dgui `4>E@H|aJxm t.Fۏb*æn6׸K*!ZIG>qj,t1NЀOG&("ŠA Dೱyd^]QMAH\_(JM2#GOz^|*TCsOl=x8:_Xy eX⼱nnˎWwmd#Y"mǂ19xs1&8%|h$ќj\x|morh.EvDq Eoɮp&toy(꫹o[cdŐ,JX@Q*l~U>XV͂+'$#971|g2sqٔp1\iS"oNl~j=NMbUC1g\$ENZ4+qeA' }R%+˿Úl |gheCj֮+syldHnė8Kc^-7pci˰ElmWÌOi Lq3+F Ks/ĵ9Wόl}*_*i9&^6ƅpTO}HkB|ɎʚS( QEIoFY=8'F9#Ay#g0O.h"fT_bbD]X%G1e…x?x\O/:*ΧA3טTR Zy/ o/wO0&ja8\=1ȄpjFΊz,n/A+P*¬'g`k|ѤRyypfr m5^h Kg%mM1WTw@ "hom}Z \ QI.=fbPAсx֟lUoD4L$g2\/sQp  qf`|=)YqfAæD <!*85#)|&Yy t|!c;g'?nY~e5hv '<#(3ŖBκy_'_8 <|&g 96dAeJH9ƎH=-Sf3dUѐ3ɽPcX{US E)D/f=%TYIͽKz2aڐ$͒=~\<3_`VPhpHMer |,|`_mpdˢ왅4r)OFSܿX[ tꦽ/+~@Wvh㯒myu l:)^u촛6V0JMᷱ,dNfgRJh4B3j Y*`bJ0D9ͥsн;ߩ\"IY8MwG]Of Q"(:[HJ:YjU>\GHspoʦӾf9$XN`WĴw0NwٸY!$o#BzQd&@" tYH{ԑ=YTOqFX) Yn:{jcm6-5TcjknÅ\ aÕő*eYjuk\Vsǔ)"륂 $&rqmsjgHcr.ݦ}*V Ҟo9V4s-RCExV0C4E[y"&4yBf!_t;8;?0\z8*̴%\#$WK1KeN=޲WLM|h~|jU,f9h pnrm  voA'O%Uz T0[Dިew M?I64 3n*A#uB0>@ou1==wB͜yTOCM$A'_6%plyXZπ+g#q!&yUnձm4ފj35hv'btՐ(/NIqLDž%rD ˵g`(pť4vkY,7 K\S_G˲8Cty @cP%`규V9@ ,v{s,)v,:c8 $TMsJ;ߥ 9Y*AyQ>L(",J]pmܱxļ$'_NKZc$E:yى 1%{ 8AX_%Hˏ`3 o_(V#j{*P7ȝ)%S1&K-Xpqسx[D_c t1OfM>7W>fW1E%2_7DONz $|H0Z3g5vM5"qnVhH |̱rCpL:?Q˕P 7WY_vzV}̪X @ 䠏| Kwyzd OA+5X?Dcl՟a~ÿ:!y߈)8f eLIXi9.G9#gAe>1.OVgz#ug>39tyWK]W'V[LMrZfeQY*`69x ol ۾ybTFA ՜Y[G]fg#e .ߛ .A Y6%Z?_.qsA~8KFK$c.ILs=;v92ţwr5>$YZd5worhTy4TEU{$dXmk-IL ߊQd]=bXYpoZ=N 5$`0c6{{}WmeYF7o*/ٹQl ^د2U~jXW3˃9FD@4}e#_ \)4\yy3*RÄ>XưNԐq[t 86wr^yvrdgbD<bBS`EZ" EC|ɕK͹UR[USagDU)WǡYX_x}"0vEא}b.ܑH :_K$p2u5B 8 voZW0:!ap' Fh&Kk5gE<)ARj 5~y)+<S s%qF! F¿lzІqJh$oW"`?{*yGLxag}9w8~. 
S[ 1.ݤ`#J'>79{ٍ'*wKZ_Z{*NlpA-e1ev_$& 03wI0ؒ> Wzzh3D0.|2W4ez Uſ1ń֊}B߀jE҄YҎ|lRMr`˪؎K29xS`q^>4rIK( %Ԥ隊C-wVUY{m .$[cu%/+EcupWl0M" L/; 72̾Yc=:F|w@&z~*/' ^p TKH*eF,=cA<=]Ivΰ ߃=g+,\j d0z{Mb6-jgݖHq$01cfRa/GJ؛iW8 4L`^Ԑڤ@i;~l'x\}^y"K;/S ~2j3oP>#ye501D*橒5.y߻8F;nZ}Tivrj(ۜNT&i >7sYۄH 0-*4dFT"Az#yQۖ-aFozmj| ":&(zEV=M@#,dGKYnwut|[_(I+jl|ͳ 7 @އ$HUd 1/aRO)> 3vDBgU==1ޱ}W>:PNI`_С2Tq&XW1 *״w-wĦŐS46 [sS>g2X- Y1 ՛UPOP)_4>@ߑ_(cAڱ S!L+]O2L;҂E=%{򘳿 ܠ$jaZjؽq!i?,EOHb߾{(k2)ư4522ax'MΓpHQހ&W {7x)t JI-Nh3am8 X+r|(^}r4ERc|RajY,xVS)6/`l'I^inm&#oǾ| bG/jv34pӶ*][oݡ Ҕxw%2sHF(n4u'=XS3J i1ю\=9><$ )͋'6Մct9h_P]_){I"W,wzIMH;XroJvKD̴>c&#|>nEajO0?3 SnSWDnRmUI?~]N4 q Ob q|ٞS@+r_Є#][fẉHPqY܄+QDzt<'i$z0&az(jMB ,g7{K&, *vH۱zsaPݔ L޼/ka(Xg:Cʉ;X zutvMsY-\lGڮZէ=`-'_^Tu =r}Ylc^B-#U3PBrO-3ï'r d؎GC2bm ~a[{yۼ7{Li'٤T!AӗOن$u,plOqdX%@2ur#MȈK'nZG >Ь sa)*`tc8Rb S[jz;T4AkO Ɍ qMȱq$Njыo,$Rl 7rT9 4+ 4e"ej~tra]Rf#CݡT .dn'զGtL3I| 5mXg{S64 S\a<Bbl}zbG+>S- H;a&~\f,B_F慖8R`=0>H;ca\m҅#qRXœΖHk9p>JbbjVHrwb4rUf˹*S@~jFq{qv)7 K)Z7my\ ƻt˧ď[^Z%"֕3Qς2uh8 a $Di|=(cQIU]/c@ "g 7 Aq]4?%3%2*!(u/xD3 [Ɖqq SB#iJ;e7+fL(%w)ks\tIUF KI=_E: d;LQ7^6%}f׎ B{_[< -g5H,8_*JaELgcI%z-;3G_vM0qvJ (-0¢Taf&Q۲xs*o4pg4o$iOʑQY@pՇgٰdXfC,aR_k An~DJ\zJHWhfEpL?ڃ=Wġe:TG1}2=X/d`j}5/q'>{sa C-&%: k"{Td9GLH۷؜*D F $4.򮣰!SB?sԊuz9a1!AIBl}o"-gPqFA'-==ץ83|+wAդu8)X')B>TӦ\U{pv` @*+JOeS2ig μzMA*N% J!ń-31{ +8UGLBL|0K'ɰNUqu݃3.<z8Znu` |hʟJ+HؤcqqR\+RΞ;j(dn,t邌6g8Ru|e_]'zg-ֆZK^WZ')6'sYLN#}D<W7Oϟ64?EmL0:Lii 3{a1|4uO}0C5\{-/tEE >!^tW[ #B1 _yPVjaIאhteXU N6?WtOؿcBX"Ⱦ_X 6.xFպ8<'(] !.ECJj+ IF/7c3!Ǡ]ݪ+ڌmO,&c@RHh<D|ʛU+YWO'FjM%p[L!6dG!ZXKA"3VMgT^-H!^+b3|Ӷz?lvdȀ7*ďYC &Z5vK\%e,d¸,>{E({*YS1V]S8lw>PdT3Et sƩ٬1' s¤Z">'Ӟ CtѰOR-ur*Bldyr.7 |A$6Oqر3$b[<8xኂNCί7\RLMP4%b56Ǭ6Zr[8Mja>Qܤ1VXZ!N΂'u\I4踽wD)M*Iqw>$\7$~86c^oH"Aݓ(iR<(z!oeIV9cʔX1SJܺMlL-oq!.Oo[/*R`Qʂ/a]}-NٶD5+0!Ud d$@ȤքD%9G9Iě"P$E_+b,dÉNefYcKx\der '3/¥{Ba0\,JqlN5F,Lz8pҘ`u8P;3Gb|EËz:;9gSO}Y5) d ^1QC$ l& eq7/QD6rVÑ/'g*lޖ#p^ĺ-8ىCuT¨A)ownb{kWҗO⵻fwȊ!*8J ]:nvĬwd(.<ݷ,iBfd,T4ݕxXEx9rnx'B3qmQGtLr̚ڣy {iv݋Bu[b#Gٸx1i]΢JZ^mQqn(KAɿhFP bMI(2ӟ!ܛu[ed7UZ|ྜྷ~ ̲ ^n3&Jу:s2n3>5L]Ŗ|jުeeTJZ$8)l8JO:%pt\Jhd .u(~ R=4J{f?lOa#rH)q#y1@[8I5_iWi!?kѭJ{F ǽjGop^Jb(T7Cja7Ŧn8ďސBCkщ`?H'%B.& J3Ɵ7f1))EXW}/>G>G8E8CP*:VEX>H_2<݆e$"_YhD^#_uZ4xW*F*س8YގX'mmpYv6($,|F׸WfN;xžJHiL b]j=M>~ Ա1S,j!E?s &%]*^j^۞DA7 _ϲ~bwF}ɓݠ#{rQ>"kȖˋ/8br'U ! mH@zvh1b{&Li{R7h Άz1 @ Aɚ[.UxX۴W9340PKMU^ |}#jA^-Rg>vt-^Q8NU;~ChJͳۖaâB2bLr1JŰe>p[?q7][HF pV}h>ӛC{8e3槹 8zYJ$mmZH_8kxq1TTcby]+m]:0z1>Z8({sv2H(ZVNLk#͹]_,ɮ^.&&Kdl_i搟ѿ'_0 Ǜl vt:x p3Vs6y3TnKhb/-V7j;ÙV:p|?  m7^n N+8bRO =b/e9 g~P,DVw {8ݓPE/KHu_Kl,y ViݓȠڢӞژ"4BgBϦj9 anSM2sW%+yOϔDnkR%s !φ dmi)Fx@R|] `47$Se+ZilKQ]оoyp䶗@q{/ad]qwqm,8ia]\@\j MV-&ryrr4KeDkp+$nHz7߅K,PKJU{RCTJ5V"Iwݶ*`3X[\ IUeQ:z<[t½S( 'n"OB%憳7`V.!z?fڦݬMBB+9ji$ח6+5QZOllAK: XvS^@Jߔ/R ͬW-wU)g ДU/eL 0lYA!vJ CALK(#Q"C&vؗi~B> ˓gb_XxM6vEg "AZ3SKE &rM%+DLđ 抝C.<60z(m9W#v嚢XF߱~P$g:M Jhs"Y2y;ȹ+Q6m^( GQ¿ѐ0 )M+ XH0hm-̥^:1AxnYydR|21ftN/R) `3Y8%9Rɵw~sx2ޫS5,$m_rX"_}d1+` dAiQj,P׷K[j r(9r˽nS!gKOX uuML84[VA-w,[vs.Ԕv"!Y&w$)O '啚:Wqt7P}2?!+qPUdҶ3RS,YJLIꈑ?9EU}aLOo'y A|\SQ~e| ՇJR f!ֈ446XUJ$: Ⱦvg Xqv[|Y܌ȴ ^N(BrdmTf[ Չ.#&Lc^:456FZAJvc ̂,17w+]. `[H݉'qkdV /E)C? 
h-pEܸJNX!;!&dCvW#UBORBBO^ %7eǗho_Ay˧rfڑɆ8x)%7C[ ĸrL{?Mww,ƳS)[ Cؑ/Liro.a f"/ĬgݜM|B0 ꑇ<[ 0cΕvS_Yrzp< fTMo ޽S-g4|hۿ% 40e'PF,}8eR+F)`&T(~ը(A|us::V2᲎Dtt'aP=N`I"VRQ j r6r5u5q)vQlRcMWpBu&L0Mتf1kTu[ɕ&)uexj}P(k>2wp"Z0.Fڲ+#P/*2L$[z'lwL/63UDy!y*I.٠&r]],gD\^B;p.PKQev2zֻgB[^35m% IwCM}alh "<a!7kM]rƑ(1 ޱ5obh 'h~\pC&fIɦ}up0}x#uC7>G&B}I"Y_X+c68i"l5*q3(Mջd{aV)N:#)a/]/7Sp(;췬^{zݜ)*M:@yYX]5;)r;V*/־{ףD2ˣ4W0fɸt}D6r貶7GX9`ʞڞ@3VkkR) C<~اhLūV~ -tEMPqpy02O0-) ޙcs1g=̒TvS3R{>Tl,Mсp.psI}N[]Y,~vz ZmfmNIJJaO-+BS\ Vh90Z='7tn ?Ls KWQ/ݧ"_Ƙsѡ^$~FfI8:o+!P3}m$bHaoL?A>#rI)=83[ G̟51\o$FqkDK^I!fZf֜͡ǸogdEW} &Gdt!]-޵z⥳z&[uO.cW<:W+3zȼf-B|I=aWtս#ob8IrEʄ^KMXz~J]"E˱U4v6z&ip*?$0j!30D҈|ŠЍ8Gªscz]a)sxb@?[Zb= JVrj0EFafw(R NgK {ކO+2ʌˉs4esge*MMm+6D9cFi..W*۷V ^O8qi;[\o>[r[,"Km_*55aelOK^fr_E;*2  S+Gtf9+գ]M0#aGG(;sOqXJgRJQ@ڿKit* [":2dްW.+p==MD#nRv8/@np]h80EtbOPN~X+'}ťᱶ4_-B#]N?qJ)pR0['%$&zbx6S!U2[SUKӃ7sa,$XF+wna/~ځ\}s`g&-=q4o˚%=BQ&e)S,K{ KWdMIUq3;5 &Wgp(qZ ~)F7صO~`aT-El;Pl%,k~՝ۺ.ˀ}9&i|聶i8DrRKz C bK׽ N"n\N rruZ3}ei$A;B:HI["Y'ㅀDϹw5&,׭Ѻce 5u8i[8\LCW\݄"=Vefm߰2|C)Kf/4`1rQdڇqo+ƥZuopTP&|U\SYBg8iC'%h-ɏc~0}ÝGpO 1tL<[hWZќ9Z=b>O-25cDKI_ZH-1bױ<7wD8뉂}v\W6[@uÎٰM5N!"԰%XH&8N%?DfՂ,R~D q ZtecJzHJi:R+h AWepKigrLs*rV iwxĽb9,\(x:^ʶK( #=Fv+j1^n q9b}b[ w$v nrXTz]n1CʎNWFb^ 6X`'XUtOh3u w^MS| gޱdk\9u5 n=lN#%F$΢f,NYgV$?X$70o,Vdlpt\n6/'vau\;>%L]M-pcY‘w`UX?EY ټWNp.lXpVO&XJ'rϽK1)_8Kn/ Oj$Yb\Cfȳָ-٧ qeЋMYI M! Ku^>e즭SOUUrB=M;E+IGi2b&ce,%&~B`lJџ;HtlrÛ@*XLw {`674j~sK'C !08>*]37̻j \$) V#{X0f VAdm %]bD%>WՌWa)@n_x6Ȃ:w1`gqaѨ oPS DΣ#qIC!SL\sK]1 \rFh1RxxsdTנԞŐ*Òe- D̞ZG4`;0kW/rW4d|ND-,|@?YslkcbpYgQ*N"( 6dK#a4CtWNÜb=! @&5P#}*aJY[*@iwۤѨ ͓,-hA+3%뽆hDjik7Oۀ]po),R_]b JV\Ddv/72|m EzCU9DX (Sªc:Me̓2dnZy:&#orK]a.]xf.b3D<E;']#N@}tלiNtd >8 Xޙ hj[3y's&K=~ebDXkrB껭&4#qc$ i Ӥ 3@ic,؋[)>|uM0IAJ.2Uʂ?dE MbYlbx8f[l|I" :&vSoHyD ;_ױxxR&QzΕA<;?q2DQ d9?F9~1YAr|DSK%.*8$O{Il;3q8"#bDB'WHbE3lH*_P+_O빤VesR2Hu}g] ܷOىwح#/<^ߗ>kuI4&h ٝ0L9ugLX2n`DC2q|,͵\e%z#LJOرuم0ܜ~>e(d{t}Zy%]+y(jH;yYrg%Nb qC`f>RvioXSb$QrDji8K7!2 \Ut}i{|rn7U׏Iѡ_z Gng(ɵiΣ=t kOAWV5G߼`u(z2swk*#mƔxxۨP9HF4dHGsPGI+7#saU:TT:^DKHc%9řL.zYM ?7}s kwµK#jȹmjRӷ?(a.#_)l&^Vޫ:l0E=(4?F)A卍F {d]HX@4`@Γ>%7z@\ŽRM'+|=Gh4;vMR#,_s G0ΎX%D}!z: A33(1$U#)mVB4- 7~'_&i5`U#'|[J˜BܒEIH$3p˦56.?oR7XJ8&Nh{S;SQđ`Kț++hW-Q!/,^I)r%c :W硢K֮ tԏ38z+j,)$LY6wMr)Kfh+=rE74(<-km!K6w=H J])oя"{Π˫dJ0yT))G% g }4л_j۱痂0r厲2yz${H[FUG}$űB^ߙ,a. ͈Sߏ*G9>iA}EY2AyڣMChA]Xp̶:Mt@ި,ip(6A%~5s s[|U$ U~&m/H0*G79W4oFڕ^09Z\BQj4.'>ց3 Q`,a(üecAxnC*Y;4LDa`=ݐq͘N"AJjj>UUj ҉[WÁנOF4z 8SrhU~k_PNciZD(eiPE^B^X"K`+a=̍ Y0O&TeϝfWtnZ3dN2)+'k2+O떇0A<P#2n4LEf)%1#-Q.mBnbYghgobSK_}Lۍ0p ZlT[BO(=Ծk)"(:9^={|_j=q &%")Z<''5#GV>tl0q'8س2 :{٭5VbcPx߈45zAͭǣyP D3$Y0SI64r䂼O̞oGb~&_L1P0VzM\ 6^d;cn؆#xTAbBފfU1Ҍ#l%I2h_$٪zU$bQNGKp~F4z#1 Ms捨>0vŽ҇C[2[LgگeW06\HFlE܀s ߎP.PEd&*BYI;rń!qS.|I d͂Mnk/7DG*嫍 x`. l9xT;r8 -S8%}0տ6H-o$Fw-" w,CZe .|=!QFƃ?sjT z',eV.u(UqFK@b5q_8D1[I 7{/'m A~Bfb,]%5 Vl-(X4NzWZ@Tl"t8mhvrn}H3Vx5M4M2S28I0wI!}jLE0a ƛ/Oҍ*e?!Qv*ZZk |)|QC_eS Rtwb2Xݬ m>+qqiaoG%$AR xɌ:U41/>H$%~p7""/Q,1!q`N乙UK;Y?Qyd=Ovm|Iu2#+I8m/$ a;n] ?a+q75Iy^GUaUlH K]x %g$4NϞT-)@2G3VƣlaׂFȁ2 m0j%>& T9Br]gr\er~R[-yhICM\ 3r߄g|"nfQ?Gɢ+GaRmbLұŒ}Jr]F2y,%${v!k#1Fc`AoW'#I@(GJi-zY3[c@XPnǧ~ ldJWJɁVD,y/أУ[_`yMnN 5[dhs@E|J@}@W[{8թ=PG<*7&;=Uk&4!&lFoZ7ßlW{Mph[%vپ7.?c#eDYh8 f`J敠xNqwqf=k\YIS\ 2#pAj?#,x!7A P,ΐ4e8J׍%.r5h?840fBiPR1)"F^"PM(7S;rDzs NC!0U D8#t: # O|COp?$poXqޢ&%/ìqZ}֔]\7!YyBRxZI0"o_q&t!2/t%y9bZN:$+k) /8sy`ʗ,P1wjkPs&W^)![9hwm'@PܓCYZ"h#PNi!;:(G]U7 }&1. 
{qӈڪԫV Om__Ck szMgꕟ$c#5 ;dpJuNɶW~MFIk|apgL7I O?x:)nw6+J6Do GDSwQ}=ZM0Ȯ+qwDEXa6ieg~rJh^-d>Uj5FJzƼ 5>V7JMEdv2~"gOqi%v\юܢT,|tD, e>(~/9[":wv^ ?L;B&( @jd9..AQI.ߎo/~8q]U5o+3Q˴[SIpWMn#7(B8pO6U]~_ y/4uT55ɼWHQ+M3"8+UVc?sB(vȢj#op52 >L0qTsT>۹8dܳ# 827O4'tvN@(( ˟Ȑf2mPKd^5Rzavv&}zH+ >9,E/idOGC9R8Cri#*#5).ޖ * H#'1d3o)9^t-0i qWUA# P;`%.T[XNcCrX{* ck*E䨶x-7e];[x%,%/N(v}ynz WX.{ӧjvj\m:!ׄ'vRbrL:ɧIn^m3`wџ=ĀKOyr~^%eO1P#8_:\5ՁͩJ(Tqe4(xq\#/WHE`(e2┗X1:3bzK6+ՉISUD! wE溴L?KQm0ٷߐ%v{hN1]#_ {-{2oy4Rs?o'zP 93Fp<F9'O׋pzrd-mN(J&Cg)o蛜0%lοp8ɼ|vnU,y wVC,^-%p~r!A?f?to5|bs4Oh/I%D{178+VPfʫbO'Gp'%e9_UKUK,U7*[Ȭ Qp~XW<;8SOjYw1ф|3e^ižh~"kYW΃&y3")[+na}+*壖l_O<=Z|dc9}iƑ{FPH5΁ ğ& X1(%\ϝ+ONqY;9cE*z5 G>9œ[4;N˴.3C&81SX@~nDnt8mBɴ_Y3V(ſl(zgf<)fO6R7)˟TC7dWٟnsiĚ̙B-%ʭR%'٣*@ÀX'h^^i31FmW~d/>X%͟Akl&c3h _hSM(r[ܙ~wQV.C ٽǷ;VZpX |\N;r!>rw6G\P=f6;W1;wX;2OQdЊB"D E>ג; /_XE5 7%xOI&}yY!>z K|A po.~9&3_Vw`"b~^Ak-bV6O9^ OjbQ}8`K⻟r&A[|Xv0"XL<,B3("۝`øh*US&ܣNf@c"ۗw^HTsHH݃6N&m {xa-Ӷyuк +b`8CcV(˩rnֳ;Klɟ-#)zfZKPӏoah[g>*9n.r#G$9_o5'܃gURK(Ϡ ޑ$ILy@wiP݄}3uUsk?h ÁUnB>s=· l 񧆉 =b@8t1'X)ejϙtohzl?Neߺv_$؉w@E>ݰYpBpxbv fn~x =r8&]*Zs&xl>˯r%cÇV)J7%Np+ 4꒠$@d"z6aisMWWIJf"AzoɒuP]>`g!]50}UࣻWd$|xn㺡w;u)'gL~Z|zaW1PCl7@_Uk蝠Fz4m^6e;^YXӀ1T>㶳P*n '}{q\ 3B_Eؑ|1DSFOZ0^~nԤ4A!XJ|T[ ሌ=SܣHgFrtz_r@T70o,5jS48 $A!5nbzji^$=ߊ% @5gH#K2ͤ6+)\N&2OJYۥcz=J F3:Ĉe(xj?oӴ7t(Cn #$M:xOW'6l$bي Eh`x@gk9iW-KHZ86xK"YK; \y 1cQ7E"*_%`uyE f|[ߥBۣkF 2wDr:+̽i-i_Й 4Fg\p` lupNc*‰֎ǶX+,|Qn+yGJd0"WQ+ZVER9TK by%I#&~7BIGGVPKqn$gjMTټU5+Y*a}\[7Lє09mS׉!ӭ_(8( bDA\G %|L)Ӌ[%yW=3r"69K_Gsd#!SQ߾RP,7Ҕ~̬ր!қ9Gy/SVd6'%VS c%ɑB}YԨ ܕ1aj1bF.=>eNx'tAfi{Gq&t6Cz& Oʹ* ʃ"Ha:m{1D*~qLJ5D<}Y{Xkx5a lMߎLQ% fs\; R-J9jՓ0R>.l,Ȝ rj2;'8NY>y @XN).z-Ӈ(Y*(3h13B43Қ4]=`l!T1akf"EcΖ %<0c*˹Jи(e`?Kfb:Ǫr>J,笚b:Jt5H3 .՟̰BѰ8)א驜P*еc3frJқA/ұCþ#aCcoNˬE2Fy3,">&OSβ  =3L }t2 #"vtUݥGY̐b\l/S_19=I{UqJ/ u檴ף^&a\w|+4(rIs]BZ= /Z%;BD~bv#8k\7:v0&ˇ| ۔KzrqoYxdζZFqGٯ׷l\ޅƥZPB 3Uzuk]bc|/Hį,Y%B{ɚD2%>]ZVC=U7+F<ǼQ!1&^eHK;qz$H #$m,lO2=liYy*QPh%R P7'*CEڔB6RkY kG;BJcU )[-7W[OL/O˩lDW,b(O%΄4搩Ipһgו66 У64NY/>Wwe5ؤJtGf_bWD|H4wXV"HTQmДCy6Gwo?szz2/N69"Mr]@0J +mEIn^k 05aJ)|46p Dl{+Nj~pP3ް8`^g'Ub}{1mNg;+tui!v~C@+]>tq_lt ^7_ Cߒ(`=;5'?|'53 gi &8mƍ}Zcpˑ1hvlsF]~3GSB:Gi2K%/04;}LPO_2bk'/ŏ0!ݘ'+PԌY,5:]HWNnv&M :/fw‘ by-d!m._$.˰uT++E<6gzIDy~^m@J-Qo ĵ`UR\q_n(vvq%wV7aG`Ƚ]$:Qd.Y ̒Z$coPB N) ~ ;`hk~'=a\<#TOr5@=;ΛrVA)g렒hR`a6o\JQf^ 4I+V)OQD@f+n5z#6% /Z]}S=W-&2iV4z@E3ai%qZ 3Hk6pn[jU@M$ika}/51usb`%kqv}bs~'Hxz/-l֗e q78Ec8I$3iϜ99}[;.1! rdL95[K)onV؍CvZ^J~5]=v.{ 0޹b3~.嬒ֱYʌS*enO/eY$qۻ&YiYQ@R ϖiË0uM.SVnDokSm2d^Gd,]dChMqqtRI_'PE^Jn# Roxj|mTxH`\Oay꽽^K)"! p`~{W=aޛ,=W|}S m;8A69[p,:bi)Gla41H/4%>67@`S1 914ق`[%@(@'L,-Irag'7XOguעTB_'XJC6[ <=&I^q?>`g<&mX-W4tϭEY.sZyXr::z{aϗ$"VIj:ÑNfUϸbwɶA)C5W\5v Ǻl%|=agh+]!t$ D_`KKeb7LGEŖ)7M䎳Ɯz/%۹L{n(Sy D_4& `cVW$~ޠ2kʢV6@pd6! ogu/1}a?rk*90 IT\5+񶋮sWf RP$WD;AۢgCa%Jp#Mm d~髋eHѿ:VHd|cG2̓v(C/}|AOvs?cU{?||PlJ?7(,e \x %^6$ȗUW/`rL2'~z+ERxC3|k?tĺ36doE.KT;]1W2v>w=캝TDfS2D7ӛ S2ByW&ɋ|?sA*8.i%4JL]VDIÙ9@ZWWX [i3Af vHVjFl1[9oSgCVck?˱T5 9ऀ'5sG;13O )Z ^9k U-ά|J;""cNGV3kQԯ-&m4 /jvc7B2gXdL}Lq'mu9馓,st8Y ˄.%%.A^/$cH5-(ΟC)M9B[ḁƚ̜0|KɗDCIx[9?m5Vu]sNUeʴ 2;eڛ}U2/;7ٶ5Et߮p'{dUg!N VZ$+Y{*maf +;5q"2WYw%<3.\#Jݚ-kI2>%_G$p ,$$uwl5us%ԢIhlz OA&C-.K"] çr2\Hx.iԋLA"$ϞV$!BSgB=$ P'u,1kzgD/7wHǶcnnpc"a{¡&d%eZlLa-1-7ǣVpu)0>/gϏͣ^^14`1y¡!h32iRw rp@$нnQb HuB/IFBSΨF`XquRmy.ጨ](RxJ81"őakD:w*[ƳoILAzLioKFlycEĤT3 pYXLV/BOf7e_pjn =~5{ KHڌm^Nd3&FW 7.d)Xs7bCIB(f g-B`d#sE4H}C͖W'czQ]< ,N Ü&K p \Nsl}RՐB ̂#yD6j>Lv+pYSE0XMgعc$m)HfH["uڹ"$CC2.,ӛd6ᓒ =,%~c~pRi/^>=%)n"vjdI0V/vjkXUIهZɵ2S'f"Qa1l{r=RQcgSږ̔ خ!@đճqJN 1-"ƨKk/Y`#J;zE;Q}Ҽ&S>L[MrxGrrY:ʔBz'+yTr NUv.Ӯ2nLRaZB'('S\JE;! 
XK c2Hq8K;7=8MZc:IZ!Էh.P ɧ>/M!vHӛMObFަR}rJ'xQB1?@31c o8WiYp~$M(v@pKl"UN^,J z7qV߶d+s˩< n Px9:8wpQhp6\[mM`ZAҟSdd<̞8![=}LKJ* FA&B*>$r>B(Gs+ M S:S%G,uB8PܭJ; Ձ'7 NRDvHXII"88L?P\L۪j$őt uQ5 cHK|;拘{ {濶OCY9 eEi2?$Uޱŕ; NwOA- VʰH#Q$PtoGL2㮖k;V @`Yb\Pzea|lGCfۉ_EL!8Gr׺©665eq`9[k-\ ۴pg~%`g!M=#ݪ.:B9y+uVڟ?d*=U֙Ď%s~ %8ԓKY'#C%Eldmz҉M !ظ҉>.<] ✙Eћ'T ؄YN Փ("x+qY1ELjDiTY+H5m/˸ j^>}V7Srt K< j]RY-"QL 62礗*bp/-`PC++i.pV 5:P).S?;˭tHl )lqJϽ,ۉoOD22e:&+eH$@_s($y$pJeZ:]jPYD {IqJ{\JmguYO+C\A.! >24ħcgWIZeV(d3X1+'lA5!̓,ɊU2K>Lߦ’I;e#r0)CUT>jo. +J=T%!ߗ 1GSbdihjEy.:}c/urNR 1ӯ#,NY_ S@J)TI<&;ʱEp,> =e63JMꐯH8%4X+Ȁ>Y@nv;1>[-mg;BSźFص!41 gGj5ip>)RR`J|WKKtK(ck!RݹH3UB.m2}* gpR8ެrVJj'Ck'vI)ezPNʡ*&GR$2Rf9FJćfr}lFU_?Ie5ӯ;Ԥ9CD~l5:ca<..SS2}ЎFz,^-><۱?1~C>~awj筳-W n.HҤ 1.Ec9H7՜ }umνa:{I"W>Kn 8,One-Yͩ쳵eM:k 6zι4v}%\`=~ 2QTOjwZgZeܾ!Ótw2p)Z u*6 6v Sޠۭ-M\ tIԂFVݽEw cQ 6Uur$RZ\D3^&?#2H2p֢h,U|.v(~j4Tچ kUΑe4Rd֕VrPe*v^[3 ? +R8Q`X׻-O/ƒRT9ChVC+YmY 'y GTQI,6UkyΖ:uc I瓂 ]Eߊ)Pc> ,r6=ޱ;Q^M' 7p|K= _kB׽iE|83F=x|F>/R12VzʛǤsKY3` \kuL %w'< e,T(7$qoxpU``;(JПy ,>>8sovbk~i0`UV`6g( 0HB\P3UcTE#%O7:mǗ$Z(\w߶ѓKN'b1db%i΁p=m/\> eޯoa?CÈ:4|S7Ά-.&C툻ćT~̛`o$bDb{`^sU-ʮ9ܺ}~lffsGs)`Ta;YZX6D0 =Zk^e L4jdd'p|cuU\;"r jڕRgnvD1"YWAAIn;r;5i ^le[cek{ӯqFyc&0/4H0~2rd, 0a죀L}RP!3'xo"^T<hx,fMLb <0m%2z84*R} Fl7hl]Yz0}r,1)Yio"MN'6lIcr_ˈ=e|KnH_iAmBV.bYk:DrkVO!89HΒ>IO/A=yEzk|ON9;7 'GrvpKݥ^~3~}DawͯBut 6v fBLgCfhDafIASՕYMioyfxR#Z UL s0 #0kλ+/X1e,aME1׹Tbح@컦o ހ_,Qq6\m/CHc?e r&K0>D),7TΞ{QuOT6ϙ|}:5֪X`+h2N8,rE- R ks$nuvc|['dgjmU.2[WTے?!OjH [ZC lDlwpj3>fVmþ0XlЬgR纺΋3QQ7&;|\kHm'~RP)V8NOn AOXIBJ1Tf(0/z).Nr'"@C"+x&5D+n؄ NjN)&$8=o7&D0-3)cb )^)-<XѐM.+pxj}Pa{[LJӬн>e+Q+[_0 7 Crn.b@99 d(}h-խ؋V߼ vl7Gr0u}*mʴo2~I=5:2@1Uu@"t"yيڦ9iJ"8ZH6*@?rj?[5}`&h if`_UiźQxݲeOf͙`'u o޸z]@9 a]*tۉ%\i+v- }h:VЮ[+%إ[7;dBY7FYPA'~NKÕ9Hy0 N6\$<(\6uLO S>aO#Ro'юӌz^ LEM,<98Vn kSXgRNi1 ]+J&#s|nB& VFMT@$$"b'3e5gQ UHOH$DY).%Zr6T܋+-8OA_zըfGW=%)RLD*?]2mbQIJ\[WI#68qr(k AosUO{"Qx"@w^GleO{c5Pŧ#qF:j"a?:ɤd)E]w3I"ds!A*b?6t2P O֋&Ieu:%K#k F6;W`I@V I7qDً*WP c }S2tc4}$~_&$j(3yڞq %-^(D {YlJg/P“|lJFE=u6+rho07ʒ!ޜFsVzEl%DpvBJ/_ƅ$ -YeHͪyb{9Rܿr(Ȑދc >L1v=yeC*o6-fS-7$m-m-S\cټ̲n^.iQFVθUKH+}ĥ+Vf{iMeD⼫ZUų^yklYJj夒Üѭ,)G1Tj. 
Oد7ll[1cK |RUiX[N[:U 15U*$Px8u\g/t> Kz9=q}؜d~U,͑B)DG#;rrVW% ; -pVɇ\ n ϚUM\-=f`xW 3gbG]I9/,^cf4l=6޼XN6؀],e ]/#[xʔ ` h)М&M`8j8/Г˩>߽ idOH6bRf}ɝ@R *9- Ku  pհ wj ="}oJ 2n2@i~904ۄ:ݤ]q (_9{ɡ~Ou&lώ/<0?$b}|6FPbp\MLǔV(+S=C_o4y_>bo* }^A=B)[h6漢MTпV B`Nȕ%]˧._J%S_Ɔʯ `+ 7NciS$|f"M r65bGU_r=4g8% 0pr)a^dLpS4fH7}'lN`ޘI$Z8OU%HAԋ%g5Q/@enN#Z݉y݇ۇCuh3EU EaYK{C!mP/ߞBa8gL74"Yyf=򞼈\E  [p + +u`VX-+ x&l*~dwZf7BT"~EJtCʫH.tԑ=$uq (6r̞fQ&I {EuBfLh ii܉?U%%tkFlq qN.TGY6n\:H|E#wDn,t YzF@P3D1jʁM>?W^~H\0+1>3cvB݅)J{27d2&M5z[g{v^B;XoÂ񏷹JXN5i ]q8_G!J t4Q-<ޱ5c=ɼWk1 Wq?}ϺƘN;3R hy>O7Hg mQxq7Q \-b.IiTs`o4N@?Mx= lTR}47$7w[5665޻*a-a6qÅJ|1-N܋Jc-ܦ肬j,/48He ixD5HLE}`Xb3_b vD;4ro{.hCKMd\05I\+kHzId,3P%Ȼyg}λSjp'w1Y=$lWf7= +}O,{Q,67})_uU4ɩ1!JjYVKwbAJj 9 q W3B*vL(. kJmvfs4_Ze aDv::~0zrx+`!g y-`# (,wLo*Q ?E[V7VMʲ֤X#3>08^\n[&V'uT!jN{Wfy_#_  h _-K0&X` )DRu}uPkcg(C6 -!Oo$^mk&xh1L;;gIk,*!\X)#gRmI^2@15!3Bo\Ey#|ꝆvPl{_}o6YDO)!"~'eD7 "qMI 6;o՞ߵFk.,x 'Ĕ0PCNPfTh_*1h>H._ /!2,o%R%_›5ieYc.pr$H| sZ?(d5 eq"\||X!Rc򔒾NњH*o~2C.Z.5 PV"f qL $k!bfon g[IBL}xym۵lWvK¼Tdk% a00tS#<4Bƹbueړ\|`|/ WT(6CZ6PP5HWzQh4K"MmbA|&&}pEȱ5jʐSD3߱/67M z4n?쥐aVMˉNڊ:yd UW2 2Mז/EOVae,ƊI+zǶjPC+H#=$ c Xu& W+cG=|Ģ8mM~"8K~M:Ug?em\ە-OpR/]~^ Td8`OWe QJ5{*bWh.x <|ЅU g.UL>ik26X\(1_@C< P8VX'IɧGY޲Ԛ=n<4׿w:9bȍLJXKpDv^(/=Z\XMFW1.^#yO:*F3&y1~ByL߿ߚP*ʃxXujg[|ld( @2=s_ɍCXq!]>j#T#Nm,FH+GSdÞ vziTvWGX2Z xQtqMȾ0u{c(dIX!8Q3^dY?%uD%Fq`3H; fLS.om~DD>TB` \__sT&0Aϼ c~u@VCmY-{ܑ?wP')F~x7-Fnf{JNWy 44vgbᐶhٌ_R h|ٰdsQwj_24{)i?_E/*ϝ~ٮeFX J~j >cS+뫡c|L;RcjڟhV֤e2RE$@X_Ԥ'H' ,=79+0[<$5MD icvW}n5'Y1 f4 טx^ӹI؄hA4ofnBp}/1ہzD1R_ϯXp.ʧ H v3TӲ[ë+d3{a5yTol??~Pm0܃/mF+[=G$J~.q8=}嬿>/NVA($itI) 6 ).p7:ch@rT!$b~!X<ؿ5 *<#Xghœ{LDOc2<^^!̱Y fk{jTҊ2A%yW*.0{)vpI-'`U 20Ylxҳpwj=ͬb r/>iyޜ֒Jc]G; nU+>H`9|{C^(NC>)8s;.ijM)sL9R} h '#<mpPc_8;ٴ/9gb NDw])]Dܔ!Α79 d  >9KZ|od3ds kU֦;*nVh7!fkT9)NfAY_M=9ZC 1ݦꟋ{PJz(>)ߕ2 0\ސI6o`!0W]IJݖ6oہz yWy4lO0TayGQ}KlWiYr $oJ'vw_3eL2q 9~cI#L B͜0 -F`{E2;o,pXM $V|+' L[;%eM#2%3y>:s~^xitno܂<`4Xշan}jFsG5y~@4u#Nv=q&H3o+v'[q,E 8|it; -nI 8Oy쯍X7v;D"ҌSv1}k]cZwh H&ФiaԗG4`X=+AwlJ}zj!+sjʐ?fHs\\Rւ7Fx%.}L3L6?tb9;l$$\_~R}"Dؔ:ڤħ՝,f+h]8K! ,TlGvʲgNP# eVB<؃Se8{ȱ:ViLSm7QK[^C}1qa?wmnI*5<$O*_,XE捻E5W4w?b;Lj5t a@LFUtkPMWGFaa~\q={})SGdG V{Indr G`z,Y>ef}_Čϙ\|F5+HB]JSCeԯjgclkPiA`ة&lT]*\D#w Fޝ>3L 3ֶ@+Y ̳\Q\-+LH`?8$g4ђ,nd:hSs7Y__ıw>O~w|5f| d󲬀F*tDckHJ(NΜ3RzS "]`=A1$|}8dWUwڵ U5y)ҖmyPUS0X|.ǐ_za Wt-o K9%_ZC"/n6 Ձ9+2=u՜PZwȋd2o? 끪d 1"V6h,G5JM=%6-]nPWꑔl?Xb7$<;WUk-WvTxѤEXU;i$wţQ|.y7*f$%\G_.cd ]V#^%U ͯD;~#T;>/_jPtI(xY=v2;lĹ3N[ݍY尲V8. LBA-PtBmg;n (2!/R5V% Q*X5`} :$GLHhͼ*/w/I^h%u)M2K$ /AUT V4K%QxAkoamУWBepUQLqfH'LȑB]wb3a~e9/o`NjЃ@xQy/'ۨ-(7#KOsu7{L!AWGG ޽sKhIm<0-^*05;vSM~3";,Fjm&{|T%eÔMjRĖҊ}տ$/ԻҠC67_edzZwۋ)0SJvTjʆflDg?&)t~?2/Zv ' ^gW[+*A.j/Yα=:RQ]91ro'X1£Aw芧V>+"u@.Tږ3|1l 4+TS]PͤzR1L]`3oxd;=&9qRz{']PM(fHM^>z"Mzf't ^n|SU8F!M1&A;u:o54 gkbeT= 3."!e#c )f@8u7&07󭀉G񋧳uZDZ~$Oq*)1߫&*ǁb,?³_!Ņ(Fpl\MO*gbo5*GSH/d')$=ڲB0ca ')P% N CѾ) Őd_֯0HHs/ ɺ#nrc{zv̖v w6^qՀ8\KhG1cngs+ZjEIRbZq g>~> 9C6DB|!ua@C(~3H`h^/>ԅ|LGr׆xv+ ra]TѯYOzKYa;Ĉ}̽?"6<3o2~J濖%q388).j5Eœ_!@%MGV*gg d߈ֆU!40TCI6^VmKB]2V:5`7i@GiS #(pۈg8NjMj"ٌEΔۙňU_P^,?{XFWIf7Jn"NpHkrʖK.+mΟ JuWEgg)j7h ݃C6+5$AsV)`GJMV>%DJnK\꿘g)||!p"c7= M;YrheH>F:Ɇe]|JcL;xA'꘎qZMLF&#vQ=i)6VHp3^{VP;&c@2y#ǂ!bM t.$>t(c?yYBKڀO۲7j * ǂaq^ KY'Q +Gsm*9D pp؇N,hd6YiyOR G3'CL^Y&$x"SnYBͨLgK,y Ɔ(=jnYuɨ'aBWoSĪ^z/t1 R1R HSQt;yOIhBnL_2*CR:Rjzۉ Lq, "qCB!'`9\ s(qJ G3ȂU,ȁ^ f-Q #hրR`p3CQTJkR<Қ*OgW Dcvz"mIq+6:|@_əfTCD&ZC_d0&gokxF*ѾRpVCvY,#,XVz|D0MNڽB|AKu OV7 su[#^kjF!y?VY#+7A=yG+ӱ+/#'$^\dޘ23)/8'j+a',uS}BbYm/+75g7Y@f$ C e-KEX! 
~L*m"$H,MvG!z"L@VV #ueȿfy: s-H/Ds(״ k;{ 5ThcYOb<7F rZc92eRډK"i)7D$H$(C nxr'Gjj&j} ~{N#m}˯@J4Wd)?TwHS}7qL&X8[G[*DDEPXoe |4GtZjRW;?-a⁺PXnTxǂ3sS/Sx)K5$,Aad8˚59!xoo49d3)]TIvFr 9Ǖ`ns{H"6Tm28ZٞM; ƫ<@W6mM3],D\w~J2u^@ ; +l.D[iZxPEaqTO.WȩE1nv׆p4^A)J25 k(Y4ҕb7pCޘDbLNv[[=d(R%_BZSo|o5k!yq5>(ޝ`$VC-i_#mY>@ Bq-_B[q&ˋ"m.׻y7;yuIUFn]'{,yr#zSͳ 2B?OϞ"a \oꚳmZa7zUtT|L4SH&3,LGC%mlm4L+ 1-F(ޡ8 Mc|sz0FΥ[@xY<; >̖2t\Vо(I9*b[rR4CvQ> F8xV#*ccv/:LP.u5Vc s_@U%vk)5|w$԰_Axin)LGO QH&D&BW=29BJltpd7wNx-5D`:Ae,T0&bN>,W@G Ҩh玜>i=5NW[&w&.]K΄XW4Ey$XlbD}襝(I:[4[YgNxzif*Nl5LaNf+lc3 ~Q#y`Q /S_C6 3矞$,9F|ּ+- l%4ǯ: _]twLOweÁR?ԣF2fD 1^I!dkʬw+(&S"+~huO'T6rRvPU#" ..vy\ y-Q 4ҫvcdڃ"PkQG`?ұ3Q6DaJZ[>jE5oW ƾC%]WAj: Xg?%[}c'Ii#Z#j=x!{(&jOvd!o_|Ֆw$4dLl|>-h5:9O%4p+|qˀ,ۍGL.>%䜟2bty{iWtS;ڔ&4#ai',QUY QD߬6oL7աl K`N5N}*1մ=sÂ$ǚdݔ%W11rQ>_PDʯOPÇ3|`3PE y?Ԕ?XyVs41[O)Kk8U|y/[֢@qyw{:,v1Zz:/{9^Ҙ% dj'{X9{zHu-_(phXyb1| AϐFGnGԺ>2w%Aq.N%8A-pHf[+C 3@ŧM׬`Ol?A=yKD )6%~pާK5xMAs8&~+ɱGHRT@MHD)ÎK시dBi!L-(#9 mǻ^]41)uͻ ۽LljThmh&i"5dUsNd1;jȽ&XDKCxR`G#C{p\1(]CtceSJ$:އ.:,_lbqT(- :{flZ{(hE 19@TlXQ;BC._aMvI=kD?99Qy/Ô? Z"2 S'%&'t U*oK/%L.ssu>< < <'iew $&q6ۦŽ*n_,k9P#ռoN;Zs u'*TmϷ6 Di3{; r{φVw"Ew⑋X@.8it~eP+>y7EVjPT*ѹXe8&(s"$51N۶//\I0jjg !0 _L^/ܙivˆ{<+`bd!}{-&5s X ^B\1#,ljep ²sg%-+\+5aU:N.xJ U$"{+ٍVv|~d¬W}o7j1ٴȱIX7*˅[M@g"X'e\ ^T'\1:~e՘)BZuuOc嚒_5M etTT&HX-b3rk<W3ϺJ -*Mފcϕ`OqYd DYr2=yuM]֌8D- O4}9o>)&v~tZ+ka$J& Y_n/az9Ϗ5=M2Vk&XuZ1EέO~ #Rbd U%mWh,1=p~? )ĖZ=- &_cSM2ÿQoQW,f {ٱķHɞ{Y2Z :XxnjqU0p1)jNu 98]ҋߠ8rB9&Cۉq5C,Gp?bӪ\{)T?c0 :2 Q]KV`K) C[=ZqpyO`+EFDؙ0ZO,Grd>?fRd.ɞd07ɘ^4VH, =΋ȝ$ݮvX&C_F{USZu29K3j)MF@q–ްpqa%b#h VAO]+_*W0pO!xgR% GP/R<ٙ5vR#x=X=& ITL#L5u5Thk ~_n}/U|)!!M\ ǵƟ_$[ԳFLhחJ`ce_XP+C0ROt;xg  )U40&FYklYi!"vI8`/)#uĭZ!~:z]Q#e0 SWC.%<ÒQz12ɀWv>Xkҏ7| "cQ4D ɌZl p<]Kwm`0V}uxaovP*f*6uBp:-'`XVWՒH,OCw-? +uzR-Qw2)ĚynF oWQcn/ 4ȇb*ǽb)+#j. ~_egP"K{">νs)s8m, l08$ȋVIh z16ɥH”V컉>%o-|)à[2559ϖ FZӰ8n|R"akI#t;O "ɴ{޴eoPF%_C|)tՒ1O|,gAwAʒ.a}UwiJRDWffA4[4jKqyZ-?(j,Ιs`_3}&YP0"oDŨn&4~E\`_Q=_G" Af3xC.\ a=1fj KCYuAXsٍuT=i"0uf8k5!1}Ǹ$TBYix7M'F |<泵dAĊvNS܉I"į}jnڴ)Ј°RyƎ QRn,C|3*gцbzdR͵'YZ@2`;*3(Os!@DhZ)B[-Wmj*Vdg$_42i2(aV {oWPY zG;5儝;>5ۅO pfSq2M.2|$._YydǞP&'vr@\*q +Sؿ係 ɺ7 8_2koiק *=f+X.& 9s T9j#B^6Xe4*˦ž3D\ Y+Jfնur%9=|F)T5 }i*KK|1˄YY6%v"]*ș$f#q_M4ԯ^wowd;;Tʴg2pņ5 ;{?vHB8 ::nc m)`$Z s7•V%pDtƼ@^BNl=TfxNinm*1$&#'G*GxewΖ 26sښKeUBPG'; Fב:n3P=d6郣OGt%I#l\E$6?žH0uֲ>\酠'$3I.:JMHw '5M SZN,%hwd [!gL BJ`4lIx V1a||^qإ؁ٗ _g}#hY|&H$.Og4 XzHrScI-@zcS~~FCA߲)KɪY2|bnuTaCqQD&VѴ )9Ґ,BLŘ( HzI"Ȩ8[RrJL#;ik}k!sZ,,%J%1#)8 *iJ-}85nf͸ vK)U9VDB8l,դ>]> 45.Wc1˨hQ^/S96TwcX"/?a:4̵ ǀuҷ{Oj>?U^GvY]wbԗDg6◛w GP zX$Nk!o/ey++)F q*|MF\5 mC"1I7}zU^}Ӎpf|uw5-P5zxld2ܰ"f./H#VS>j2ud9Huщ2JˎqKB11Rv7äI ޤ#mo)u !f3 8_CL*` kT}ӄD|a2OKЯ3(lqH)Qhtw̘Zbwi"][vЂ C[.Kj}cNc_ h_mUϭPP+yH!?m|tUR*Mm 5ڕ&읽P^3NDZPtVڨiy:¶,e; OqYth TrD5yԈ|9s=', 3J6cYLPHQ:aDz6%fD%3㼝L.!+8DmKļ/\a5QLNyK4ɚbm䩚,db,'+acA)lF@B5Ls/,RAv;ѢY+EMqXsbI>Sps2O&V߭hܜ:hUqܺRE !?1L n[RWYSc-9Jq4bke#0k'ѥf$ƪɈQ̂6=ʥ\u0v\5jXgv(|{PznLEۋ9oΰ"x\] 1p˓.P&'t94 )mtFrKSў\A懳 ^4) -`q峑T}ײ ڌu@t?7K֋nZHLd'wZ9X\&ggWoxQ{9UPWӃpY5HZCPG `MMq"OObo󮒞uF_ EVd f% ϴ̈K GvezL.VX7a6>۷qL=l(G@\ жZZF_S"bK?v9R)l+ś\96iоm+M܉?_ȿGF9U;Bm _row4\ A U l\29l;AjcwG$kɦ eg!e wۂ9އ97?oF=M \uޥPqǑx2pz,bkA/On4R8Jٗ5\[n3JUW |'z` 0@ t]fŚvNS6i#+yY%GTKdN7tыYdAUSX W5dšցXD[ H:[Ħs UM-8xxKΔ㌗Xyf_/ɵJ&Ve,gZX5Tn iz0Єy4ˊ?8O,-DBdؖX#8 ¼gs!!hwB XceZƇꢹhͰUU/=؂+}H7? 
"u`EsDcSk6FfjxT,h<&Y0 :vUv-vG`Q28 T_#2ː`}4o@vpk!zJR:9wҬ,YzTS")d '9hA32OAH~c_xF؞@]1AWć&O*XZeo$5u?6J聾xS`V~5}!E"W|EhGVG6zfEvW9=UxHL_Xfb(kD |EG>OLf]n+<]7hmQI8<@GڝaA9lROQ7>u-oYf穽 >!Kn]ՇNj r#1PQqVdrY%F}ʪqt-(% C1(~ qHsN)@dBJNf^!b9߄ S/hБ PHE&}4ƴ 5|pby&37Jۣ&#a}VPyZ2`J$&§ʖrIn!R, `}P8gAU-.a_)?'+ǖ>%dNWq,ueXpҖ~V(vAQ&4l2!']Qh+:)Q~kf]~:t Pk ZG%؝%L32B2n;?OSvKt 4Goj@mob;*sQ'ݶ'5rOxłdapAoN02R& \9&K;F0~+:]gX3!u%#wPƒ͎RCv1!i:)Z[ iՊ\?jDWOU~*YU> V:zj+~~S"IִG3%qU4[J֌ܴQ`)-؏{?n| MX[67$ nɜ 'לC0֨ͬ$Σ\?RUM|! QsQު|ovׄw5$DY.AhO%CZX̢ph'cf 23.>:ѦdK[o?4uXuMiJSѝ:8ڮ P5U> v&>>_u“\dR:e{zk DML̔+T !F+R(Y+~廟Y[V: e]=w>=/,pr&U |J9B'_q :_qшBo&Uöq [(豰ղ w :~~GR\WLJ] dxZLsOSdŞOQ{-sa|X$0W΢lD&@%K>{tx[bCqyU|hGL)VI{CH{"!I̬Qn'ಎ˝pVe~kU # e2b@4ةm/ǟmv-4U*%S$ 9՞hVvҜ0]Rd({,j4eWoc'4ؿ=NIQIjBò*%)^;EG\roW}lYv#EGPwzTڔycv5i=#,!6u9lS?|"ml:)e+T.NҼ:L]f53RfTcM 1jDT3ԍޚdnqMȡ, Gf 3|ʰgUo.qG9GBMP_)<ݲ}bO ,{pB8x =i{l$p,G"/ٔ=Y+N^FBU PL0JgwV"ap-Vqh .[] b-lccҼ&ּ\Nڕ%ve[ɻ|X(r(_Eb/)u?kxi$. B9n7G/ه &;8pp5-b+hc#{kbIy_?v'XU!^K|RٲSIѲZ#uxڿhWw|%^.Kfgyln"MςǗI" rrDz)Nr[cV1&+D;bx7IC\Mb ̺*T̗昵">ĴyhRTQGp%z2[/8 ?{`5qI iy .|/ 'l;ۥ𚕦%l{4y$ //GH"Z#`Cr9)/I8UԉY_>t9$׃lAi"zRO~%3s6zQ!NOT1Ia뎣Ctjp: UpYAZzܢrَvu޲!mQ jz/]SV`{ Rt>nf؛E^{qwoAK=x{ 9'.aCiX<{ 9om}`K$<v;Į 㗀ÔC/_!E 0 h̓&ZWRGb]~͟7D4ױ,qptB[Iǩ^j%VK{}B^3,[86j~o}_(2OYKT%pJi(cZ ^_^H4yn!k>أ"9$f+ZKMy_3JF~́FDE{rp4QPsaqXG-+R*Q]t8Rֈ9 5hB% 9=uL(.ER91 ϩm9My!s0uX7> (z; riX Dnk~~m :@-:n B4cAxD_aùqx^H%nI' "R\,ݴ֚ xZ.iQ&vS5y)Wm4QaeDe ;TB3cKx&HDD;{ XP>@ZTyR6%.rhx^ 0Ei!oY1eR:@NǪngteSܣ,Q~Qbʻ3M@58OǽE\ aٞ~oZ.BEiLѼYfrX۶0oiJ H CZN+G֊PhoA!mo1 y(kY+/7RBwF$8 ߓk^=[n#,jM Upywxk҆ 7rx笹KWyD\IY 3nCx:y$63fa:6 |N*dz ŕZ9-oFMxnPzIMW+Z0,ߟjW1 ewdFqFȎKqa` V$n1ɲAU=aJ5a?U?p`uxbںؾoQ%Jf* WN mw Iݤz3aykP.5J?6A~ݦ@PROMǸQ";;'>N<Jh4;͠Qv0FPƛ 彉~D-|b3.I/.^P.n?ɿ`M#oKh`x蝡mK]q RC7Ҟ2)H*yF>KX~M6gE &ߪy`gQV[.5_yc(2l߯TC0.#ÎޛOğҬ # O=!x; /ZojyyX!yߤ&Lvu'5W,(n| :AB_毡߽71"ؤ$l M uOm"_4-O4j82!(㞈]I5k㹞$ԗь-c1P/jeW_p7`_I(CWo.Uz5YE,e.κY9ռmU8j_%KB[} q~)L iaźJs`@s]55sl />IhXDtVO,S#Tm c-Zd]Ⱥ}c_R/BP9=z}2g1fYŸw\_~7+f{BA/ȃ;6kLY"2vF4oX~4PK'Yr:OxMXDbq2#{ʺǻTPWPuUNYiBQLTD:; f-ͧ_ :IIOofe0!?NIF_&(QT(iB+ePJX7ݪY"ǐp w?=ñ!X8SOVz '/Y24̀qjaN#THEQԴ{-7͞:%Kfg!** .B8XC)R j"_Һ&5f>H2&}t5N'ψ;52;A FD 1Z k3}-URVSkTГqoɖ b.uyX55ɢ3"=慃{)~e=b|!^#\A=%G%Pw0d;9)Ctp .7F,_ƞNʺ,aAjI:YD%X&ull_ ξ#Xa=ܤ00Ae[UOb~$ -p71f[=0CA)dsC10Zbô0<7>O30΄s;IiՅ'#aڦ )&gAnpㄯk~F~J]qsJ~mځ--#7R@؍TNNO;ʔڔ06DThcVl08%KrR])ǒBuݳuY\郟])N`]w؈<̧EB ,'#6fQ2t^]R6\u X^4_UƳWԹ֫ʊ[R5K9ԾPk 1:6ڻnA+Y{H9 ߭8шigMHβwxvz_p"'kGG`)-fM` bBUkVE'%R K ,>!-ғxyw} `E5ړ{0/wg:Z{,67)5Us=M p 2wvj|og6;~ܮ s0sYTN2ƛzbg0)5l怔[(%g{GЙqfq"}(GhA8}ɞ8"e򅗻szl;/r}%*)lO9a4( J !d{XCU%{{,vE׬ıZk~&={&&"{FI5 l TIˢ_*!+*o8AZhfQ USWb0ܛI;2=%7ũ9~4` :-y4(l v"q&|(}M9&8]p oJ5+IJe-L5Gdp4 %=Ec|DG|loh0H`7sDi%Al=uL sjyAFpT[\i;|C!^cj@ܯg&f)eejwoItQ3}:_ "v; cEyӔD|fEF*Y4#s7%yCb-0GDOT0L-EYSVVO:w[EȩUoF)&ViJ2ғiΨaI"p1#ƘILֱq2=.zZr$څP[٘PGJg}Ӻ%e_`2ߖm(W6jϾrѩaͼ)3mǾsD Lv1V_~K^ J73mOXh'Wgr f3&ҿG\8]mF/r^1~ÁXhx/<>40 kmH&r"SL ;iӲZbW+gFR%F`S>3czp;zhS!hghh|(h\mE18LjFBNV O_!g@~g _7=g7% PfȞ"MJB'6O9_Skpv H3=l/O[E)Ek'CK-6|Bza')ȯp5@:K +N>Y䚕;+;NZҚŐfM.[e̡ďvHkd'>Da"]j.!>0.,OFprI Go}KZtRfha|޳}5DL l|f"ej[zgb灮jFiPkaX=᳜PIu$:$ڡUA$4p 導8RDG8pĦJOW*R(-7/oGl\ RIUC`k.} koY `%5o?y73 iIEMB@9.a9ۓlDЅ іWLv-n^Yx?@Ww(r@@'+i`ʯ_=],iO(v+CTsdj^ړSH&;41~2ĴHզb;Cވ*\auT*( 5֭87Q?b+76f"{& \y H >ϖM=Y {;VyW3?2evAǶ̈Z/ǥM_x QcW5m 1{!E.Za,FF;xT6TvgtL2 `0?hrw>m7ڎ;4I向Yà%(v8̍El$j c/щQ` FTCBcgz:sXQ:%ؘ{FOz~ 5y:P8~t?^%fDAuY `Q3^ ;P^+Nń#Wyw _|q80k9ä%&*7cvBoDOȀq@{Ť`>F?/4-{_27r$zՔJ <=pL 8 #THqJ; ς:Y/RY؂ vʪCqNPbg0lJ4Kţv+ lO_M.^ .kku5K<ƪ#W.b/t*Ԓ8-o' h@u76g^?(!QccEk k!yA~2e|EV.ґT5:}%%Gu# 'Ŗ^, ??HPF<@(DWp|2,(4kE(TB.m&l!lGIHpSGkWTm q~=by0wc9g{KV/g".MYq($1--ͱ$:)Kqu?$lCnсL96 Ӡ`l竖PW[ ֗B5N@[n cJydI!b{̞V|B 
TT8xh'6`<@pfourO8J!K+m ]#cw~ >:#nj-*ḄJ}uQ/ I ךW5+:4s6xPΣ+2"@[Ǿ,y^IkH} 6+B7XD xKWUyhSZ[eq1*Y:# Vsf8YY=QÈ,Ƶܬ3}g]xN/KwFfRi_;Z |X!qs$. O5 4_%V L[EO9pՄGTEaW<݋XiBӮGУIE0s(FNn{wq^bd]x.fhi΢Z5(x"\JhW©exF(z)Gxx1~Im?Iux4q6#ɔ\gSW-5J#NqRBhl{f%GJvw;b@"L XdȎ%fAS'_՝vm- ̕ P;%J.Ӊ, ukN ,w"l2ƚpfu<7vVr)~(9u N5s%Je7ZqXֳX {k߲8c{?pTu){fI@/V&WZNi^īQ \E<1]ޕȢɺҳӄ+$3)RW'T|E+V8M$#{+tԿTb)L?Zikd WFz13eV-ID=-ԚӰY} Q=#5.W7MmD#BBp^B9<=;_SJT@OwW]Y=L̩=j,M԰2rN0*3Qqٱ ep N 򴯹_9Be2sv(o^D*P{zPH69s $rLeֲ-լXokhj+$juDe efVԐlmʨɀ80 'M7c^ bV9cG-ekr'mA3M V_rn:Փ^Q z/7 (D"~T:"c4 .66!,/QCkT]&?j|k?(✫ew?Xؿ"T$. t?hj_O(3jE42'~u$)9X~Kw%Ef;O5 ֪e(#Dq˦o !t ygRZJ"ev)<8/IlSYA)WO̳OjXkX _ʽttz<ϥCڀgc0agy% $.5*i9#`x) hxB8+IZ\y΃r7$of;vu(_>SJ48nAw{I` " c', ?+n5@<_))fH/ j;l?%4In1(% ol0Z\HS0t0wHg~/=Ayloh\7({rwD׬gO11M Qz Y?Q_qe0.)~:dWXiGն&F 6x>ԇ䩍<=ˇyf#<]tPkE[[}wYJP-r,5 }IKG/fWxзSNTcVSEЬ]&a6|{tZs8RriP7Hs]\ Yn#gGVŔ60wUfVDpW?eB%q.*t[adwM=&䛘e)CEʁZr'?qq}?OsxUW&w]wCN?>]z@v&4=rA?Yn{j3u^Nghk+Χ)?{9|T)W~&ƥO|$gh,a.)?Y]`P*}_ _oˈL?¢0C`>~>'+6dt%1W_yX4:dipy-0.13.0/dipy/data/files/fib1.pkl.gz000066400000000000000000002735541317371701200175120ustar00rootroot000000000000008L#Mtest2.pkl[k[J>B@ٝL rS@.*舤D(^U嶟9g2KUW2Y{sD]*v2˲6[*ڸ݃!*v?V8kvh۵zgD(Qm&}zPZߊ!6i03(kS!j/:'B|Ueoqc͓\ƼlR6ιv3?igȞ[|^gFק)!;?a_Wn$2ymuV7?l3!yn;`ZqEHYLk<_ASD@OTs R-W^.w( VB_0y/C0?dzˋ~zY9]JĖO5/j̛5oX}~_{\J|k-^.5&S3'=Gr:RsgZo*;ɬ&@)%m߿6G;;7yJH*"<cy wLM7SK]Z%z묢j(xo͇_BD~PYм:ǚ+ Kɵ={xU< ŤŬ} `G#(,En\Dmb>;q\l'a@{dpxX:;M||[ vq{NS6̄^q_^dlxE4t@b/l~:%:{Qom۲>WX ܧp`wểiOɷH=@G|9nDX.wkSdrI~XVnI|En;};YU^ {V7YNvW&w WDӱ~trX:܅-0`!9bR بlw iYZp*ś{Fe[b8׋+e>kUl4B%TǤ (;ę^pcѰuDiP$I`Q>zĈcc>l`4RL#8Wd~EbIWpTSJ5[/hVeAڌBɛ<ƩER i4p:|\V'#tp.q4u:fOy \ilX8>}`"_hNzYTP"$0]注Fߍ]#A,-U%9_Eo|6g"<3]:%ȯA4(-OL973ْ#"z1Djd{~;tdnr!ǩNr+NY /t0߸)eOR$T.afLu6mV2&Sh{+Ym!Usl'v$i02>V4@ycHw6,Y!WwY|7?/38 O )Yщq 8r9V:_8BΡĊx8C©y_!@.TI3 L`@[`y['݈lN!a{r _ P MpORg [2gkD|SA[ezPryRpsHh\Lfw8t]l#Khd̻f H-7-Fn'?g!aG+P##eP}w+Ji|6Nni_*V!gy';얂5ᔾǁ~,v(h, S JO]KST08,j$_fߐC#fh nw]]"5Z1}akU,5N;Ӈ#` #/+E1I8 knfKy} Ag_=.XYCjId;7C±~8$ Nmv3Iaݛ/mp>TAe#K D8²7@&f#}vDnpf+a$+2܁x1 _6HGH>"l_'owNY6 z.sZ@JZĊOH!%xB/9qnU/}BCGKmql%GB3%P4:ݥþ$/}/SWp62Z5bPͣQU bjNgwi{[~Ciա%Y5d{SL(bCƒ+FCx;Op," 5DUFsSTtqT[PY%/)$+fh`f* Hpܩ]10C&b_s@qcYl(l> i`wq"gJ7: \Bjp-\y {a2>K53± ޖZO`-Ռ8J2/y> UXAY.q$zV8LPHGj`eO9§g)Tdz(x}тk'*W%T!$&&xPhs#D_73T FW$ I!Kjhbե@a%hՉ&PXV`_Cl< /*[UED!Aa`q(]zqx5]&muN^V%С@$)p"q {Yds:vj`Qwڤ&w:xɳa;Jr2?.9^wˍ!Mxdޫ{h@E~PQ }9hQJ3I?F;V9fGX{UTQ"c/WZgՓ|eEL?ދԞE%⦟da(K1ۦ Y$/H[CiGMLd$!H dT >I4[zXVɜG 0ѕN7&%K # 2tREk@ȋy/8U Nj=:@JEPY#BB?$<*n6 4ӑ:yީ<~YJn;\RUԋPz&tJg=k5V3ۑ*: #E";Lּgu3.g++92c`6" x Jz#C?MEP]d;oB`3? p%}Z ,Q$5cj8\*Ib?r|O!5Up=;D6d fMF"[6^kfʋ6%Ͽ6e(\!!$ǡ?6O&; z^ƚupQfS#IjM~ ZQWFh*ש3x:[ g{O .r~H|ƙHǙH|է915ҘnBP*ӬҡYOJ۫G[ԙo+5ݜ*}KtF6:"N1T2ဃ|Rh<9e[i9Zu[L"l7~: +8nwzL3b%QwHۇzy]%$:5reИ , ق,4̍ 5jT| pUtym yĬV{:45cnᰜ/>EjvtBޔb5Tga,$Q 40|W.]Bh͘/I>H$*kW2qP&E/O#Y+pAV&?ʆuH`n[gfx=ݪx)cKa ֵ bW}Èʻ8۳|#EM 6SCmQ\ʷXX@͈.rO=+6EwSk)#l?`*:Jazf5HOTX~Yz0P掛@#Y~x;;CEQZ 4,|8#_3!LO",*&/i( p&j,nAZ{"~  t eGOÅ2 YVE8 W#1?D4:wh 15[/u3TUTCsl2Ђڮ>lPK?eRUU5R#=x;㶚YH' H O|FqcsaM\ M}U"hm8C}|V &ea*Hz-@}g՛a: nySy,? 
Wzl2t?uI=qInޛF8p1e`c 6 4Nc pr~'\$/[$y^ՉquK0CESHrԑ >'d[44;o8 {[S̰N~Ɲ4Tx'gf 4>_8޲~O섅0YjP$r BJ5"M`kY"@?ERٛ!1.4Wf ja #/ a/8jNr5 /_IjPC2C%<҈.<ö75ᦣwp'4B|InBZeR_ȨsSCγSȘbSD᳃\Jp&~Wj4HqxU=@ #?&<~NpM <ϵGW)_+={&/KXa~Rx mh5bXlYUfdH| _/iI ?M1fu#O &b$~K<xhuIǣśE/6W^Ecjb$%,7> GI\<n/-nuQ Qr _SD刺xh;] Y4,cM-"!<_y_@Lo8o+ LdfmEbq7aJFa^+ݮp}ՇXUAV9=&k F>Reʹp1FXȯ!*;p $",[*W-^* /0&cw)rcB;r]^|*glFazuSdwfOlҵrLO>n G7(OYIeч0k;YfX;ԥ9""+e-e{0^NDw <<*"[%qfe!pi4Ȓ9|%&o?"H44e 6[X zzY$Qv_IʟEeUɨ0dWPn/!ɂމ*ךll6,B@?cY*lYyXGFt@1HB$G 0x gqrst:BHuɈ0T$z,``V PH r ɯ*Oq.ޓ4\,kXdziZ3b~g''#QJ>nDIt ցoC.6j!ٝW]əɭDح\2|CHOT&^Fkܩ8ȏ_?s9L~!l` |ZbQ~O3o+qA[O N&E.`W4#MY9&gW|P_*w VG>Al~/S5<%SGKvx\*ؤ_`۝G"y4FAiEш+w;txڻ?@>!c (;/Vd>z h3n߀V Oh^2VGdkܬ/*пy S8He}gj}&7s&95hD_t ^yg**L["<ݝ-?Jߦ٨]Rxt9|yͶq;EDwD0iPNJe^?T/[k6e@\I-P&}V,φ5k`h h{ު׾,yT-8pW@UUGQ&@M ʈ}~BnU20ӿ8bS A{|e*cc4) ֘@8iihRc~׍!ܣ]"Kc\7ye%3\ iS#'̗׋wPovAd0?`aX. b;Dx~ڝL[}f0:n-vy2# F>Zc⬦Gg5$KԐx`_?3J<N+-x5i4]ʏbe 9Qk32iwuUN.$dVM2Ԛ&<=6 r=b(Öm }uޟp3V8smԳ\xB| *Yz@&< h]*3eթYpmƹ&'DV#H}Kbh2j,-yw˙[V7or ށ޺^R5/LhaI`L9CN1&IWXY1Ǎwg?el*9q7kpĤ&ط/$bt8Ü)E _׫L^<7%XxTшFUL?ܫx6]KW,Rjy>Z{H&6 * )<-6nAt&xaf!$a rmO_q]xZC״c(n2(W8Bw/T:Ҙ eݲ 3,o]['70$IsKzlf'xV7bEG.F5fi̗XGYq*}X1Q^T9|Wī~S#]*N݊,n ;ٍ'hn\}{. Z3qM3or`!c3T!᮲vR9`>ĵ =HCS/{ 0 J>;գ74ޔo5dgK+XxM` tN픋G6Э-j3v_֜ճTv"C;C&E Q*S.?kA/w\طÚE;F{dJb1+NP=-k|GoT 巷6䮿 l#0s6_@Oj yLSZb-kn|eᣬ;,\@V̈́wEz=#3u~<0K`u1\zT]_ V<ŮDf*M riuaX XxXz7=XH?+v[&׽Z LT RThpq<Ғ~)|]+CM$yd%4 7C0Bu.0$ɤ] Z3, *=l\я@/X.)v-+ Q<z5Xcy,2:x٪8"k0aU~}>^Ȳ8W)1<g,ZHԻ=Dc eVW%#]/=Fi`'crȗw3݉[eA;F" ^kcIJKd4 W.e<Yz 9Xcgشx$dgW?ѣ'wL(鑬>fN1.u,8rC ]:GJ#J:}]qwo1$E>{?bO`S.AjZ_3[to!h7S.\4Ajru\޷rMBvhkY&+fj4ug_CjWY?^4b=U-xC(C$Wr>xG/SG_|)ž[쀻}眷)i>W(.UgXzYూ]/{b3i)/WPdutQ 5 vZG+|FB255淧. 51{XNR *FZ0ڎO mŪVIF5g+3&'C;'7'U w/Y~'@0|pbÖIH5PJIx塆@jtF!`TsFb2 _X $ 5 jyfGo(S9LafAP'd-iFKpjh#-\` ye װH +nZN KD^aSw1)%Pә!^VnkoVPf}lҡ^B;ԪT9V)95}fAV|b<0d*9Qn[ 3yH/ZԞ}:Y09.W6{TލX3)s|16ِ֠2da(H セC @턦5 k)ZJ9$\`)\QO7dF_ +hH]2J5 `]XVBv{y$ݥHIr䳡8%ߥ@S&0lqi>y =l!XlC JlZ50h4bDʠCB,wWYc7Ɔ* r5a?|*$dف|Z0V6WypfpW|"@Y؃f UYLqnrxf,1xڔ28:9(lvwTYUk KcrP?:NkUif/>ٶO!4*K8mh`q~5/ 碙l3Y5[?b٪f Db9 :@xObck%"ze55pҺhl.fV_2cekXFDU6.=E`A/uF>8$zQ=tsx $t2cnk=LZ&;~qL_`j )g/Ǥ'x2e@Je9"kFo%$UY"'}L`#ۤ|(eϟYE$d&_98*P8=<ű;=?cnaqmXLndOETYQ0~`12_]X[^,15B f_f- b=y~Ys ֍(ꂯzl'㌬v*r0i՜vfjf.[S \EBRLG ?@(w]VuXY)ۧ5'pi]xyk`']o|ή|ZzE=pGړBv2t%W|uo^j `בoWf$' E?W,E{H+dQ7xjym8@`Ez8".j9ӌx׎iP_~WrB+(7AV Ve]xVc S~lVXS0U٧ 5 e,10G@B DӸlGBN8BY'VTY75f i4=؈p6{6TR#Gn&RYRt/n441rzQ6zB /''SKxJGM(nʂ}~dS܇܃JK90!y.6x?7ˬ8EI%C?py}2P?Rƪq/]d&,tFvG㔔L},T*-:G/M= nBB]uvZH:P@3cH5cp)a@@bRYr_"CzZG|t Pe^-O৻r_,=1:w^r%}eQ=d#=ǓKX #yMx<&Mt'%bPWL27dHW/PLSvKn<;<hXZ%cٚ9 iD؇v-AFv<>k$YCf(yjz iMS uiv;;܋@^p{K**9!<§-GP9knT׉? H˒N{I!&IpxYfSIL(ifbW:6֌8fIYɯG%lY?4)iPK؁ ;006 햸5 S(W/;E]bzTh@ZU B,B M<}ᩲwXr> !_] zf!y(sv9`] 2w>w,y#"=õWP59W k;AkUǪ>?|K.O^sB[5cVE/ \lGrS8-‘]W˚e̜Lʰm} n,xҴ3UbG})[T:~H8 |AcĊ|^jNt)#LZ~3wkѓHk[\WO2>1wS̞jE @T ۽(DfI9 7v0=I :Ȭ=Z(yR_/C)x|,NhyhQO#iS$`{I׽i^f{ "yH;I|S/Uҟ|R+I ->{Ŕ$FٓȚkAG,2#nL`c(s/\gt噦#QHb' .)׌-X(E(;r Q43n(t#l~e=(}s2&v٭&vzX?S7Eֈ^hQjCpcd۝-#4رzv]zAZejV*.c__ʕ9A/^p?B+`tFS~$!m6gP>7>jx=D&#mwrhA G5xO+9W(6=TaxQK uϒ3P6"<[fVA{ 7)'rl2aS;\,"FoV# x$ zS;I hTK5 GԊ`K: %3;xP6=l/Ⱥ$FySGq('S?f]J .cF<$XK}y1k;4'8wd "%G E3f ;Oi۳o% UQIn1[QwĢ[OŹڦ~Sf$Xť}P[R (,-tȍt-?cok;^D3 7 R >gU J>0و%$b̍|l)ZYle\郡%lp)5%bkhd⽼>/l0SRr*A`?T^ E2x 4 ~W?e@_p"S/&^+ΰ)_JDAwߨ7צmϩrB9/Q@B0 Ťl.JE qPw7}w0@t vM{9ݘk$))Mq>>\U3f>}HTr)y#h#N=MQ5{l_s!y#TrX8 /Rtp) 4P$b~N8 x#ɲl.>%9MVLJTY\6z(q<7}I,|͐H]NHļOϔ]n8 s]8D +to/q2H r ȳe'2As莧t'm|u5ǯeWB,.JZKv秜Ӈ_D YkE@Qjv_l31/p6Ʃ~ vSqfڤ'Hr +׫889゙oH=] k .Y0tӜ5+kZhlםo>Ѣ+gQ@=g>g! 
;XMd=l3DkFٕݧ!NEoF Bl/5M[օQx8֓-l\>ZL7;gQW5 Y*7 W,|E>wl/҅؂2hlM'YVmOE;qfE/VD8g{J RKe\?M 26 JugUQ!q+|5شJB׺zn]eܠ$ko<*9 44Rf85^l24/\_ihX:ְD1b4<bXGGcD;(*`PeF,I'iKKU) ()tu4j{7^wx-́Iځ< X nOB .2_kwhg:W6tȷ[B,K鵒Ngr51 /|wFY*qgyp`;&W(%(HiZe_lkI(5HW) ?$ ovZz=]\ ֋T!ek -j8 Nl@I͓)ّ$LS1Guϗ!lr24ėZScRlv=v˷4U-M6"afG+Nl/J#L^Hw:: a;'b 1ymCu}݈f z0% 6徙gK `9r{4e`<%6>Vhet1_%6[\Bu s˹d7x2gZфS$5R ; +1u1¤g~btJd8 2BO~*$s|-m$" cL%=՟4\IIznA#S.st[z%[ݽS'QeZ^ZP ]E)+rؿ'5Fh}dn$E=buְ~e=ۈq\EGO5&{y*<(Zzܐ^",n蜮 Q?MY 1ٖjZ4KၻUmt6R{$ǘҔsykdל4DAߩXh^ݱG@ 6 oz ^4T1uh%^Đ#KFMPE'~~*p5]Y%JP'?1@zA]%m7gHBug?M]{.(Wȷ7<(JTH儲(B6'}3s(zۦھxPBsq# 1w390a5F(7n!Z#‡.BJ~۰,1<<˄qm1km+$ڔs(+eg c_>o:X7.gx@(32@bLZ1 fK%o|s@BY226#?J0x wMh|otF$$lwWnHԊce?3Pr8*8^\b ꠕEǗ/MY5WgvC!NR@,_0+v#q\ˆ\R96BL{f@]?響Cxvn;aj<ܽF /~W9(Ż/H#X_@eܚB7]eV$ziU؋ bkLし|\{e|I.h>rp"`ϩ@F[fI:NϜ4^(?[ gP1;*ymȩWˠ-Myfe eݮiЉZ@@KЁĢQ]\XSi!!QEhkb^"5O~bW[&;`Po4|XR46&v#p="#Nd/|;b30T=)Ԁ-CI3crX"HB<ˍTэΆ^w K6>H2C=ε2tB/ۢ@ DhkhNS!Y0y9הu/< LPa>+j&gUAfnάPTp h;kD: T2c;%[ᬱk}ő}|f }71׎z(#c2J>+#ޑC&ְy_۵G`€ U z6n;~>XGTk&E&b \#Tz+Tv?Kzxo<ͩnck#Su"ɘ0r2Wv,`R(ϰ2 {oA0 7,0i[L 4=p!U&:p jr -{y~xq{'6@?e@ 'BÜBybg8%c[ט?Yi-ϯ!wRH3)ѱ8*b6璟R~kTHVY+LdS#(Pn _+ S'0XnrA` n,?+v8z6$nc'rAA}q)Gche7s,i>0(lr^<6W>oТ+ecHȕrhC0Tz2`?ˋCmY`MVgK,R>M'2c@Mw+LmE~ꕼO1zkE9U .W6W D0gܲ"$pgӗ3 "Xb]~7CehsN R9tSF-Qns?3X}p`Dw29& |E͉}WklyKFE^ٶ-*Z;g6nH>wf(?k"jyW9dgV}BXlZD/o,kRXgR˙3fRLS{aEzϖt}d :8|DbVsV?>dbo((b٨ZٌNrnB`fNՊ~odjVJPFl80n f'YP͇NiSM1!2|q! _^~2庠XAdGDiwRc:\ukZ[5CfV A%Q@<_qƗq6 j ?K8 *K}Md%eNZG}VLrUa&^l}kS{3|hҍ#m iGp=#N&]&)xA0&NU|b^\p~@*i#Jl:7HQ O7vt} 3+3eCդ.!LFzRX^!iSbN-;Ⱥ-p0.3bl>r2xP*ϼo\.S{48ߚ9Mݘrfȫf7Y<IN,Sćʥj 2>k#!` +ⷺBRi!jyf߰[Ǖ06ΥS弧F!l{t 7%J b=Qlxƅخ<9 D=5(gUsj7uqҌ.YD>T݈6)&RrAp_/H v\pK?f[:}Gy/ڷqy!O\4TM)J_Xg`)> 'ASywX}sAO}5b74m,*(Vǰ Rj뗢u$ ̿Xi_wrEU ^O|k[4]CJɈ江~!&atQlv+e5v\δ[%3y). LbCޛ12BSOvYW'.8N8K:Mc"l=F5x01~%\BL# hۧYaH!xMWmM((ɛX\m Zc`GK$F#29=lx<G6zU~Kh'pRVmKI'SUNYT’{~ލɅ4ɑ^ N0F)!iD]a q3#Gv>_u0y/$?TEe| Ғwʄʢl>>8E0vPCALPjv KԘ~cv4 c<3S>JiUߠvjE|ƒU 㷱$Z}x}Xׯ{Upf]=Z.vZV@!n@egՏ̌zy67p2fU4Ί b䔲hn*JsO#"geK_X󭵻I 5ckAW4簵$YMxw6}lZ CFYyȀ%`FxfBW8zɧ-0eL4+Z65MpXSe(N_G՞Zi_c~yf*EX._ %9(&f!H>x(*$4}d rQϥzʁe  x黒Wi$B1Asj sHк_r`+v)5 ʠU:mu/u[_h,|G["&F~1k78,nr|_Ki{^Lطx+5NG%#q|v4ҴF_E sr9l4P CT#u2 f^r?a+ΕcNOK>=ѥ9I{+m|{'l, 8,lVg9nc%+ޞ )HaxlM s=Y8@;kV)+f‰ ApK{ZjK2L։Մ`9VtJU2n&eNXs*Yp~%U]"͏j7Vg@UY;_s CP%14FxS- B/~hT5BX(<$yvgյa&f؟ ^lMl| ؓ_j̻,qnvW@ Il8z>}r0*? fLEX>3Vϔhv`:b'~fsrBFlCkn3abEI {FyW.U<Ȫ\' Wd#-DH :+F&)ҙzpXt*s2 yO84i!lH~2b4QDjoGL&|MNbŧ6 XfüO*g(3Yy#` YmQߌYh.jan(tp1Ƚ"X;6iBHsweԔzK;e~N%@jʫ5<Fw9@F҈3Oۧv8uvn~E];y瘱=,qj lJ>_mwb$vOhEa ,ĝ :e۝c` Hg"OБ=3K%+&ACSv ^M$64C9e'StOuvkZ RVM{yB:В:ME"Gq TړTYQٵ7؋pNHZ̳Wˑgѥo+DxHw6kj8PgӺ5 w>,dK8ţ5Oj.j.4IV1 _>c{6iKU:kϚgZX(yS}X &|/A駊(Uh\h^|SS-soV}ؚ'O[miڹ|Q}f- 6 Zl,!dcvfrƪ`> }d5RrOLEj>u5=9,DH(gM]wWf}tewwtܺ ^qUl+V;mM=!;KbhB,Ğ\YױsҲ,o_R:Hyfc"ZL>eQhaM(G!|Y9!.*)\44v,2L}Nf.3z7E^`PqBVOabRQj7a<'"FrL~%(R-Wdޒb.Lѻa/0Ҙwb~>. G]4̠u)f_/=LPhcze,ZuJ)Qyf7mf|o"K!O^sGϭ:SDcT3B MdpҭSk4=@!4Exk8*rNۨj l]x+ʛ8 @vTmp+ |yqǟJrȈءo? PHSPqgmPѐX3%6LƆj`]L`t viV"yєшf2;n\k,**!G S2סg>49W1H7fyIW />S\dDc_pО+|ɶ nES8,o;z Tr\B -C6/J9`>0o.'G'L)V4xuJ9>Ȯ_uLW::J OٝЛe~F%lpԆ=TxkRv] F=IܜLC1d)+*$. mL T&@P= o"nSzGA!ߵ_L Ǣ}ØIA&(wT%}*AI]R*>/^PO2gP[Zl].RB~ 9dutMƓAH3r#֯zdIxj"ʯ*Xѿp: Cͨzxvg¬<+~=f́x<ǷءEL:A=y1]{27vu|~AHC@Uo+l6J~4dv]|QDiSd%G($iഓYqI@0s Ce;}H/d\Y93}@'%2HӑF|"? 
''QBBͪ8sX*e\׸Ov4maةO9Ycd7.FV^R6fIl?]N72 k5HPOx΁ѫu AXMasU_k??@˥ eWXLU7ʈOXajM᛫֓]ۧTI @qP"WHéE+%4-?5q l@TGY'ɣ.hiFn>ՔZ @$s<۩h}L1%^~0^fH[{[>K)ms ykl2;+ = Hs ' HJ,"y˿umø7 惮E?\$*#baܪh_(JX5$jTB^TƊA]oGmh*GX5:m~kRͳAb9!Z!%RM-+zOx se9(׿IP+ہnxs4Rn Z}IPnƏ0þ& (2.HƗy*qM1Af3j4̶aqp$OuF waqK'eUN*9|7Nʻ0 ;&kX-M%fJ˥,[-ؚռk}6ޱ.k0CҐyݒ4m$iܓ뮧M{HCd7 .X*ܙB#0!9 <%h06x]NO= tpK``]FSPsGN<((<7i_};ܗbټo.֓NR >T >Dvlfđeql{~}Jck9Q>uQ ‘& u%ߩmGĒ=w;Sur$=M?ҍ~7DhNch2.?8f<7+nplji"~jw5[ZBD[XR[)ДD/3D);TR[%!#ʠ:E;aLq񫀔RIn5T(Ez uːqe3 hjRģlN2JuE!| /).&yBqWɈM:q% 4qWrzӘzMmתـ/-SL7DG_ge]J emde{ݑӸ;^^1AXznl$Is.综@ =k>\P\5rK|cHgg44h4ָ5*h-}h%3EIPL?kU:A=ę4T#A6dS^tHpC%74gw!Y" L%co%f2W'O`yZ{-2`7Fۆs ػz >B X\gW k\{,7 3:$޷M/^փ8+#6tw 5#/ ZDyZUGD[N~?V39DRt3ISNs/ǷX܋c{b9@Vܺ}Nvj0Q`#ؓja^E W(ϩ.G(:8W)o}xUp  4^G h<{_{m1vԀFk̟֠]5)sk"*XBrQLE"_&`= H()c4ccmW]]tpB!>Σd%$%fU1cF]ߐDHk>mQB*!Vk 75'j@x"[<]nLq@$Ay7Mjo@<[3Fl]]/}\J:/,\(&dTKS)p@vq!lIL5v|ŭ &6/ +z73,՜ImjMPQ-}v5$ gڡ3AĭoqJoo{x[E }9٤*UbFH{MzUeנoG0L5^H,\ۣLGDmJYQITBjӠj_tj塚P&i$prj:Kg|, 2ن8,NNB`Qd53AmXǮe-̀@teg+Wʕ-5q'e;6 $t40)KrFwyNz| 8ӼQȒ}EX *M3zn4BK[2Ca-lD" 虰n3Bq4$Rxȕ58Vz"9~W/b$Dvw2nu!+NZJ}'1 Եb,=(Fټ[hNӾ~ r.voa N$ _3%C!5<=V7\j9;)ǯ:VAu$T"ef V]g- H9f#? #)NѱR`WAeUا@,-1B.gHFOVd4뗉3qb cRǡ KXcϿ>=F||B\hʥYDR]3O8 [˚1]}BUoo"RT .xsM 7s}կH,H~MelT'tGdFzs5⫕Fq]=>}OrdgNc $=MA@T_Kj~&4%` h0M<Ҩ{1'=<3*q&،L3TtMc= BcT }j#TLI6ς0˻H`Ft _^c֒5ҚTx2R?x c[ c=&9BTZ0 v,k$]>ijPf7WL6Ӓ"&z,(}l}9[ C RZVRI!گVMlfK(%ʳBizzfutdw,d#-L7y'N(XE1rmgGr+sDxGzSp ?c˽aRU] 95nj󋏔 _I6ܬ?(|7z \5hPXlA7E ʐ0'7^>M,~@M:x'CA'݂23~}166lXʣB8+f Xց9>61 GC7!P !^YۈtL ĺ8xK̡`Q |vo6 Buzǹm^\B諂(XdqO{j]K@ʥb$ 0ј1aѺj|2gԑzc?ly,`9 : %4x@1 (l,AFpzVֶ 'Z.xv"J陟yoчaHfL m,bR8b!}RM4Dv$> ruB,_HEJAM3|HbތS$@F N#Leጽ8Y0DFRfѮX!ʍ_~ٳyO'be2՜$F+zz#V+K_zS$,N%%  jJJ}%M`DJ%O)d[XCmOXGe^7n ['}GVRB$%pv#' 9v_.cSMi຋aJ/޴BUihZ˾[q1r{`q8y|SڎK[۽>}۷fnA Tfm^2_篍R`&wC/@d9t^G 3l34i7G 8 $3]Λ[-4 !8ĹCE⧥'~OYl4E )/?: ⡌Oꋭf !#+1GA>UkD8_"Д ,M(I;#}"D]܆FCp-ޕPMO?S?'9߯u:C_nȔxd%x i yc;tUcL'4#&ʌK=ˤ\V]0GF=39މp6>GjeiP'w5/X* jf3.J=t \:g"1uAt<RPL M =&_S,SUa7ߴκ)>//rZѦ W°$6v~#/-gw"KDROY>9f?oHiI7y}>e;'m 4؛S.H_\5d7H؍tb5(89 Q9/SUIp3xdYܒdohe#LKq5KT3x_I=40=̌DrL66߰L>ipwNPCh:D8Ѽwp83jʜD v7*Tj-ǐ>IH\ŕ=7(be{J0t\^Ҁri= Vc\p!X#)Bvv5JԂ3Z_gOpTQG>71ܲd!Rc6uY" X* )BUWnHϞjXTZU'zˈBQG`@[՛'$! p! i)+d<_27)s"p$>#YnL'mNhmUffn/)4F:,g4s%L~ͻ>Ԑua0wejnSp~T+͎;}|%o̶͂\0ж3G _%D7x°n)b!wabO@.7l]ddEլ/ %T u+e+rdM ^Ih wAk2`ROy!X߁<ԣٯf_o—ID_VIkxa8ε(VnfG=_\?? 
PTiz x g~0aNf+Fx` Cћȯͪ5Ru~uJx,$LjbaDUA "mA: ko:`@Ia߻\xsAv$ $3hATͰ;QiQi V4jY ΃ 2 X/'=:_P>kXb-MenL k`W@Bdmz~#Z9ăɏ/pe*J|[6vPbPq.*(Ws?4;Ƿ2)S=M$):u6➜~cu2x*W]MczboXXӉAD#*vdY@zoi}D"28a9M"3?n2!zk5"} P+/N Z4vþARP|L{\ksY0ϡQ`WNa}QcW,0Zo(+6Z v$6 _nfَ-ɖ4$ǃ2C> _%OMA[j;F#ܥvQ ._⭢7mImj ަ:mY+ɮ gcqɢ糞>ICYY~lO/,hx\߻8jJ<@G/]p/EEpUx~tL'HGs"Wfy?#*!YpJ}78 ]ï;3*^o1O = #8K:v'zϳپe-=.G0g0gf7iqus„Pآ:Zyޢe5x4P;?^/mMy_o2GXk."~KjUNռHO9w']8K&o#q9f1ޭ57 A`4EvxU'd4cNh)Q*Q@TͯU:&}`_8FNC>O[ocН%/$ʡqZa!Id9gz,bG1h"^WOڢP"W8{Ee`o~Mn@]I僯ƼKZRQEy3u_l%JdoŊ^GKȘ>jbR&pJv0BL&PǓs葇%@sZd26@6ɞOg~4?j쀠[$!g 'ƴKD5qe= uR)NP,wef(+UX@F-җal]T-[H(y6+iD 8  t{LP a屝e$$XFA#ZhvYO1[ $8d,LSn g5CxFFUc{1&?y0 isbpJ[!T$vȴμrJBޑ#{mr*XϩRəfC"J8$-7m۱fAջzA usXn0-:|AR~z2sz} E] q`C'UmbqQa~Ma83ZS,F&c0}"Q NMvFi&{W?'њ%95.afZ!w?y# &IQDru̵Ho;%fiϹ|l!!EQss*ݤTBக/!r^[gW^;lvrD8Md;u&F7s[}~w{yכ K6C6,v1zEl/$Ěϒ"UhX_<fO9gc;9Vu0i4!bgbLp0G1NӋ\'}ɪiM2 $q 2l%q%I Y}2Q7X6B]qNSNӧlqry Oq-l5*$w5OvTk+Qw'\g™e${@i:J3;KX]y~t q` t JủiAJKYWͲd}M;L eXÃ.: ҭhP>T/WԈNܙ_80.1Ö\Lmap'ShXͣsU?ݱ<(ꜝ}.{G'2F,ǎ[lj@~I⧝^^Kݲ)s/V<lӏPlԙg (@kM$("<#FP}ћ' 6: o륱_>;aE)nTP5H鈖_nk5"Y-I5ł"/xlCM)ar}rt0~r"g umQWĎb oFP)Nx)Xb^^?xBxeIL?Ň+UDn:Pڤh+;cj$ ':Vr޷37r#vZ ܑcԈTFt$.zaMdTAWuwO$ 9nៅ\Gذeld Y2zş71nEٚ{Tsdʸ]`o ʄneѡّLX>x k "L=@S/y҉+Y:e/gX8Cq_3zMGm-_lh6}tPL6[u/qxٺc@6*:s"Cu*#"I뵦i@Tfk[cq<$aYQYX*,gm]-6~,\ա#U#PVhbEjxޤ:Wo_"(rW+Ebeya0DYT?+AS޴a mŮFH)}pB7 P3аHaL)5o"}Ѝ&?adޮ$h/8F@)CZ B,x%'Qǿ|B0J2B"+'!'=2?TH(uU@HXV~F[=ـ3V>j-d@ܣ~*}4*0^fNROh:e/BġMz ֝ܒnPa:7@!ƒYx:UwJ%4Z,Gzt=eiÿX wk _Pp =0PJIzL8M֞ jl%JU eR)PeF]^_fPfBrmeIњZƧzuP*dCUF("y|@"˞yVUoMjil^-g&!n!Uf΋pcOZSMLG계0&QW9=Fs+ycu~idu$]By])Ydr=G0RA5=F}=4:Y.zx7T!zNAfd#OM<RRXE% AAժXai⾨YO/6^bK S?O4·6WF0b;ER |`(p2>JZK fm}_XK&ϡym" ʿyDLnp֩ÍԠ3BA\}#X,|Z/8U`nhV:)6 |p=.m!kV=N#9&U9(zg7ӐT(?*$ _i`[RB:qD'Z{ п0ą5SxߣP]M̪!-r@ +H3'6mVYD.=B%:<&i=33IKW(z %R5L1YI %_<{5 lB3Y_}.`홒)8#k;]]u6mֆ"0[dZUSX6SJM($ǯ IS.v~}*?!Y[=vRx; E wn!ŒVOa'3`Iʹ&ՠK;'+Rxڬ׽g@U#k)[I4W߯ ƠX]-1٪/aB,:M˲?f>1媩x晁6 ww4{EjJ ʒ4г^ OXuX$0xFO$GB3Nb0.tik%8xвV|/rJ_RL  郏l(}re9] Mzbo/H/I}mX \D$EjK _DΓYHw뇷H:PRJ90,˸t<hgwvI$2!H=K`nmp K@xkF/[l#~ -l3!"Ri"E3L?-#oƖ5)fCy_]j'$ jdbQ31߽,&Hr}ء?xk. 拉{W:8E$Pzaٿ:] s鼧ӠECY|o 6XnX* Ƶ+9ʷ(G5 R3j-AiE/[=PCZjӼ8A޼@ % bep&i2Gæ9 U(`BwNPI2+{$-Ni8C+#QuͥR4l8Dڥg+A5<͝ a}ܯO5lr^X5lڰE,/Ȥyp8y$Jqh{~` d4C+_Bח9ǡҿ˨+/Y1ݖnIIŭ,kF֜+. )!o~L&a3OPf/$b: 7ȥ@e=}LxR@-$6^^<9puxc0Joe6f 댷? U틯H+Jw׺wȆb|?b qXMa}{T̾2n垲-9n|Ỻl Ÿ~C&VNW4oF$3A9!«i.i_'L۵`B ȕ!RNZ6Uma+N>&R`PAhʵgN7]F\,IM[IWϯHWQF˥mGє0 QήڟrOɭCמ/֯hQoWoH*Fbޜa oM/‘.6Pa5`bK$Ʃ "qsMR1X.*"LfFAyyu0E5sZUDo)1,! I`oMA"^<ǮXcw3J8ޚTH1B&qy9{z1jO@d99caq]#[צ)@'55ȍ(Ϭ&+DUa2s+)_mc\mIe kq?B8<*~mVKw%Z{F"a]R-'&8m9b2tH.` #ݐzfVm>2`q<)6ĹnXRۼW= $ 4(#Cds$U%| X':oEU6nr/ð>2SGB܅&"t:5Y,\o/mښ\`9{3o5ZY)X`,6(/vUw1}9U@ =$`Bq΃UZo>5bzZ$ˎ\p@ Ii?PT0u6T/aUQ̭;vhGg\lX.cSoü o~Vh(aْs0fhD_W@rHepZA{=xج찎MN, _&BqoSTj=74-}4k$׳Hiv `57kbȑ[@8 NOW auhP\7'D̪5{I7dk꠳G"!0B E?zhd;3]J#M$ՒPFXM}6^θ=|GצlzI`ȅ%ŏlD-T-znKH"%_">USX.9nqCQ+,G> Z"6+S-qH䵊ovy:JS8A\X!zE;`kϝO"[[`n2g +\1`>hA;\P-gC>+yC 0g)Ne*Sa&60@,!\ԹJ=懑/H%i61#MG-s& Ճ'TXb`%ͦB-MBKlq DiO׏.gqY;ZD֛asbB,,̺EpJ '~r|<ubA#3jD#׻U;:԰`=~ z_t/PQҏj.9 [NS %ctY@llcz0~nP師e1UiTt 12pB0fXA.is, (Rl⚵.1vv3ƈ^ۭB]^MB[~6l+oNao3ƗXLD2m!kk2(G+A'VY+4+Ŭn}eeG(4]!wޡFhwx#0gތ jIE3GqiSҾ-eD]r}ƇpU4UcX5; bM;dG}'/W[$H. ik=)BM 2e<8;ħ!s,{L[8[̨"m.RK[_R“8GY;rjH٩%m QEsqbL_zԚ(t^>ȘGDʩw r_~*c NJqGSuxF J82ǡ!8`ͫ ]r=,zP+m LF'P!Z)ՐC!\pTatֆ&~98YA`+ޡo(#)R&s:{vbY3\~4nīW,Xh|eQ'3`u)Z W(C,$Miօa\c1eTolaNܐn䓔õt q?$ /kEFY8 R R -KXLހ\M=&a!]F5\?HΎhp毖 cf&ׅJ=AU0.^9mRi 峌Br(3S8 y/4j m3{x5AW&f`5Te]^ŤX7ZޭeE24 _ -= `d K?YQuN-y] kާ\lwxQ d9Y60hU}ù-tK }Mȏ\&+LZ8֋" p2Ou &\Hc$U*q^o!e-K 䣦ޔkʸ+-U6"bj^sÿ"Ig&bqp"gG1llz6;I)KW4B}y6/%c~ g H?yfDqh_aVuϹJk#&IxlhA#pAB,>-%jӀb!8`P¯ۆ#faGN uD5鈦\>+}ͻ( /13k=5±c{MgWnr,/Z$/v%j! 
[binary data omitted: remainder of the preceding gzip-compressed archive member, not representable as text]
dipy-0.13.0/dipy/data/files/fib2.pkl.gz          [binary data omitted: gzip-compressed Python pickle; embedded gzip name test.pkl]
dipy-0.13.0/dipy/data/files/func_coef.nii.gz     [binary data omitted: gzip-compressed NIfTI image; embedded gzip name func_coef.nii]
dipy-0.13.0/dipy/data/files/func_discrete.nii.gz [binary data omitted: gzip-compressed NIfTI image; embedded gzip name sphere_func.nii]
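[Note: the snippet below is not part of the archive. It is a minimal sketch of how the binary members listed above, and the grad_514.txt table that follows, could be inspected after the archive is extracted. The paths are the tar member names; gzip and pickle come from the Python standard library, and nibabel/numpy are assumed to be available (both are dipy dependencies). Whether the pickle unpickles cleanly depends on the classes it stores being importable.]

import gzip
import pickle

import nibabel as nib
import numpy as np

# fib2.pkl.gz: a gzip-compressed Python pickle (embedded gzip name test.pkl).
# Unpickling assumes any classes stored inside are importable in this environment.
with gzip.open("dipy-0.13.0/dipy/data/files/fib2.pkl.gz", "rb") as f:
    fib = pickle.load(f)

# The .nii.gz members are gzip-compressed NIfTI images; nibabel reads them directly.
coef_img = nib.load("dipy-0.13.0/dipy/data/files/func_coef.nii.gz")
discrete_img = nib.load("dipy-0.13.0/dipy/data/files/func_discrete.nii.gz")
print(coef_img.shape, discrete_img.shape)

# grad_514.txt (the next member, shown below) is a plain-text table of four
# numeric columns per row; the file name suggests 514 rows.
grad = np.loadtxt("dipy-0.13.0/dipy/data/files/grad_514.txt")
print(grad.shape)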
/I.hx X&FuG#CiHJ;q׳UB*ViuĩBY op?J6['2zrq3h5X_AsOӲwmzt; a Zm)J ϼEζsD2Y.iV/wL14 sp| qCAzrD*#ʹcr[H9CY VL?yjP_&D7!Bo%В7 P`!$B` )2KM|k^>ф1u|lBl7zUמ6/Mlu__u MNR<9TvCZ"n1 cDmϘPJA-.s5d]#3@Q]QD%$ْ&Z-wJ m,V@"4]FCf(1 VcDBȾ̕E)%drYH`vp VHͻEpC^ LHBdKƹjwVWAfOxԑ+ˇ A 0)wx A}<X(}>)-rضSn3d^-bknŋt\^rG ͵nZ9Dw,nm8Hq2(G:Uŀ4:%0g)@PvS(h&+#ԅE^ϵEC Ϙ$ QRuN~!Fzg B\/sL¸񢝆_cÛ=mo 節ghdd =$7|+@ŧ0Uip[µ`'k㸑>4M:"t<|ѪP+X8![ܸl dӔ:g7AS\0փo6s}{%ёHC`\9p {4P& za  dipy-0.13.0/dipy/data/files/grad_514.txt000066400000000000000000001013031317371701200175670ustar00rootroot00000000000000 0.0000000e+000 0.0000000e+000 0.0000000e+000 0.0000000e+000 1.6000000e+003 -2.0000000e-001 0.0000000e+000 0.0000000e+000 1.6000000e+003 0.0000000e+000 -2.0000000e-001 0.0000000e+000 1.6000000e+003 0.0000000e+000 0.0000000e+000 -2.0000000e-001 1.6000000e+003 0.0000000e+000 0.0000000e+000 2.0000000e-001 1.6000000e+003 0.0000000e+000 2.0000000e-001 0.0000000e+000 1.6000000e+003 2.0000000e-001 0.0000000e+000 0.0000000e+000 2.2627417e+003 -2.0000000e-001 -2.0000000e-001 0.0000000e+000 2.2627417e+003 -2.0000000e-001 0.0000000e+000 -2.0000000e-001 2.2627417e+003 -2.0000000e-001 0.0000000e+000 2.0000000e-001 2.2627417e+003 -2.0000000e-001 2.0000000e-001 0.0000000e+000 2.2627417e+003 0.0000000e+000 -2.0000000e-001 -2.0000000e-001 2.2627417e+003 0.0000000e+000 -2.0000000e-001 2.0000000e-001 2.2627417e+003 0.0000000e+000 2.0000000e-001 -2.0000000e-001 2.2627417e+003 0.0000000e+000 2.0000000e-001 2.0000000e-001 2.2627417e+003 2.0000000e-001 -2.0000000e-001 0.0000000e+000 2.2627417e+003 2.0000000e-001 0.0000000e+000 -2.0000000e-001 2.2627417e+003 2.0000000e-001 0.0000000e+000 2.0000000e-001 2.2627417e+003 2.0000000e-001 2.0000000e-001 0.0000000e+000 2.7712813e+003 -2.0000000e-001 -2.0000000e-001 -2.0000000e-001 2.7712813e+003 -2.0000000e-001 -2.0000000e-001 2.0000000e-001 2.7712813e+003 -2.0000000e-001 2.0000000e-001 -2.0000000e-001 2.7712813e+003 -2.0000000e-001 2.0000000e-001 2.0000000e-001 2.7712813e+003 2.0000000e-001 -2.0000000e-001 -2.0000000e-001 2.7712813e+003 2.0000000e-001 -2.0000000e-001 2.0000000e-001 2.7712813e+003 2.0000000e-001 2.0000000e-001 -2.0000000e-001 2.7712813e+003 2.0000000e-001 2.0000000e-001 2.0000000e-001 3.2000000e+003 -4.0000000e-001 0.0000000e+000 0.0000000e+000 3.2000000e+003 0.0000000e+000 -4.0000000e-001 0.0000000e+000 3.2000000e+003 0.0000000e+000 0.0000000e+000 -4.0000000e-001 3.2000000e+003 0.0000000e+000 0.0000000e+000 4.0000000e-001 3.2000000e+003 0.0000000e+000 4.0000000e-001 0.0000000e+000 3.2000000e+003 4.0000000e-001 0.0000000e+000 0.0000000e+000 3.5777088e+003 -4.0000000e-001 -2.0000000e-001 0.0000000e+000 3.5777088e+003 -4.0000000e-001 0.0000000e+000 -2.0000000e-001 3.5777088e+003 -4.0000000e-001 0.0000000e+000 2.0000000e-001 3.5777088e+003 -4.0000000e-001 2.0000000e-001 0.0000000e+000 3.5777088e+003 -2.0000000e-001 -4.0000000e-001 0.0000000e+000 3.5777088e+003 -2.0000000e-001 0.0000000e+000 -4.0000000e-001 3.5777088e+003 -2.0000000e-001 0.0000000e+000 4.0000000e-001 3.5777088e+003 -2.0000000e-001 4.0000000e-001 0.0000000e+000 3.5777088e+003 0.0000000e+000 -4.0000000e-001 -2.0000000e-001 3.5777088e+003 0.0000000e+000 -4.0000000e-001 2.0000000e-001 3.5777088e+003 0.0000000e+000 -2.0000000e-001 -4.0000000e-001 3.5777088e+003 0.0000000e+000 -2.0000000e-001 4.0000000e-001 3.5777088e+003 0.0000000e+000 2.0000000e-001 -4.0000000e-001 3.5777088e+003 0.0000000e+000 2.0000000e-001 4.0000000e-001 3.5777088e+003 0.0000000e+000 4.0000000e-001 -2.0000000e-001 3.5777088e+003 0.0000000e+000 4.0000000e-001 2.0000000e-001 3.5777088e+003 
2.0000000e-001 -4.0000000e-001 0.0000000e+000 3.5777088e+003 2.0000000e-001 0.0000000e+000 -4.0000000e-001 3.5777088e+003 2.0000000e-001 0.0000000e+000 4.0000000e-001 3.5777088e+003 2.0000000e-001 4.0000000e-001 0.0000000e+000 3.5777088e+003 4.0000000e-001 -2.0000000e-001 0.0000000e+000 3.5777088e+003 4.0000000e-001 0.0000000e+000 -2.0000000e-001 3.5777088e+003 4.0000000e-001 0.0000000e+000 2.0000000e-001 3.5777088e+003 4.0000000e-001 2.0000000e-001 0.0000000e+000 3.9191836e+003 -4.0000000e-001 -2.0000000e-001 -2.0000000e-001 3.9191836e+003 -4.0000000e-001 -2.0000000e-001 2.0000000e-001 3.9191836e+003 -4.0000000e-001 2.0000000e-001 -2.0000000e-001 3.9191836e+003 -4.0000000e-001 2.0000000e-001 2.0000000e-001 3.9191836e+003 -2.0000000e-001 -4.0000000e-001 -2.0000000e-001 3.9191836e+003 -2.0000000e-001 -4.0000000e-001 2.0000000e-001 3.9191836e+003 -2.0000000e-001 -2.0000000e-001 -4.0000000e-001 3.9191836e+003 -2.0000000e-001 -2.0000000e-001 4.0000000e-001 3.9191836e+003 -2.0000000e-001 2.0000000e-001 -4.0000000e-001 3.9191836e+003 -2.0000000e-001 2.0000000e-001 4.0000000e-001 3.9191836e+003 -2.0000000e-001 4.0000000e-001 -2.0000000e-001 3.9191836e+003 -2.0000000e-001 4.0000000e-001 2.0000000e-001 3.9191836e+003 2.0000000e-001 -4.0000000e-001 -2.0000000e-001 3.9191836e+003 2.0000000e-001 -4.0000000e-001 2.0000000e-001 3.9191836e+003 2.0000000e-001 -2.0000000e-001 -4.0000000e-001 3.9191836e+003 2.0000000e-001 -2.0000000e-001 4.0000000e-001 3.9191836e+003 2.0000000e-001 2.0000000e-001 -4.0000000e-001 3.9191836e+003 2.0000000e-001 2.0000000e-001 4.0000000e-001 3.9191836e+003 2.0000000e-001 4.0000000e-001 -2.0000000e-001 3.9191836e+003 2.0000000e-001 4.0000000e-001 2.0000000e-001 3.9191836e+003 4.0000000e-001 -2.0000000e-001 -2.0000000e-001 3.9191836e+003 4.0000000e-001 -2.0000000e-001 2.0000000e-001 3.9191836e+003 4.0000000e-001 2.0000000e-001 -2.0000000e-001 3.9191836e+003 4.0000000e-001 2.0000000e-001 2.0000000e-001 4.5254834e+003 -4.0000000e-001 -4.0000000e-001 0.0000000e+000 4.5254834e+003 -4.0000000e-001 0.0000000e+000 -4.0000000e-001 4.5254834e+003 -4.0000000e-001 0.0000000e+000 4.0000000e-001 4.5254834e+003 -4.0000000e-001 4.0000000e-001 0.0000000e+000 4.5254834e+003 0.0000000e+000 -4.0000000e-001 -4.0000000e-001 4.5254834e+003 0.0000000e+000 -4.0000000e-001 4.0000000e-001 4.5254834e+003 0.0000000e+000 4.0000000e-001 -4.0000000e-001 4.5254834e+003 0.0000000e+000 4.0000000e-001 4.0000000e-001 4.5254834e+003 4.0000000e-001 -4.0000000e-001 0.0000000e+000 4.5254834e+003 4.0000000e-001 0.0000000e+000 -4.0000000e-001 4.5254834e+003 4.0000000e-001 0.0000000e+000 4.0000000e-001 4.5254834e+003 4.0000000e-001 4.0000000e-001 0.0000000e+000 4.8000000e+003 -6.0000000e-001 0.0000000e+000 0.0000000e+000 4.8000000e+003 -4.0000000e-001 -4.0000000e-001 -2.0000000e-001 4.8000000e+003 -4.0000000e-001 -4.0000000e-001 2.0000000e-001 4.8000000e+003 -4.0000000e-001 -2.0000000e-001 -4.0000000e-001 4.8000000e+003 -4.0000000e-001 -2.0000000e-001 4.0000000e-001 4.8000000e+003 -4.0000000e-001 2.0000000e-001 -4.0000000e-001 4.8000000e+003 -4.0000000e-001 2.0000000e-001 4.0000000e-001 4.8000000e+003 -4.0000000e-001 4.0000000e-001 -2.0000000e-001 4.8000000e+003 -4.0000000e-001 4.0000000e-001 2.0000000e-001 4.8000000e+003 -2.0000000e-001 -4.0000000e-001 -4.0000000e-001 4.8000000e+003 -2.0000000e-001 -4.0000000e-001 4.0000000e-001 4.8000000e+003 -2.0000000e-001 4.0000000e-001 -4.0000000e-001 4.8000000e+003 -2.0000000e-001 4.0000000e-001 4.0000000e-001 4.8000000e+003 0.0000000e+000 -6.0000000e-001 0.0000000e+000 
4.8000000e+003 0.0000000e+000 0.0000000e+000 -6.0000000e-001 4.8000000e+003 0.0000000e+000 0.0000000e+000 6.0000000e-001 4.8000000e+003 0.0000000e+000 6.0000000e-001 0.0000000e+000 4.8000000e+003 2.0000000e-001 -4.0000000e-001 -4.0000000e-001 4.8000000e+003 2.0000000e-001 -4.0000000e-001 4.0000000e-001 4.8000000e+003 2.0000000e-001 4.0000000e-001 -4.0000000e-001 4.8000000e+003 2.0000000e-001 4.0000000e-001 4.0000000e-001 4.8000000e+003 4.0000000e-001 -4.0000000e-001 -2.0000000e-001 4.8000000e+003 4.0000000e-001 -4.0000000e-001 2.0000000e-001 4.8000000e+003 4.0000000e-001 -2.0000000e-001 -4.0000000e-001 4.8000000e+003 4.0000000e-001 -2.0000000e-001 4.0000000e-001 4.8000000e+003 4.0000000e-001 2.0000000e-001 -4.0000000e-001 4.8000000e+003 4.0000000e-001 2.0000000e-001 4.0000000e-001 4.8000000e+003 4.0000000e-001 4.0000000e-001 -2.0000000e-001 4.8000000e+003 4.0000000e-001 4.0000000e-001 2.0000000e-001 4.8000000e+003 6.0000000e-001 0.0000000e+000 0.0000000e+000 5.0596443e+003 -6.0000000e-001 -2.0000000e-001 0.0000000e+000 5.0596443e+003 -6.0000000e-001 0.0000000e+000 -2.0000000e-001 5.0596443e+003 -6.0000000e-001 0.0000000e+000 2.0000000e-001 5.0596443e+003 -6.0000000e-001 2.0000000e-001 0.0000000e+000 5.0596443e+003 -2.0000000e-001 -6.0000000e-001 0.0000000e+000 5.0596443e+003 -2.0000000e-001 0.0000000e+000 -6.0000000e-001 5.0596443e+003 -2.0000000e-001 0.0000000e+000 6.0000000e-001 5.0596443e+003 -2.0000000e-001 6.0000000e-001 0.0000000e+000 5.0596443e+003 0.0000000e+000 -6.0000000e-001 -2.0000000e-001 5.0596443e+003 0.0000000e+000 -6.0000000e-001 2.0000000e-001 5.0596443e+003 0.0000000e+000 -2.0000000e-001 -6.0000000e-001 5.0596443e+003 0.0000000e+000 -2.0000000e-001 6.0000000e-001 5.0596443e+003 0.0000000e+000 2.0000000e-001 -6.0000000e-001 5.0596443e+003 0.0000000e+000 2.0000000e-001 6.0000000e-001 5.0596443e+003 0.0000000e+000 6.0000000e-001 -2.0000000e-001 5.0596443e+003 0.0000000e+000 6.0000000e-001 2.0000000e-001 5.0596443e+003 2.0000000e-001 -6.0000000e-001 0.0000000e+000 5.0596443e+003 2.0000000e-001 0.0000000e+000 -6.0000000e-001 5.0596443e+003 2.0000000e-001 0.0000000e+000 6.0000000e-001 5.0596443e+003 2.0000000e-001 6.0000000e-001 0.0000000e+000 5.0596443e+003 6.0000000e-001 -2.0000000e-001 0.0000000e+000 5.0596443e+003 6.0000000e-001 0.0000000e+000 -2.0000000e-001 5.0596443e+003 6.0000000e-001 0.0000000e+000 2.0000000e-001 5.0596443e+003 6.0000000e-001 2.0000000e-001 0.0000000e+000 5.3065997e+003 -6.0000000e-001 -2.0000000e-001 -2.0000000e-001 5.3065997e+003 -6.0000000e-001 -2.0000000e-001 2.0000000e-001 5.3065997e+003 -6.0000000e-001 2.0000000e-001 -2.0000000e-001 5.3065997e+003 -6.0000000e-001 2.0000000e-001 2.0000000e-001 5.3065997e+003 -2.0000000e-001 -6.0000000e-001 -2.0000000e-001 5.3065997e+003 -2.0000000e-001 -6.0000000e-001 2.0000000e-001 5.3065997e+003 -2.0000000e-001 -2.0000000e-001 -6.0000000e-001 5.3065997e+003 -2.0000000e-001 -2.0000000e-001 6.0000000e-001 5.3065997e+003 -2.0000000e-001 2.0000000e-001 -6.0000000e-001 5.3065997e+003 -2.0000000e-001 2.0000000e-001 6.0000000e-001 5.3065997e+003 -2.0000000e-001 6.0000000e-001 -2.0000000e-001 5.3065997e+003 -2.0000000e-001 6.0000000e-001 2.0000000e-001 5.3065997e+003 2.0000000e-001 -6.0000000e-001 -2.0000000e-001 5.3065997e+003 2.0000000e-001 -6.0000000e-001 2.0000000e-001 5.3065997e+003 2.0000000e-001 -2.0000000e-001 -6.0000000e-001 5.3065997e+003 2.0000000e-001 -2.0000000e-001 6.0000000e-001 5.3065997e+003 2.0000000e-001 2.0000000e-001 -6.0000000e-001 5.3065997e+003 2.0000000e-001 2.0000000e-001 6.0000000e-001 
5.3065997e+003 2.0000000e-001 6.0000000e-001 -2.0000000e-001 5.3065997e+003 2.0000000e-001 6.0000000e-001 2.0000000e-001 5.3065997e+003 6.0000000e-001 -2.0000000e-001 -2.0000000e-001 5.3065997e+003 6.0000000e-001 -2.0000000e-001 2.0000000e-001 5.3065997e+003 6.0000000e-001 2.0000000e-001 -2.0000000e-001 5.3065997e+003 6.0000000e-001 2.0000000e-001 2.0000000e-001 5.5425626e+003 -4.0000000e-001 -4.0000000e-001 -4.0000000e-001 5.5425626e+003 -4.0000000e-001 -4.0000000e-001 4.0000000e-001 5.5425626e+003 -4.0000000e-001 4.0000000e-001 -4.0000000e-001 5.5425626e+003 -4.0000000e-001 4.0000000e-001 4.0000000e-001 5.5425626e+003 4.0000000e-001 -4.0000000e-001 -4.0000000e-001 5.5425626e+003 4.0000000e-001 -4.0000000e-001 4.0000000e-001 5.5425626e+003 4.0000000e-001 4.0000000e-001 -4.0000000e-001 5.5425626e+003 4.0000000e-001 4.0000000e-001 4.0000000e-001 5.7688820e+003 -6.0000000e-001 -4.0000000e-001 0.0000000e+000 5.7688820e+003 -6.0000000e-001 0.0000000e+000 -4.0000000e-001 5.7688820e+003 -6.0000000e-001 0.0000000e+000 4.0000000e-001 5.7688820e+003 -6.0000000e-001 4.0000000e-001 0.0000000e+000 5.7688820e+003 -4.0000000e-001 -6.0000000e-001 0.0000000e+000 5.7688820e+003 -4.0000000e-001 0.0000000e+000 -6.0000000e-001 5.7688820e+003 -4.0000000e-001 0.0000000e+000 6.0000000e-001 5.7688820e+003 -4.0000000e-001 6.0000000e-001 0.0000000e+000 5.7688820e+003 0.0000000e+000 -6.0000000e-001 -4.0000000e-001 5.7688820e+003 0.0000000e+000 -6.0000000e-001 4.0000000e-001 5.7688820e+003 0.0000000e+000 -4.0000000e-001 -6.0000000e-001 5.7688820e+003 0.0000000e+000 -4.0000000e-001 6.0000000e-001 5.7688820e+003 0.0000000e+000 4.0000000e-001 -6.0000000e-001 5.7688820e+003 0.0000000e+000 4.0000000e-001 6.0000000e-001 5.7688820e+003 0.0000000e+000 6.0000000e-001 -4.0000000e-001 5.7688820e+003 0.0000000e+000 6.0000000e-001 4.0000000e-001 5.7688820e+003 4.0000000e-001 -6.0000000e-001 0.0000000e+000 5.7688820e+003 4.0000000e-001 0.0000000e+000 -6.0000000e-001 5.7688820e+003 4.0000000e-001 0.0000000e+000 6.0000000e-001 5.7688820e+003 4.0000000e-001 6.0000000e-001 0.0000000e+000 5.7688820e+003 6.0000000e-001 -4.0000000e-001 0.0000000e+000 5.7688820e+003 6.0000000e-001 0.0000000e+000 -4.0000000e-001 5.7688820e+003 6.0000000e-001 0.0000000e+000 4.0000000e-001 5.7688820e+003 6.0000000e-001 4.0000000e-001 0.0000000e+000 5.9866518e+003 -6.0000000e-001 -4.0000000e-001 -2.0000000e-001 5.9866518e+003 -6.0000000e-001 -4.0000000e-001 2.0000000e-001 5.9866518e+003 -6.0000000e-001 -2.0000000e-001 -4.0000000e-001 5.9866518e+003 -6.0000000e-001 -2.0000000e-001 4.0000000e-001 5.9866518e+003 -6.0000000e-001 2.0000000e-001 -4.0000000e-001 5.9866518e+003 -6.0000000e-001 2.0000000e-001 4.0000000e-001 5.9866518e+003 -6.0000000e-001 4.0000000e-001 -2.0000000e-001 5.9866518e+003 -6.0000000e-001 4.0000000e-001 2.0000000e-001 5.9866518e+003 -4.0000000e-001 -6.0000000e-001 -2.0000000e-001 5.9866518e+003 -4.0000000e-001 -6.0000000e-001 2.0000000e-001 5.9866518e+003 -4.0000000e-001 -2.0000000e-001 -6.0000000e-001 5.9866518e+003 -4.0000000e-001 -2.0000000e-001 6.0000000e-001 5.9866518e+003 -4.0000000e-001 2.0000000e-001 -6.0000000e-001 5.9866518e+003 -4.0000000e-001 2.0000000e-001 6.0000000e-001 5.9866518e+003 -4.0000000e-001 6.0000000e-001 -2.0000000e-001 5.9866518e+003 -4.0000000e-001 6.0000000e-001 2.0000000e-001 5.9866518e+003 -2.0000000e-001 -6.0000000e-001 -4.0000000e-001 5.9866518e+003 -2.0000000e-001 -6.0000000e-001 4.0000000e-001 5.9866518e+003 -2.0000000e-001 -4.0000000e-001 -6.0000000e-001 5.9866518e+003 -2.0000000e-001 -4.0000000e-001 
6.0000000e-001 5.9866518e+003 -2.0000000e-001 4.0000000e-001 -6.0000000e-001 5.9866518e+003 -2.0000000e-001 4.0000000e-001 6.0000000e-001 5.9866518e+003 -2.0000000e-001 6.0000000e-001 -4.0000000e-001 5.9866518e+003 -2.0000000e-001 6.0000000e-001 4.0000000e-001 5.9866518e+003 2.0000000e-001 -6.0000000e-001 -4.0000000e-001 5.9866518e+003 2.0000000e-001 -6.0000000e-001 4.0000000e-001 5.9866518e+003 2.0000000e-001 -4.0000000e-001 -6.0000000e-001 5.9866518e+003 2.0000000e-001 -4.0000000e-001 6.0000000e-001 5.9866518e+003 2.0000000e-001 4.0000000e-001 -6.0000000e-001 5.9866518e+003 2.0000000e-001 4.0000000e-001 6.0000000e-001 5.9866518e+003 2.0000000e-001 6.0000000e-001 -4.0000000e-001 5.9866518e+003 2.0000000e-001 6.0000000e-001 4.0000000e-001 5.9866518e+003 4.0000000e-001 -6.0000000e-001 -2.0000000e-001 5.9866518e+003 4.0000000e-001 -6.0000000e-001 2.0000000e-001 5.9866518e+003 4.0000000e-001 -2.0000000e-001 -6.0000000e-001 5.9866518e+003 4.0000000e-001 -2.0000000e-001 6.0000000e-001 5.9866518e+003 4.0000000e-001 2.0000000e-001 -6.0000000e-001 5.9866518e+003 4.0000000e-001 2.0000000e-001 6.0000000e-001 5.9866518e+003 4.0000000e-001 6.0000000e-001 -2.0000000e-001 5.9866518e+003 4.0000000e-001 6.0000000e-001 2.0000000e-001 5.9866518e+003 6.0000000e-001 -4.0000000e-001 -2.0000000e-001 5.9866518e+003 6.0000000e-001 -4.0000000e-001 2.0000000e-001 5.9866518e+003 6.0000000e-001 -2.0000000e-001 -4.0000000e-001 5.9866518e+003 6.0000000e-001 -2.0000000e-001 4.0000000e-001 5.9866518e+003 6.0000000e-001 2.0000000e-001 -4.0000000e-001 5.9866518e+003 6.0000000e-001 2.0000000e-001 4.0000000e-001 5.9866518e+003 6.0000000e-001 4.0000000e-001 -2.0000000e-001 5.9866518e+003 6.0000000e-001 4.0000000e-001 2.0000000e-001 6.4000000e+003 -8.0000000e-001 0.0000000e+000 0.0000000e+000 6.4000000e+003 0.0000000e+000 -8.0000000e-001 0.0000000e+000 6.4000000e+003 0.0000000e+000 0.0000000e+000 -8.0000000e-001 6.4000000e+003 0.0000000e+000 0.0000000e+000 8.0000000e-001 6.4000000e+003 0.0000000e+000 8.0000000e-001 0.0000000e+000 6.4000000e+003 8.0000000e-001 0.0000000e+000 0.0000000e+000 6.5969690e+003 -8.0000000e-001 -2.0000000e-001 0.0000000e+000 6.5969690e+003 -8.0000000e-001 0.0000000e+000 -2.0000000e-001 6.5969690e+003 -8.0000000e-001 0.0000000e+000 2.0000000e-001 6.5969690e+003 -8.0000000e-001 2.0000000e-001 0.0000000e+000 6.5969690e+003 -6.0000000e-001 -4.0000000e-001 -4.0000000e-001 6.5969690e+003 -6.0000000e-001 -4.0000000e-001 4.0000000e-001 6.5969690e+003 -6.0000000e-001 4.0000000e-001 -4.0000000e-001 6.5969690e+003 -6.0000000e-001 4.0000000e-001 4.0000000e-001 6.5969690e+003 -4.0000000e-001 -6.0000000e-001 -4.0000000e-001 6.5969690e+003 -4.0000000e-001 -6.0000000e-001 4.0000000e-001 6.5969690e+003 -4.0000000e-001 -4.0000000e-001 -6.0000000e-001 6.5969690e+003 -4.0000000e-001 -4.0000000e-001 6.0000000e-001 6.5969690e+003 -4.0000000e-001 4.0000000e-001 -6.0000000e-001 6.5969690e+003 -4.0000000e-001 4.0000000e-001 6.0000000e-001 6.5969690e+003 -4.0000000e-001 6.0000000e-001 -4.0000000e-001 6.5969690e+003 -4.0000000e-001 6.0000000e-001 4.0000000e-001 6.5969690e+003 -2.0000000e-001 -8.0000000e-001 0.0000000e+000 6.5969690e+003 -2.0000000e-001 0.0000000e+000 -8.0000000e-001 6.5969690e+003 -2.0000000e-001 0.0000000e+000 8.0000000e-001 6.5969690e+003 -2.0000000e-001 8.0000000e-001 0.0000000e+000 6.5969690e+003 0.0000000e+000 -8.0000000e-001 -2.0000000e-001 6.5969690e+003 0.0000000e+000 -8.0000000e-001 2.0000000e-001 6.5969690e+003 0.0000000e+000 -2.0000000e-001 -8.0000000e-001 6.5969690e+003 0.0000000e+000 -2.0000000e-001 
8.0000000e-001 6.5969690e+003 0.0000000e+000 2.0000000e-001 -8.0000000e-001 6.5969690e+003 0.0000000e+000 2.0000000e-001 8.0000000e-001 6.5969690e+003 0.0000000e+000 8.0000000e-001 -2.0000000e-001 6.5969690e+003 0.0000000e+000 8.0000000e-001 2.0000000e-001 6.5969690e+003 2.0000000e-001 -8.0000000e-001 0.0000000e+000 6.5969690e+003 2.0000000e-001 0.0000000e+000 -8.0000000e-001 6.5969690e+003 2.0000000e-001 0.0000000e+000 8.0000000e-001 6.5969690e+003 2.0000000e-001 8.0000000e-001 0.0000000e+000 6.5969690e+003 4.0000000e-001 -6.0000000e-001 -4.0000000e-001 6.5969690e+003 4.0000000e-001 -6.0000000e-001 4.0000000e-001 6.5969690e+003 4.0000000e-001 -4.0000000e-001 -6.0000000e-001 6.5969690e+003 4.0000000e-001 -4.0000000e-001 6.0000000e-001 6.5969690e+003 4.0000000e-001 4.0000000e-001 -6.0000000e-001 6.5969690e+003 4.0000000e-001 4.0000000e-001 6.0000000e-001 6.5969690e+003 4.0000000e-001 6.0000000e-001 -4.0000000e-001 6.5969690e+003 4.0000000e-001 6.0000000e-001 4.0000000e-001 6.5969690e+003 6.0000000e-001 -4.0000000e-001 -4.0000000e-001 6.5969690e+003 6.0000000e-001 -4.0000000e-001 4.0000000e-001 6.5969690e+003 6.0000000e-001 4.0000000e-001 -4.0000000e-001 6.5969690e+003 6.0000000e-001 4.0000000e-001 4.0000000e-001 6.5969690e+003 8.0000000e-001 -2.0000000e-001 0.0000000e+000 6.5969690e+003 8.0000000e-001 0.0000000e+000 -2.0000000e-001 6.5969690e+003 8.0000000e-001 0.0000000e+000 2.0000000e-001 6.5969690e+003 8.0000000e-001 2.0000000e-001 0.0000000e+000 6.7882251e+003 -8.0000000e-001 -2.0000000e-001 -2.0000000e-001 6.7882251e+003 -8.0000000e-001 -2.0000000e-001 2.0000000e-001 6.7882251e+003 -8.0000000e-001 2.0000000e-001 -2.0000000e-001 6.7882251e+003 -8.0000000e-001 2.0000000e-001 2.0000000e-001 6.7882251e+003 -6.0000000e-001 -6.0000000e-001 0.0000000e+000 6.7882251e+003 -6.0000000e-001 0.0000000e+000 -6.0000000e-001 6.7882251e+003 -6.0000000e-001 0.0000000e+000 6.0000000e-001 6.7882251e+003 -6.0000000e-001 6.0000000e-001 0.0000000e+000 6.7882251e+003 -2.0000000e-001 -8.0000000e-001 -2.0000000e-001 6.7882251e+003 -2.0000000e-001 -8.0000000e-001 2.0000000e-001 6.7882251e+003 -2.0000000e-001 -2.0000000e-001 -8.0000000e-001 6.7882251e+003 -2.0000000e-001 -2.0000000e-001 8.0000000e-001 6.7882251e+003 -2.0000000e-001 2.0000000e-001 -8.0000000e-001 6.7882251e+003 -2.0000000e-001 2.0000000e-001 8.0000000e-001 6.7882251e+003 -2.0000000e-001 8.0000000e-001 -2.0000000e-001 6.7882251e+003 -2.0000000e-001 8.0000000e-001 2.0000000e-001 6.7882251e+003 0.0000000e+000 -6.0000000e-001 -6.0000000e-001 6.7882251e+003 0.0000000e+000 -6.0000000e-001 6.0000000e-001 6.7882251e+003 0.0000000e+000 6.0000000e-001 -6.0000000e-001 6.7882251e+003 0.0000000e+000 6.0000000e-001 6.0000000e-001 6.7882251e+003 2.0000000e-001 -8.0000000e-001 -2.0000000e-001 6.7882251e+003 2.0000000e-001 -8.0000000e-001 2.0000000e-001 6.7882251e+003 2.0000000e-001 -2.0000000e-001 -8.0000000e-001 6.7882251e+003 2.0000000e-001 -2.0000000e-001 8.0000000e-001 6.7882251e+003 2.0000000e-001 2.0000000e-001 -8.0000000e-001 6.7882251e+003 2.0000000e-001 2.0000000e-001 8.0000000e-001 6.7882251e+003 2.0000000e-001 8.0000000e-001 -2.0000000e-001 6.7882251e+003 2.0000000e-001 8.0000000e-001 2.0000000e-001 6.7882251e+003 6.0000000e-001 -6.0000000e-001 0.0000000e+000 6.7882251e+003 6.0000000e-001 0.0000000e+000 -6.0000000e-001 6.7882251e+003 6.0000000e-001 0.0000000e+000 6.0000000e-001 6.7882251e+003 6.0000000e-001 6.0000000e-001 0.0000000e+000 6.7882251e+003 8.0000000e-001 -2.0000000e-001 -2.0000000e-001 6.7882251e+003 8.0000000e-001 -2.0000000e-001 
2.0000000e-001 6.7882251e+003 8.0000000e-001 2.0000000e-001 -2.0000000e-001 6.7882251e+003 8.0000000e-001 2.0000000e-001 2.0000000e-001 6.9742383e+003 -6.0000000e-001 -6.0000000e-001 -2.0000000e-001 6.9742383e+003 -6.0000000e-001 -6.0000000e-001 2.0000000e-001 6.9742383e+003 -6.0000000e-001 -2.0000000e-001 -6.0000000e-001 6.9742383e+003 -6.0000000e-001 -2.0000000e-001 6.0000000e-001 6.9742383e+003 -6.0000000e-001 2.0000000e-001 -6.0000000e-001 6.9742383e+003 -6.0000000e-001 2.0000000e-001 6.0000000e-001 6.9742383e+003 -6.0000000e-001 6.0000000e-001 -2.0000000e-001 6.9742383e+003 -6.0000000e-001 6.0000000e-001 2.0000000e-001 6.9742383e+003 -2.0000000e-001 -6.0000000e-001 -6.0000000e-001 6.9742383e+003 -2.0000000e-001 -6.0000000e-001 6.0000000e-001 6.9742383e+003 -2.0000000e-001 6.0000000e-001 -6.0000000e-001 6.9742383e+003 -2.0000000e-001 6.0000000e-001 6.0000000e-001 6.9742383e+003 2.0000000e-001 -6.0000000e-001 -6.0000000e-001 6.9742383e+003 2.0000000e-001 -6.0000000e-001 6.0000000e-001 6.9742383e+003 2.0000000e-001 6.0000000e-001 -6.0000000e-001 6.9742383e+003 2.0000000e-001 6.0000000e-001 6.0000000e-001 6.9742383e+003 6.0000000e-001 -6.0000000e-001 -2.0000000e-001 6.9742383e+003 6.0000000e-001 -6.0000000e-001 2.0000000e-001 6.9742383e+003 6.0000000e-001 -2.0000000e-001 -6.0000000e-001 6.9742383e+003 6.0000000e-001 -2.0000000e-001 6.0000000e-001 6.9742383e+003 6.0000000e-001 2.0000000e-001 -6.0000000e-001 6.9742383e+003 6.0000000e-001 2.0000000e-001 6.0000000e-001 6.9742383e+003 6.0000000e-001 6.0000000e-001 -2.0000000e-001 6.9742383e+003 6.0000000e-001 6.0000000e-001 2.0000000e-001 7.1554175e+003 -8.0000000e-001 -4.0000000e-001 0.0000000e+000 7.1554175e+003 -8.0000000e-001 0.0000000e+000 -4.0000000e-001 7.1554175e+003 -8.0000000e-001 0.0000000e+000 4.0000000e-001 7.1554175e+003 -8.0000000e-001 4.0000000e-001 0.0000000e+000 7.1554175e+003 -4.0000000e-001 -8.0000000e-001 0.0000000e+000 7.1554175e+003 -4.0000000e-001 0.0000000e+000 -8.0000000e-001 7.1554175e+003 -4.0000000e-001 0.0000000e+000 8.0000000e-001 7.1554175e+003 -4.0000000e-001 8.0000000e-001 0.0000000e+000 7.1554175e+003 0.0000000e+000 -8.0000000e-001 -4.0000000e-001 7.1554175e+003 0.0000000e+000 -8.0000000e-001 4.0000000e-001 7.1554175e+003 0.0000000e+000 -4.0000000e-001 -8.0000000e-001 7.1554175e+003 0.0000000e+000 -4.0000000e-001 8.0000000e-001 7.1554175e+003 0.0000000e+000 4.0000000e-001 -8.0000000e-001 7.1554175e+003 0.0000000e+000 4.0000000e-001 8.0000000e-001 7.1554175e+003 0.0000000e+000 8.0000000e-001 -4.0000000e-001 7.1554175e+003 0.0000000e+000 8.0000000e-001 4.0000000e-001 7.1554175e+003 4.0000000e-001 -8.0000000e-001 0.0000000e+000 7.1554175e+003 4.0000000e-001 0.0000000e+000 -8.0000000e-001 7.1554175e+003 4.0000000e-001 0.0000000e+000 8.0000000e-001 7.1554175e+003 4.0000000e-001 8.0000000e-001 0.0000000e+000 7.1554175e+003 8.0000000e-001 -4.0000000e-001 0.0000000e+000 7.1554175e+003 8.0000000e-001 0.0000000e+000 -4.0000000e-001 7.1554175e+003 8.0000000e-001 0.0000000e+000 4.0000000e-001 7.1554175e+003 8.0000000e-001 4.0000000e-001 0.0000000e+000 7.3321211e+003 -8.0000000e-001 -4.0000000e-001 -2.0000000e-001 7.3321211e+003 -8.0000000e-001 -4.0000000e-001 2.0000000e-001 7.3321211e+003 -8.0000000e-001 -2.0000000e-001 -4.0000000e-001 7.3321211e+003 -8.0000000e-001 -2.0000000e-001 4.0000000e-001 7.3321211e+003 -8.0000000e-001 2.0000000e-001 -4.0000000e-001 7.3321211e+003 -8.0000000e-001 2.0000000e-001 4.0000000e-001 7.3321211e+003 -8.0000000e-001 4.0000000e-001 -2.0000000e-001 7.3321211e+003 -8.0000000e-001 
4.0000000e-001 2.0000000e-001 7.3321211e+003 -4.0000000e-001 -8.0000000e-001 -2.0000000e-001 7.3321211e+003 -4.0000000e-001 -8.0000000e-001 2.0000000e-001 7.3321211e+003 -4.0000000e-001 -2.0000000e-001 -8.0000000e-001 7.3321211e+003 -4.0000000e-001 -2.0000000e-001 8.0000000e-001 7.3321211e+003 -4.0000000e-001 2.0000000e-001 -8.0000000e-001 7.3321211e+003 -4.0000000e-001 2.0000000e-001 8.0000000e-001 7.3321211e+003 -4.0000000e-001 8.0000000e-001 -2.0000000e-001 7.3321211e+003 -4.0000000e-001 8.0000000e-001 2.0000000e-001 7.3321211e+003 -2.0000000e-001 -8.0000000e-001 -4.0000000e-001 7.3321211e+003 -2.0000000e-001 -8.0000000e-001 4.0000000e-001 7.3321211e+003 -2.0000000e-001 -4.0000000e-001 -8.0000000e-001 7.3321211e+003 -2.0000000e-001 -4.0000000e-001 8.0000000e-001 7.3321211e+003 -2.0000000e-001 4.0000000e-001 -8.0000000e-001 7.3321211e+003 -2.0000000e-001 4.0000000e-001 8.0000000e-001 7.3321211e+003 -2.0000000e-001 8.0000000e-001 -4.0000000e-001 7.3321211e+003 -2.0000000e-001 8.0000000e-001 4.0000000e-001 7.3321211e+003 2.0000000e-001 -8.0000000e-001 -4.0000000e-001 7.3321211e+003 2.0000000e-001 -8.0000000e-001 4.0000000e-001 7.3321211e+003 2.0000000e-001 -4.0000000e-001 -8.0000000e-001 7.3321211e+003 2.0000000e-001 -4.0000000e-001 8.0000000e-001 7.3321211e+003 2.0000000e-001 4.0000000e-001 -8.0000000e-001 7.3321211e+003 2.0000000e-001 4.0000000e-001 8.0000000e-001 7.3321211e+003 2.0000000e-001 8.0000000e-001 -4.0000000e-001 7.3321211e+003 2.0000000e-001 8.0000000e-001 4.0000000e-001 7.3321211e+003 4.0000000e-001 -8.0000000e-001 -2.0000000e-001 7.3321211e+003 4.0000000e-001 -8.0000000e-001 2.0000000e-001 7.3321211e+003 4.0000000e-001 -2.0000000e-001 -8.0000000e-001 7.3321211e+003 4.0000000e-001 -2.0000000e-001 8.0000000e-001 7.3321211e+003 4.0000000e-001 2.0000000e-001 -8.0000000e-001 7.3321211e+003 4.0000000e-001 2.0000000e-001 8.0000000e-001 7.3321211e+003 4.0000000e-001 8.0000000e-001 -2.0000000e-001 7.3321211e+003 4.0000000e-001 8.0000000e-001 2.0000000e-001 7.3321211e+003 8.0000000e-001 -4.0000000e-001 -2.0000000e-001 7.3321211e+003 8.0000000e-001 -4.0000000e-001 2.0000000e-001 7.3321211e+003 8.0000000e-001 -2.0000000e-001 -4.0000000e-001 7.3321211e+003 8.0000000e-001 -2.0000000e-001 4.0000000e-001 7.3321211e+003 8.0000000e-001 2.0000000e-001 -4.0000000e-001 7.3321211e+003 8.0000000e-001 2.0000000e-001 4.0000000e-001 7.3321211e+003 8.0000000e-001 4.0000000e-001 -2.0000000e-001 7.3321211e+003 8.0000000e-001 4.0000000e-001 2.0000000e-001 7.5046652e+003 -6.0000000e-001 -6.0000000e-001 -4.0000000e-001 7.5046652e+003 -6.0000000e-001 -6.0000000e-001 4.0000000e-001 7.5046652e+003 -6.0000000e-001 -4.0000000e-001 -6.0000000e-001 7.5046652e+003 -6.0000000e-001 -4.0000000e-001 6.0000000e-001 7.5046652e+003 -6.0000000e-001 4.0000000e-001 -6.0000000e-001 7.5046652e+003 -6.0000000e-001 4.0000000e-001 6.0000000e-001 7.5046652e+003 -6.0000000e-001 6.0000000e-001 -4.0000000e-001 7.5046652e+003 -6.0000000e-001 6.0000000e-001 4.0000000e-001 7.5046652e+003 -4.0000000e-001 -6.0000000e-001 -6.0000000e-001 7.5046652e+003 -4.0000000e-001 -6.0000000e-001 6.0000000e-001 7.5046652e+003 -4.0000000e-001 6.0000000e-001 -6.0000000e-001 7.5046652e+003 -4.0000000e-001 6.0000000e-001 6.0000000e-001 7.5046652e+003 4.0000000e-001 -6.0000000e-001 -6.0000000e-001 7.5046652e+003 4.0000000e-001 -6.0000000e-001 6.0000000e-001 7.5046652e+003 4.0000000e-001 6.0000000e-001 -6.0000000e-001 7.5046652e+003 4.0000000e-001 6.0000000e-001 6.0000000e-001 7.5046652e+003 6.0000000e-001 -6.0000000e-001 -4.0000000e-001 7.5046652e+003 
6.0000000e-001 -6.0000000e-001 4.0000000e-001 7.5046652e+003 6.0000000e-001 -4.0000000e-001 -6.0000000e-001 7.5046652e+003 6.0000000e-001 -4.0000000e-001 6.0000000e-001 7.5046652e+003 6.0000000e-001 4.0000000e-001 -6.0000000e-001 7.5046652e+003 6.0000000e-001 4.0000000e-001 6.0000000e-001 7.5046652e+003 6.0000000e-001 6.0000000e-001 -4.0000000e-001 7.5046652e+003 6.0000000e-001 6.0000000e-001 4.0000000e-001 7.8383672e+003 -8.0000000e-001 -4.0000000e-001 -4.0000000e-001 7.8383672e+003 -8.0000000e-001 -4.0000000e-001 4.0000000e-001 7.8383672e+003 -8.0000000e-001 4.0000000e-001 -4.0000000e-001 7.8383672e+003 -8.0000000e-001 4.0000000e-001 4.0000000e-001 7.8383672e+003 -4.0000000e-001 -8.0000000e-001 -4.0000000e-001 7.8383672e+003 -4.0000000e-001 -8.0000000e-001 4.0000000e-001 7.8383672e+003 -4.0000000e-001 -4.0000000e-001 -8.0000000e-001 7.8383672e+003 -4.0000000e-001 -4.0000000e-001 8.0000000e-001 7.8383672e+003 -4.0000000e-001 4.0000000e-001 -8.0000000e-001 7.8383672e+003 -4.0000000e-001 4.0000000e-001 8.0000000e-001 7.8383672e+003 -4.0000000e-001 8.0000000e-001 -4.0000000e-001 7.8383672e+003 -4.0000000e-001 8.0000000e-001 4.0000000e-001 7.8383672e+003 4.0000000e-001 -8.0000000e-001 -4.0000000e-001 7.8383672e+003 4.0000000e-001 -8.0000000e-001 4.0000000e-001 7.8383672e+003 4.0000000e-001 -4.0000000e-001 -8.0000000e-001 7.8383672e+003 4.0000000e-001 -4.0000000e-001 8.0000000e-001 7.8383672e+003 4.0000000e-001 4.0000000e-001 -8.0000000e-001 7.8383672e+003 4.0000000e-001 4.0000000e-001 8.0000000e-001 7.8383672e+003 4.0000000e-001 8.0000000e-001 -4.0000000e-001 7.8383672e+003 4.0000000e-001 8.0000000e-001 4.0000000e-001 7.8383672e+003 8.0000000e-001 -4.0000000e-001 -4.0000000e-001 7.8383672e+003 8.0000000e-001 -4.0000000e-001 4.0000000e-001 7.8383672e+003 8.0000000e-001 4.0000000e-001 -4.0000000e-001 7.8383672e+003 8.0000000e-001 4.0000000e-001 4.0000000e-001 8.0000000e+003 -1.0000000e+000 0.0000000e+000 0.0000000e+000 8.0000000e+003 -8.0000000e-001 -6.0000000e-001 0.0000000e+000 8.0000000e+003 -8.0000000e-001 0.0000000e+000 -6.0000000e-001 8.0000000e+003 -8.0000000e-001 0.0000000e+000 6.0000000e-001 8.0000000e+003 -8.0000000e-001 6.0000000e-001 0.0000000e+000 8.0000000e+003 -6.0000000e-001 -8.0000000e-001 0.0000000e+000 8.0000000e+003 -6.0000000e-001 0.0000000e+000 -8.0000000e-001 8.0000000e+003 -6.0000000e-001 0.0000000e+000 8.0000000e-001 8.0000000e+003 -6.0000000e-001 8.0000000e-001 0.0000000e+000 8.0000000e+003 0.0000000e+000 -1.0000000e+000 0.0000000e+000 8.0000000e+003 0.0000000e+000 -8.0000000e-001 -6.0000000e-001 8.0000000e+003 0.0000000e+000 -8.0000000e-001 6.0000000e-001 8.0000000e+003 0.0000000e+000 -6.0000000e-001 -8.0000000e-001 8.0000000e+003 0.0000000e+000 -6.0000000e-001 8.0000000e-001 8.0000000e+003 0.0000000e+000 0.0000000e+000 -1.0000000e+000 8.0000000e+003 0.0000000e+000 0.0000000e+000 1.0000000e+000 8.0000000e+003 0.0000000e+000 6.0000000e-001 -8.0000000e-001 8.0000000e+003 0.0000000e+000 6.0000000e-001 8.0000000e-001 8.0000000e+003 0.0000000e+000 8.0000000e-001 -6.0000000e-001 8.0000000e+003 0.0000000e+000 8.0000000e-001 6.0000000e-001 8.0000000e+003 0.0000000e+000 1.0000000e+000 0.0000000e+000 8.0000000e+003 6.0000000e-001 -8.0000000e-001 0.0000000e+000 8.0000000e+003 6.0000000e-001 0.0000000e+000 -8.0000000e-001 8.0000000e+003 6.0000000e-001 0.0000000e+000 8.0000000e-001 8.0000000e+003 6.0000000e-001 8.0000000e-001 0.0000000e+000 8.0000000e+003 8.0000000e-001 -6.0000000e-001 0.0000000e+000 8.0000000e+003 8.0000000e-001 0.0000000e+000 -6.0000000e-001 8.0000000e+003 
8.0000000e-001 0.0000000e+000 6.0000000e-001 8.0000000e+003 8.0000000e-001 6.0000000e-001 0.0000000e+000 8.0000000e+003 1.0000000e+000 0.0000000e+000 0.0000000e+000 dipy-0.13.0/dipy/data/files/gtab_3shell.txt000066400000000000000000000347031317371701200204610ustar00rootroot000000000000000.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00 9.999789999999999281e+02,-5.040010000000000545e+00,-4.027949999999999697e+00 0.000000000000000000e+00,9.999919999999999618e+02,-3.987939999999999596e+00 -2.570550000000000068e+01,6.538609999999999900e+02,-7.561779999999999973e+02 5.895180000000000291e+02,-7.692359999999999900e+02,-2.464619999999999891e+02 -2.357849999999999966e+02,-5.290950000000000273e+02,-8.151469999999999345e+02 -8.935779999999999745e+02,-2.635589999999999691e+02,-3.633940000000000055e+02 7.978400000000000318e+02,1.337259999999999991e+02,-5.878509999999999991e+02 2.329370000000000118e+02,9.318840000000000146e+02,-2.780869999999999891e+02 9.367200000000000273e+02,1.441389999999999816e+02,-3.190299999999999727e+02 5.041299999999999955e+02,-8.466939999999999600e+02,1.701829999999999927e+02 3.451989999999999554e+02,-8.503110000000000355e+02,3.972520000000000095e+02 4.567649999999999864e+02,-6.356720000000000255e+02,6.223229999999999791e+02 -4.874809999999999945e+02,-3.939079999999999586e+02,-7.792289999999999281e+02 -6.170330000000000155e+02,6.768490000000000464e+02,-4.014300000000000068e+02 -5.785120000000000573e+02,-1.093469999999999942e+02,8.083110000000000355e+02 -8.253640000000000327e+02,-5.250339999999999918e+02,-2.076359999999999957e+02 8.950760000000000218e+02,-4.482420000000000471e+01,4.436550000000000296e+02 2.899920000000000186e+02,-5.454729999999999563e+02,7.863609999999999900e+02 1.150140000000000100e+02,-9.640499999999999545e+02,2.395409999999999968e+02 -7.999340000000000828e+02,4.077669999999999959e+02,4.402640000000000100e+02 5.124940000000000282e+02,8.421390000000000100e+02,-1.677849999999999966e+02 -7.900049999999999955e+02,1.579929999999999950e+02,5.923940000000000055e+02 9.492810000000000628e+02,-2.376949999999999932e+02,-2.058300000000000125e+02 2.323179999999999836e+02,7.870509999999999309e+02,-5.714719999999999800e+02 -1.967070000000000007e+01,-1.920310000000000059e+02,9.811920000000000073e+02 2.159689999999999941e+02,-9.571229999999999336e+02,-1.930610000000000070e+02 7.726449999999999818e+02,-6.075339999999999918e+02,-1.841800000000000068e+02 -1.601529999999999916e+02,3.604130000000000109e+02,-9.189410000000000309e+02 -1.461670000000000016e+02,7.352740000000000009e+02,6.618210000000000264e+02 8.873700000000000045e+02,4.211109999999999900e+02,-1.877239999999999895e+02 -5.629889999999999191e+02,2.364819999999999993e+02,7.919089999999999918e+02 -3.813129999999999882e+02,1.470370000000000061e+02,-9.126779999999999973e+02 -3.059540000000000077e+02,-2.037930000000000064e+02,9.299790000000000418e+02 -3.326819999999999595e+02,-1.341129999999999995e+02,-9.334539999999999509e+02 -9.622389999999999191e+02,-2.694639999999999986e+02,-3.853909999999999769e+01 -9.595320000000000391e+02,2.097700000000000102e+02,-1.878710000000000093e+02 4.509639999999999986e+02,-8.903369999999999891e+02,-6.270149999999999579e+01 -7.711920000000000073e+02,6.311750000000000682e+02,-8.295329999999999870e+01 7.098160000000000309e+02,4.131589999999999918e+02,-5.704919999999999618e+02 -6.945430000000000064e+02,2.793949999999999889e+01,-7.189080000000000155e+02 6.815489999999999782e+02,5.331009999999999991e+02,5.012930000000000064e+02 
-1.416890000000000214e+02,-7.292409999999999854e+02,-6.694270000000000209e+02 -7.403509999999999991e+02,3.932230000000000132e+02,-5.452119999999999891e+02 -1.027560000000000002e+02,8.253669999999999618e+02,-5.551669999999999163e+02 5.839130000000000109e+02,-6.007820000000000391e+02,-5.459920000000000755e+02 -8.775499999999999545e+01,-3.396509999999999536e+02,-9.364489999999999554e+02 -5.505060000000000855e+02,-7.954839999999999236e+02,-2.532760000000000105e+02 8.374430000000000973e+02,-4.622019999999999982e+02,2.916480000000000246e+02 3.629289999999999736e+02,-5.659300000000000637e+02,-7.402740000000000009e+02 -1.836109999999999900e+02,3.970810000000000173e+02,8.992300000000000182e+02 -7.183230000000000928e+02,-6.957010000000000218e+02,-3.548969999999999736e+00 4.327819999999999823e+02,6.863609999999999900e+02,5.844730000000000700e+02 5.018369999999999891e+02,6.943369999999999891e+02,-5.158049999999999500e+02 -1.705180000000000007e+02,-5.137690000000000055e+02,8.408120000000000118e+02 4.631950000000000500e+02,4.280519999999999641e+02,-7.760289999999999964e+02 3.837130000000000223e+02,-8.125720000000000027e+02,-4.387379999999999995e+02 -7.141659999999999400e+02,-2.514669999999999845e+02,-6.532470000000000709e+02 2.592050000000000409e+02,8.872580000000000382e+02,3.815569999999999595e+02 0.000000000000000000e+00,8.131860000000000355e+01,9.966879999999999882e+02 3.636330000000000240e+01,-9.046159999999999854e+02,-4.246750000000000114e+02 5.708539999999999281e+02,-3.085970000000000368e+02,7.608509999999999991e+02 -2.822049999999999841e+02,1.497950000000000159e+02,9.475879999999999654e+02 7.203509999999999991e+02,6.119139999999999873e+02,-3.265830000000000268e+02 2.658909999999999627e+02,9.606829999999999927e+02,7.993519999999999470e+01 1.999957999999999856e+03,-1.008002000000000109e+01,-8.055899999999999395e+00 0.000000000000000000e+00,1.999983999999999924e+03,-7.975879999999999193e+00 -5.141100000000000136e+01,1.307721999999999980e+03,-1.512355999999999995e+03 1.179036000000000058e+03,-1.538471999999999980e+03,-4.929239999999999782e+02 -4.715699999999999932e+02,-1.058190000000000055e+03,-1.630293999999999869e+03 -1.787155999999999949e+03,-5.271179999999999382e+02,-7.267880000000000109e+02 1.595680000000000064e+03,2.674519999999999982e+02,-1.175701999999999998e+03 4.658740000000000236e+02,1.863768000000000029e+03,-5.561739999999999782e+02 1.873440000000000055e+03,2.882779999999999632e+02,-6.380599999999999454e+02 1.008259999999999991e+03,-1.693387999999999920e+03,3.403659999999999854e+02 6.903979999999999109e+02,-1.700622000000000071e+03,7.945040000000000191e+02 9.135299999999999727e+02,-1.271344000000000051e+03,1.244645999999999958e+03 -9.749619999999999891e+02,-7.878159999999999172e+02,-1.558457999999999856e+03 -1.234066000000000031e+03,1.353698000000000093e+03,-8.028600000000000136e+02 -1.157024000000000115e+03,-2.186939999999999884e+02,1.616622000000000071e+03 -1.650728000000000065e+03,-1.050067999999999984e+03,-4.152719999999999914e+02 1.790152000000000044e+03,-8.964840000000000941e+01,8.873100000000000591e+02 5.799840000000000373e+02,-1.090945999999999913e+03,1.572721999999999980e+03 2.300280000000000200e+02,-1.928099999999999909e+03,4.790819999999999936e+02 -1.599868000000000166e+03,8.155339999999999918e+02,8.805280000000000200e+02 1.024988000000000056e+03,1.684278000000000020e+03,-3.355699999999999932e+02 -1.580009999999999991e+03,3.159859999999999900e+02,1.184788000000000011e+03 1.898562000000000126e+03,-4.753899999999999864e+02,-4.116600000000000250e+02 
4.646359999999999673e+02,1.574101999999999862e+03,-1.142943999999999960e+03 -3.934140000000000015e+01,-3.840620000000000118e+02,1.962384000000000015e+03 4.319379999999999882e+02,-1.914245999999999867e+03,-3.861220000000000141e+02 1.545289999999999964e+03,-1.215067999999999984e+03,-3.683600000000000136e+02 -3.203059999999999832e+02,7.208260000000000218e+02,-1.837882000000000062e+03 -2.923340000000000032e+02,1.470548000000000002e+03,1.323642000000000053e+03 1.774740000000000009e+03,8.422219999999999800e+02,-3.754479999999999791e+02 -1.125977999999999838e+03,4.729639999999999986e+02,1.583817999999999984e+03 -7.626259999999999764e+02,2.940740000000000123e+02,-1.825355999999999995e+03 -6.119080000000000155e+02,-4.075860000000000127e+02,1.859958000000000084e+03 -6.653639999999999191e+02,-2.682259999999999991e+02,-1.866907999999999902e+03 -1.924477999999999838e+03,-5.389279999999999973e+02,-7.707819999999999538e+01 -1.919064000000000078e+03,4.195400000000000205e+02,-3.757420000000000186e+02 9.019279999999999973e+02,-1.780673999999999978e+03,-1.254029999999999916e+02 -1.542384000000000015e+03,1.262350000000000136e+03,-1.659065999999999974e+02 1.419632000000000062e+03,8.263179999999999836e+02,-1.140983999999999924e+03 -1.389086000000000013e+03,5.587899999999999778e+01,-1.437816000000000031e+03 1.363097999999999956e+03,1.066201999999999998e+03,1.002586000000000013e+03 -2.833780000000000427e+02,-1.458481999999999971e+03,-1.338854000000000042e+03 -1.480701999999999998e+03,7.864460000000000264e+02,-1.090423999999999978e+03 -2.055120000000000005e+02,1.650733999999999924e+03,-1.110333999999999833e+03 1.167826000000000022e+03,-1.201564000000000078e+03,-1.091984000000000151e+03 -1.755099999999999909e+02,-6.793019999999999072e+02,-1.872897999999999911e+03 -1.101012000000000171e+03,-1.590967999999999847e+03,-5.065520000000000209e+02 1.674886000000000195e+03,-9.244039999999999964e+02,5.832960000000000491e+02 7.258579999999999472e+02,-1.131860000000000127e+03,-1.480548000000000002e+03 -3.672219999999999800e+02,7.941620000000000346e+02,1.798460000000000036e+03 -1.436646000000000186e+03,-1.391402000000000044e+03,-7.097939999999999472e+00 8.655639999999999645e+02,1.372721999999999980e+03,1.168946000000000140e+03 1.003673999999999978e+03,1.388673999999999978e+03,-1.031609999999999900e+03 -3.410360000000000014e+02,-1.027538000000000011e+03,1.681624000000000024e+03 9.263900000000001000e+02,8.561039999999999281e+02,-1.552057999999999993e+03 7.674260000000000446e+02,-1.625144000000000005e+03,-8.774759999999999991e+02 -1.428331999999999880e+03,-5.029339999999999691e+02,-1.306494000000000142e+03 5.184100000000000819e+02,1.774516000000000076e+03,7.631139999999999191e+02 0.000000000000000000e+00,1.626372000000000071e+02,1.993375999999999976e+03 7.272660000000000480e+01,-1.809231999999999971e+03,-8.493500000000000227e+02 1.141707999999999856e+03,-6.171940000000000737e+02,1.521701999999999998e+03 -5.644099999999999682e+02,2.995900000000000318e+02,1.895175999999999931e+03 1.440701999999999998e+03,1.223827999999999975e+03,-6.531660000000000537e+02 5.317819999999999254e+02,1.921365999999999985e+03,1.598703999999999894e+02 3.499926500000000033e+03,-1.764003500000000102e+01,-1.409782499999999850e+01 0.000000000000000000e+00,3.499971999999999753e+03,-1.395778999999999925e+01 -8.996925000000000239e+01,2.288513500000000022e+03,-2.646623000000000047e+03 2.063313000000000102e+03,-2.692326000000000022e+03,-8.626169999999999618e+02 -8.252474999999999454e+02,-1.851832499999999982e+03,-2.853014499999999771e+03 
-3.127523000000000138e+03,-9.224565000000000055e+02,-1.271878999999999905e+03 2.792440000000000055e+03,4.680410000000000537e+02,-2.057478500000000167e+03 8.152794999999999845e+02,3.261594000000000051e+03,-9.733044999999999618e+02 3.278519999999999982e+03,5.044864999999999782e+02,-1.116605000000000018e+03 1.764454999999999927e+03,-2.963428999999999633e+03,5.956404999999999745e+02 1.208196500000000015e+03,-2.976088500000000295e+03,1.390382000000000062e+03 1.598677500000000009e+03,-2.224851999999999862e+03,2.178130499999999756e+03 -1.706183500000000095e+03,-1.378677999999999884e+03,-2.727301500000000033e+03 -2.159615500000000338e+03,2.368971500000000106e+03,-1.405005000000000109e+03 -2.024792000000000144e+03,-3.827144999999999868e+02,2.829088499999999840e+03 -2.888773999999999887e+03,-1.837618999999999915e+03,-7.267259999999999991e+02 3.132766000000000076e+03,-1.568847000000000094e+02,1.552792500000000018e+03 1.014972000000000094e+03,-1.909155499999999847e+03,2.752263500000000022e+03 4.025490000000000350e+02,-3.374174999999999727e+03,8.383935000000000173e+02 -2.799769000000000233e+03,1.427184500000000071e+03,1.540923999999999978e+03 1.793729000000000042e+03,2.947486499999999978e+03,-5.872474999999999454e+02 -2.765017499999999927e+03,5.529755000000000109e+02,2.073378999999999905e+03 3.322483500000000276e+03,-8.319325000000000045e+02,-7.204050000000000864e+02 8.131129999999999427e+02,2.754678499999999985e+03,-2.000152000000000044e+03 -6.884744999999999493e+01,-6.721085000000000491e+02,3.434172000000000025e+03 7.558914999999999509e+02,-3.349930499999999938e+03,-6.757135000000000673e+02 2.704257500000000164e+03,-2.126369000000000142e+03,-6.446299999999999955e+02 -5.605354999999999563e+02,1.261445500000000038e+03,-3.216293500000000222e+03 -5.115844999999999914e+02,2.573458999999999833e+03,2.316373500000000149e+03 3.105795000000000073e+03,1.473888500000000022e+03,-6.570339999999999918e+02 -1.970461499999999887e+03,8.276870000000000118e+02,2.771681499999999687e+03 -1.334595500000000129e+03,5.146295000000000073e+02,-3.194373000000000047e+03 -1.070838999999999942e+03,-7.132754999999999654e+02,3.254926500000000033e+03 -1.164386999999999944e+03,-4.693955000000000268e+02,-3.267088999999999942e+03 -3.367836499999999887e+03,-9.431239999999999100e+02,-1.348868500000000097e+02 -3.358362000000000080e+03,7.341950000000000500e+02,-6.575484999999999900e+02 1.578374000000000024e+03,-3.116179500000000189e+03,-2.194552499999999782e+02 -2.699172000000000025e+03,2.209112500000000182e+03,-2.903365499999999884e+02 2.484356000000000222e+03,1.446056499999999915e+03,-1.996721999999999980e+03 -2.430900500000000193e+03,9.778824999999999079e+01,-2.516177999999999884e+03 2.385421499999999924e+03,1.865853500000000167e+03,1.754525499999999965e+03 -4.959115000000000464e+02,-2.552343499999999949e+03,-2.342994499999999789e+03 -2.591228499999999713e+03,1.376280500000000075e+03,-1.908242000000000189e+03 -3.596460000000000150e+02,2.888784499999999753e+03,-1.943084499999999935e+03 2.043695500000000038e+03,-2.102737000000000080e+03,-1.910972000000000207e+03 -3.071424999999999841e+02,-1.188778499999999894e+03,-3.277571500000000015e+03 -1.926771000000000186e+03,-2.784193999999999960e+03,-8.864660000000000082e+02 2.931050500000000284e+03,-1.617707000000000107e+03,1.020768000000000029e+03 1.270251500000000078e+03,-1.980755000000000109e+03,-2.590958999999999833e+03 -6.426385000000000218e+02,1.389783500000000004e+03,3.147304999999999836e+03 -2.514130500000000211e+03,-2.434953500000000076e+03,-1.242139499999999863e+01 
1.514737000000000080e+03,2.402263500000000022e+03,2.045655500000000075e+03 1.756429499999999962e+03,2.430179499999999734e+03,-1.805317499999999882e+03 -5.968129999999999882e+02,-1.798191500000000133e+03,2.942842000000000098e+03 1.621182500000000118e+03,1.498182000000000016e+03,-2.716101499999999760e+03 1.342995499999999993e+03,-2.844001999999999953e+03,-1.535583000000000084e+03 -2.499580999999999676e+03,-8.801345000000000027e+02,-2.286364500000000135e+03 9.072175000000000864e+02,3.105402999999999793e+03,1.335449499999999944e+03 0.000000000000000000e+00,2.846151000000000408e+02,3.488407999999999902e+03 1.272715500000000048e+02,-3.166155999999999949e+03,-1.486362500000000182e+03 1.997988999999999805e+03,-1.080089500000000044e+03,2.662978500000000167e+03 -9.877174999999999727e+02,5.242825000000000273e+02,3.316557999999999993e+03 2.521228499999999713e+03,2.141699000000000069e+03,-1.143040500000000065e+03 9.306184999999999263e+02,3.362390499999999975e+03,2.797731999999999744e+02 dipy-0.13.0/dipy/data/files/gtab_isbi2013_2shell.txt000066400000000000000000000114371317371701200217730ustar00rootroot000000000000000.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00 -2.266327214356250806e+03,-9.183275903750475209e+02,-5.200340317942658430e+02 -3.451967493954964539e+02,2.539315081030927601e+01,1.459518548048878984e+03 -8.687821369576009829e+02,2.323250139612324801e+03,-3.126121995295425791e+02 -1.332678531203753437e+03,6.057252180305250704e+02,-3.272077210434978838e+02 -7.133089050173707619e+02,5.552534181107091626e+02,2.330854789063293993e+03 -5.909558927697119088e+02,-1.373764685157115309e+03,-1.163689074277325233e+02 -9.620046143336023761e+02,-1.663268413255978658e+03,1.599401546662312057e+03 1.524040250997601333e+03,-4.976289031939233496e+02,1.918245757989622234e+03 -1.310906776715079332e+03,-4.267496519835166850e+02,5.911075682939853095e+02 2.137449464632421950e+03,-9.795263447809793433e+02,-8.496104554574311578e+02 8.036905147915512089e+02,4.652768750402192950e+02,1.177963915401019221e+03 6.118441408476945753e+02,2.071391299541785429e+03,1.258961807003270678e+03 3.149948695314147926e+02,-1.233873374323025928e+03,7.926756766203922098e+02 -2.016575661811054943e+03,-5.906019227499958788e+02,1.354478485999431769e+03 6.460309928619076345e+02,-1.121364874288886767e+03,-7.584093716278345028e+02 6.044230534930856038e+02,-1.608119359468752464e+03,1.816211688682767090e+03 -8.824433249298083410e+02,-9.492765701355546071e+02,-2.137795072423571582e+03 5.310700425339645108e+02,8.492127701239588760e+02,-1.116603009570261293e+03 -1.054671935951507294e+03,-2.258501524869488094e+03,1.919322007337414959e+02 -1.373961930556389689e+03,-3.773567140735632961e+02,-4.688608788599892705e+02 2.071938848285220956e+03,-1.281610506785919370e+03,5.608421505757620480e+02 -1.311137097464583690e+02,-1.074853619200096091e+03,-1.038026441093374387e+03 8.942967847836255260e+02,-1.808493731043927937e+03,-1.476341317412907301e+03 1.764983771476244669e+02,6.108062603280885696e+02,-2.417801487965517026e+03 6.987629376269226213e+02,-5.293533263847286889e+02,1.217175177550222770e+03 2.498545348743911745e+03,-7.606967381351635993e+01,3.852979361575307138e+01 -1.215314074213265940e+03,4.552574404363761573e+02,7.521651174751937106e+02 -1.626364931671346767e+03,-1.503360529448681746e+03,-1.159674190247172191e+03 -3.607006642415059332e+02,1.455976251491915491e+03,5.309040147464097359e+00 8.331893303581111354e+01,2.340805027516016253e+03,-8.738934594980764814e+02 
1.124353191327967352e+03,9.833302798737638568e+02,-1.374462143312825049e+02 -1.653720553265972285e+03,4.593280934373784135e+02,1.817753017955097675e+03 -3.759970659660853016e+02,2.382088554995548748e+03,6.589994859968487617e+02 3.001173301911491649e+02,3.906608855968609362e+01,1.469150580724652400e+03 -2.128663806851728168e+03,-2.144264981200675990e+01,-1.310851101448424743e+03 -1.030316972919468753e+03,1.089894130130593567e+03,2.403581537912267052e+01 -1.901566073143071662e+03,-1.487916930230456046e+03,6.481895387961882307e+02 -9.451522326186659484e+02,-1.215736079475225040e+02,1.158407145621336895e+03 6.878294554085988466e+02,-8.127358768845556369e+01,2.402141803519633413e+03 1.624240188331681566e+03,-1.850731259269334714e+03,-4.320157596333787637e+02 8.200799773977817040e+02,1.037808396856925128e+03,7.074055145986018260e+02 1.175216553582782353e+03,7.076400293120931337e+02,-2.090002784950342630e+03 -3.784281936408362981e+02,-1.317364986763873276e+03,6.093780385822526569e+02 1.423187921813606181e+03,-1.642128421682733006e+03,1.236102902636202316e+03 1.208722462037743981e+03,-2.384435267328210273e+02,8.556486979623286970e+02 3.773953003791272209e+01,-1.451307454120837292e+03,-2.035259787222704063e+03 -4.951854729525189782e+02,6.748410455297414501e+02,1.244741302699125299e+03 1.670044415493887072e+03,1.836049453503702352e+03,2.997900174562804523e+02 -2.363898709855261586e+03,1.599047940265688794e+01,8.134661603982726774e+02 -1.488815236979110068e+03,1.018782253857645372e+02,1.518223215837661542e+02 1.956382677784426107e+02,1.565485886215385335e+03,-1.939324523704342937e+03 -8.612954914426065045e+01,7.484932807333950677e+02,-1.297050311075543959e+03 1.660689898552811883e+03,6.556327428480628896e+02,1.749929932125905907e+03 3.214001834136819724e+01,1.420492248229029428e+03,4.808001580098056706e+02 -2.912991904832986734e+02,-2.478128359467806149e+03,-1.549987600762498516e+02 1.516096796083063509e+03,-1.290727017378622122e+03,-1.511778512717902004e+03 -8.822996955012581566e+02,1.048749381914242576e+03,-6.096490640137826631e+02 -2.031307195810249823e+03,8.358456205600374460e+02,-1.193797794787752991e+03 9.602188732099014032e+02,9.029973251154810896e+02,-7.159438150901182780e+02 -6.268612903098597400e+01,-5.301030544174469696e+02,-2.442347477515107130e+03 -1.907738712044329077e+02,-6.099802978175640646e+02,-1.357029611445553428e+03 1.221964464137328832e+03,1.961471459061434871e+03,-9.536417375896357953e+02 -2.341088661959557612e+03,-8.057016790353089846e+02,3.466246979718731609e+02 dipy-0.13.0/dipy/data/files/gtab_taiwan_dsi.txt000066400000000000000000000362501317371701200214100ustar00rootroot000000000000000.000000000000000000e+00,0.000000000000000000e+00,0.000000000000000000e+00 3.060599742382443651e+02,-2.765200783212004509e+01,-2.065571669387280451e+01 -2.575623717651059152e+01,-3.056332799436796108e+02,2.808762071403956995e+01 -2.298419703420260873e+01,-2.631524008263777148e+01,-3.060118213829101137e+02 2.288267619722078194e+01,2.621510476963157643e+01,3.060280238997245874e+02 2.565432154927272634e+01,3.056307052103832689e+02,-2.820864793729555231e+01 -3.060786255380648981e+02,2.753264386414221931e+01,2.053846417284802328e+01 3.957169112450297348e+02,-4.706585449911170258e+02,1.066115291785341590e+01 3.996136383112861949e+02,-7.619153715938998062e+01,-4.612253134216570629e+02 4.645188593254337093e+02,-1.881913001993654078e+00,4.030430345936409822e+02 4.684046378059773588e+02,3.925064641541216588e+02,-6.899833968350519342e+01 
-6.881458102723127013e+01,-4.686915655385455466e+02,-3.921960859456324329e+02 -3.919827094470945017e+00,-3.944913987875558519e+02,4.717903890905229218e+02 3.762919180145303244e+00,3.943225724193932820e+02,-4.719327805098901081e+02 6.868429110232389689e+01,4.687154020658587683e+02,3.921904384632695724e+02 -4.684076386195116584e+02,-3.925343798873016112e+02,6.881892682528970795e+01 -4.645089378174691888e+02,1.724467875798906924e+00,-4.030551735162717932e+02 -3.997932084373172188e+02,7.603910042827406812e+01,4.610948337310392162e+02 -3.958842029919957213e+02,4.705142547243480635e+02,-1.081822178473176344e+01 4.451682293263964993e+02,-6.221211594988657225e+02,-5.164489427834931803e+02 5.247096225433380141e+02,-5.312420748196007025e+02,5.425778008284095222e+02 5.342094714925420931e+02,4.354383677705396849e+02,-6.139565688556374425e+02 6.137389895140746603e+02,5.265011995205273934e+02,4.450280212005625913e+02 -6.137117042060538097e+02,-5.265050958462578592e+02,-4.450610386996640955e+02 -5.343587285723475588e+02,-4.355955547189902859e+02,6.137151309097728245e+02 -5.248382747781089392e+02,5.309857216276328700e+02,-5.427042921845379624e+02 -4.454299932274629441e+02,6.220262675053053272e+02,5.163375288188925651e+02 1.223260965509586185e+03,-1.105195865064901852e+02,-8.233487267852983393e+01 -1.027376982809065424e+02,-1.221535699862614365e+03,1.125011080527421541e+02 -9.186216412046562141e+01,-1.051755212393736798e+02,-1.223053740657020626e+03 9.165933192442591348e+01,1.049754576277323395e+02,1.223086145848695423e+03 1.025339929453075456e+02,1.221530513357083919e+03,-1.127430052299052647e+02 -1.223298245390085640e+03,1.102810135967481386e+02,8.210055338188493579e+01 1.309532167632073424e+03,-8.060567319704998681e+02,-2.901804235093799278e+01 1.315616458707049787e+03,-1.822767383882529089e+02,-7.754821237279820707e+02 1.418301908210537704e+03,-6.473885939890702446e+01,5.913311908313959293e+02 1.424390160056957939e+03,5.590415150214839741e+02,-1.550137297641648502e+02 5.685897422544877600e+02,-1.426805659368653551e+03,7.984557217878109725e+01 5.807895781612451174e+02,-1.792958433008455472e+02,-1.412791727918280685e+03 7.860309621033701433e+02,5.556040880755914202e+01,1.320799896876126923e+03 7.981451219078418262e+02,1.303372472226758418e+03,-1.721294949049230354e+02 -1.661319996627892408e+02,-1.423776032930948986e+03,-5.574100525999256206e+02 -6.373182475918985546e+01,-1.306378415686428752e+03,8.091709890631771032e+02 -1.599399102184175092e+02,-7.999475199451392200e+02,-1.303820229346417591e+03 4.513471650802490842e+01,-5.651759281416964313e+02,1.429679344333865856e+03 -4.537838032058055404e+01,5.648740457392486860e+02,-1.429790934035341252e+03 1.597298441428331159e+02,7.998893592556471503e+02,1.303881662513778792e+03 6.348757213011214873e+01,1.306207196911330811e+03,-8.094665446589300473e+02 1.659213869491033790e+02,1.423848553773994809e+03,5.572875283626835881e+02 -7.982316475003476626e+02,-1.303356800019206958e+03,1.718467013723954437e+02 -7.861007442366864097e+02,-5.579579414929161629e+01,-1.320748442840565076e+03 -5.810991571178793720e+02,1.790668337341480481e+02,1.412693469459428343e+03 -5.688841082168794401e+02,1.426674051687754172e+03,-8.010007277857546626e+01 -1.424365082466851163e+03,-5.591823225910078463e+02,1.547360395994629130e+02 -1.418260680277899155e+03,6.447766687747051151e+01,-5.914585980875659743e+02 -1.315815036927222764e+03,1.820219067290956900e+02,7.752050142168296816e+02 -1.309714981246806929e+03,8.057685995210898682e+02,2.876859265757046202e+01 
1.378532999392024294e+03,-9.475546163768949555e+02,-7.807067430028159833e+02 1.491101612643861017e+03,-8.187878911784133606e+02,7.168810013039011437e+02 1.504378911701863899e+03,5.481091518971567211e+02,-9.187689849104049244e+02 1.616920918173165774e+03,6.769327743141775500e+02,5.788305135645115342e+02 5.667300689798133817e+02,-1.627650931571458386e+03,-6.612756413695425408e+02 6.790192881037959296e+02,-1.499059388742953843e+03,8.363430847472923233e+02 5.735820095967370662e+02,-9.441958708425680697e+02,-1.478923201437725993e+03 7.983763482558605347e+02,-6.869623581997329893e+02,1.516045489080818697e+03 6.993954119729967260e+02,5.513105248225369905e+02,-1.616978281527312674e+03 9.242276633949672942e+02,8.090049261766590689e+02,1.378053067061416868e+03 8.184834411452368386e+02,1.363595586874766923e+03,-9.372341927323776645e+02 9.307480713274635491e+02,1.492457929264358881e+03,5.603511016283881645e+02 -9.307983572293142061e+02,-1.492374599025283942e+03,-5.604894953193447691e+02 -8.186738917716397737e+02,-1.363731915964772497e+03,9.368694254326629789e+02 -9.242700904801380375e+02,-8.090759345825546234e+02,-1.377982921491845900e+03 -6.996875391908079109e+02,-5.515983318669281061e+02,1.616753731334099712e+03 -7.985444365880805435e+02,6.866014941334057085e+02,-1.516120434216226386e+03 -5.739383475486121142e+02,9.440633614507000857e+02,1.478869548939179822e+03 -6.792900994017161338e+02,1.498785675319946449e+03,-8.366136864231536947e+02 -5.670796005299498574e+02,1.627602686715093341e+03,6.610947139862807944e+02 -1.616822270023855481e+03,-6.770494197444879774e+02,-5.789696282004430259e+02 -1.504514594681671042e+03,-5.483313079406922270e+02,9.184141827747965863e+02 -1.491208124943704888e+03,8.184254504975290274e+02,-7.170732947752254631e+02 -1.378843147706513491e+03,9.473092654522874909e+02,7.804567442280463183e+02 1.584489418789225056e+03,-1.883876011287723486e+03,4.299367208835884213e+01 1.599907036018945064e+03,-3.050430833680687215e+02,-1.846263847175255933e+03 1.859720871054988265e+03,-7.533046566298685853e+00,1.613327472948968989e+03 1.875300406678335321e+03,1.571172190097363455e+03,-2.758810138025534684e+02 -2.751990001622239674e+02,-1.876185657890463972e+03,-1.570234659990613864e+03 -1.569180411383414153e+01,-1.578909329933991330e+03,1.888979379223353817e+03 1.537773327623712483e+01,1.578571395499443724e+03,-1.889264373937704022e+03 2.749381736675210277e+02,1.876233335826987286e+03,1.570223382258684524e+03 -1.875306372629160023e+03,-1.571228080280154927e+03,2.755219202089027135e+02 -1.859701003363345308e+03,7.217874295875285284e+00,-1.613351815438906215e+03 -1.600266513440163635e+03,3.047379455874483369e+02,1.846002673476180007e+03 -1.584824240270395876e+03,1.883587146555032405e+03,-4.330806834798309524e+01 2.751580822960364912e+03,-2.486007347304538655e+02,-1.853689548124000055e+02 1.611398444426871265e+03,-2.076314517715665261e+03,-8.715928389000364405e+02 1.749091243873480607e+03,-1.918809001555268196e+03,9.623995200308421545e+02 1.619574944353243382e+03,-1.239222743532253617e+03,-1.873143078233903225e+03 1.894949501326790141e+03,-9.241789190699852270e+02,1.795110223069319545e+03 1.773664275765259617e+03,5.923306586983419493e+02,-2.042209692377373131e+03 2.049108372268141011e+03,9.079044404007614730e+02,1.626107439808695290e+03 1.919606900134974694e+03,1.587136842361944446e+03,-1.209738398403369274e+03 2.057250984517960205e+03,1.745052080865178141e+03,6.243978073055643563e+02 6.253118210934724175e+02,-2.072281378365647925e+03,-1.726845683690275791e+03 
9.005755011350458972e+02,-1.757220490019601584e+03,1.941262711796275880e+03 9.335012295928689809e+02,1.591112077962034391e+03,-2.065017871523619306e+03 1.208827791671634941e+03,1.906766084385130853e+03,1.603227828326614826e+03 -2.312749624868818046e+02,-2.747679662236234435e+03,2.532377646798806836e+02 -2.066585745791770137e+02,-2.366090926341302065e+02,-2.751121475114422537e+03 2.063543985318382852e+02,2.363090692864599873e+02,2.751170093974476913e+03 2.309695126782845591e+02,2.747671897221782274e+03,-2.536005312116034816e+02 -1.208912988988030747e+03,-1.906708347619437291e+03,-1.603232254595191762e+03 -9.338153802740702076e+02,-1.591413096390093870e+03,2.064643841489782972e+03 -9.008658780612915962e+02,1.756804939152404359e+03,-1.941504075585362443e+03 -6.257308003513546737e+02,2.072234022865709903e+03,1.726750740544057635e+03 -2.057188503399082038e+03,-1.745049581336911160e+03,-6.246106148301645362e+02 -1.919748268521095952e+03,-1.587311360867676967e+03,1.209285007419945032e+03 -2.049033553547601514e+03,-9.080566040323811876e+02,-1.626116754820904134e+03 -1.773972078699697704e+03,-5.926550876316190397e+02,2.041848185124994188e+03 -1.895054767280823171e+03,9.237352122864908779e+02,-1.795227474886749860e+03 -1.620012636177589911e+03,1.238970222779204505e+03,1.872931617996605610e+03 -1.749363801658917055e+03,1.918410180057762545e+03,-9.626991588737391794e+02 -1.611824710475071925e+03,2.076102115206173494e+03,8.713105703125619357e+02 -2.751636775163151924e+03,2.482429597619677679e+02,1.850175410558365741e+02 2.819486845238788192e+03,-1.227648640056062732e+03,-1.063096707450663274e+02 2.828162390533582311e+03,-3.452315832136267204e+02,-1.161998987401962950e+03 2.973334855834526024e+03,-1.789263453068564047e+02,7.714883006466220650e+02 2.981959980866465685e+03,7.034744983688980255e+02,-2.845475402379931893e+02 7.230249676687612919e+02,-2.984012797181214864e+03,2.020681132349376412e+02 7.489778006532395693e+02,-3.367769033382248267e+02,-2.965390795747931861e+03 1.184490518661768192e+03,1.617447064878777212e+02,2.835268922187722183e+03 1.210447621351424459e+03,2.809291044684306144e+03,-3.326099550843914585e+02 -3.164204194545539508e+02,-2.979684082978914830e+03,-6.994925902354550544e+02 -1.712007129092333457e+02,-2.813585486746007064e+03,1.233837924798716585e+03 -2.989023076954838416e+02,-1.214904567546315320e+03,-2.811155154417737776e+03 1.361337529030404312e+02,-7.164565036804343663e+02,2.989328800860578212e+03 -1.364724277583266314e+02,7.160417096522999145e+02,-2.989412742747303582e+03 2.985993340325934469e+02,1.214748407075016075e+03,2.811254834628835852e+03 1.708609740912376651e+02,2.813417148331899170e+03,-1.234268802572842105e+03 3.161164165399382000e+02,2.979774390314867105e+03,6.992453031787978261e+02 -1.210633049245885331e+03,-2.809258029081730001e+03,3.322137054870627253e+02 -1.184657796865844148e+03,-1.620704477796920457e+02,-2.835180430639548376e+03 -7.493994931753793480e+02,3.364579185183467871e+02,2.965320466441154167e+03 -7.234309579091122941e+02,2.983889571783220163e+03,-2.024343660104547951e+02 -2.981934995609053658e+03,-7.037366589637366587e+02,2.841608642874509769e+02 -2.973292783721014985e+03,1.785519144542232937e+02,-7.717371548119670024e+02 -2.828374738710904239e+03,3.448638511725153535e+02,1.161591262698065520e+03 -2.819682428423139299e+03,1.227230202143105771e+03,1.059529791444305147e+02 2.881304513741023584e+03,-1.374783354193673404e+03,-1.125291085954029541e+03 3.033515438118813563e+03,-1.200452184780246171e+03,9.027312106885174217e+02 
3.051646416594431685e+03,6.506646001614756187e+02,-1.312369889239718077e+03 3.203921179788254904e+03,8.250851464605177625e+02,7.157852853995125315e+02 6.825453354012404361e+02,-3.216977826098676360e+03,-8.021287499594091059e+02 8.345628163833371218e+02,-3.042768927986690414e+03,1.226086112958236072e+03 7.005984814201951849e+02,-1.365872669659540179e+03,-3.016915414476985006e+03 1.157021815870793944e+03,-8.431097265139912906e+02,3.067359044301229915e+03 8.709746846479574742e+02,6.593944380604731350e+02,-3.203876881803872493e+03 1.327437412918172640e+03,1.182806721685336015e+03,2.880469262797486863e+03 1.193479328296101130e+03,2.859492562707587695e+03,-1.362767176281363845e+03 1.345807443829256727e+03,3.033862884038389893e+03,6.653595456503406922e+02 -1.345957356279911892e+03,-3.033739311790014199e+03,-6.656196986086278002e+02 -1.193744183881032086e+03,-2.859613456516492079e+03,1.362281433023298405e+03 -1.327575570191480438e+03,-1.182969916343018213e+03,-2.880338570803467064e+03 -8.713894667325072305e+02,-6.597978521331864386e+02,3.203681037742064746e+03 -1.157264135139769678e+03,8.426501969698164203e+02,-3.067393904777462922e+03 -7.010493324374873509e+02,1.365666179882160350e+03,3.016904161324157485e+03 -8.349571096592077311e+02,3.042496863227117501e+03,-1.226492748565068041e+03 -6.829864543349780206e+02,3.216954517131669490e+03,8.018467047391795859e+02 -3.203800938671151016e+03,-8.253273808934641238e+02,-7.160441744176018801e+02 -3.051780134508651372e+03,-6.509826429453520404e+02,1.311901143075285290e+03 -3.033616851574064185e+03,1.199978718217251526e+03,-9.030198633870395497e+02 -2.881637164928362381e+03,1.374398520113323002e+03,1.124909310839740101e+03 1.781071476557796586e+03,-2.488504613201357188e+03,-2.065428087701867298e+03 2.098964219088664322e+03,-2.124711308557529264e+03,2.170441213733573022e+03 2.137093445331861403e+03,1.741648533401185887e+03,-2.455678316070957408e+03 2.454955958056298641e+03,2.106004798082109573e+03,1.780112084802250365e+03 -2.454901378352156371e+03,-2.106012592698100889e+03,-1.780178132087265340e+03 -2.137391958256570888e+03,-1.741962917435640975e+03,2.455195473085442700e+03 -2.099221546854858843e+03,2.124198555924154334e+03,-2.170694218960844410e+03 -1.781594984817244722e+03,2.488314824566794414e+03,2.065205230458988808e+03 3.122071386978404462e+03,-2.500457173606919241e+03,-1.960044816937114831e+01 3.141650565272775566e+03,-4.884078609877844315e+02,-2.427239066727171121e+03 3.472637623359345525e+03,-1.092074727769958997e+02,1.982135632271116037e+03 3.492511150936095873e+03,1.902860559500974659e+03,-4.260135580972330445e+02 1.926738393524785124e+03,-3.501888028920384386e+03,1.563949993498424078e+02 1.956413636250567151e+03,-4.836033707272409856e+02,-3.455224082995787739e+03 2.452883595697436249e+03,8.480289013174439106e+01,3.158507643774781172e+03 2.482590061957568196e+03,3.103437592759718427e+03,-4.532346987105904645e+02 -4.431545236004035360e+02,-3.492041647612927136e+03,-1.899805042510755129e+03 -1.123974266903735497e+02,-3.113128725733727151e+03,2.509142553838837102e+03 -4.335657648310167929e+02,-2.486203684676753255e+03,-3.103354953250792732e+03 6.280308967399663089e+01,-1.917694343293884685e+03,3.509772667513671877e+03 -6.320049359778251130e+01,1.917214973507414925e+03,-3.510027413421148594e+03 4.332310247845669551e+02,2.486178041584565108e+03,3.103422244024626252e+03 1.119994400041999683e+02,3.112784436116729012e+03,-2.509587452094109267e+03 4.428191523425172136e+02,3.492151038798712761e+03,1.899682163030365018e+03 
-2.482671475424489927e+03,-3.103439339104013925e+03,4.527765603641891516e+02 -2.452934275758073454e+03,-8.519077135765037667e+01,-3.158457847001713617e+03 -1.956910597927751269e+03,4.832273103631895310e+02,3.454995264575221881e+03 -1.927206706691009003e+03,3.501612185631609009e+03,-1.568005456668483646e+02 -3.492477240167506352e+03,-1.903024130636394375e+03,4.255606715000259896e+02 -3.472573526139696696e+03,1.087929047581199598e+02,-1.982270720519273937e+03 -3.142032771572712136e+03,4.880050899196319278e+02,2.426825311920005788e+03 -3.122424355004954123e+03,2.500019500228152992e+03,1.920014976175221477e+01 dipy-0.13.0/dipy/data/files/life_matlab_rmse.npy000066400000000000000000000011001317371701200215270ustar00rootroot00000000000000NUMPYF{'descr': 'fx"@G`<"@3E$@ s%@yH@a#@ )I%@ʚ}2/$@ ݬ(@z淊$@=>u&@2*@ P&+(@E&@dO{8%@gz03&@J-!@%/]@dipy-0.13.0/dipy/data/files/life_matlab_weights.npy000066400000000000000000000010601317371701200222400ustar00rootroot00000000000000NUMPYF{'descr': 'P?~?Jqx?B?B?e~r[~?dT)j?Zꇸ? %?Jb?e*lŹ|?˾ٖr?냭+?$-Ct?dipy-0.13.0/dipy/data/files/repulsion100.npz000066400000000000000000000075601317371701200205240ustar00rootroot00000000000000PK!EW vertices.npyNUMPYF{'descr': '?PyHLݿ^;!?4*?bCY%<"$y?GFp?P  ?,?sFK06dvK?@gك?sh}_`F?4`*漿؊o?ⶴ?=Voٿpwca8?J 5?tyv?<3?bj?}׷?i6#?"\?~#\鿣0j?DR?⎩?O?Ye2ѷ`aпTӘa?U20?`)MݿK1o5?PQg_ٿ?W0m?gwNٿP6+ kE?$ !?c%r?ec?kԴ#tr9?n^B_? o'?,^,~/ ?EݱWB?xQ?"U?υ4dJ1;⿤m-?o?g@V\ZJU?U @H}?w:? P쮿gLJl?|p?m?ɠ?u5.8V).τ&?"D?_"@?[sHv.ÿ+%Gy#?si?#-4Ic?,?%Sp?T=?H:?hLìW̎ ?9* D?0?*5!j@1?H`?pÓD&?P*ֿ?79e0ٿTj ?6H"?n+ѿĔ-?2t\l?Kإ?Ɠ9ր￴K?gDʿtU?WGQ*)S?DWĿm~Dᅬ*x?t̺?(2:EƿLeؿd J3V?]q?cߛYZ.Upڿ)\XXF?,ސD?Iq2Ik{?Snp?˞11 5/A1BKȿ~OƩˉ?lBѿ&X θTlFǿbN?Òϭ`=$/ɿ濮sG23? t?曆-~~濍*i?iۗ>PyHL?^;!4*b?CY%?<"$yGFp߿P  ݿ,翦sFK?!06d?vKAgكsh}?_`F4`*?؊oⶴ⿞=Vo?pwc?a8⿱J 5zyv<3bj??}׷ֿj6#$\ѿ#\?0jAR߿⎩OݿYe?2ѷ`a?UӘaܿU20鿺`)M?M1o5ٿPQg_?X0mؿiwN?P6+? kEѿ$ !d%rݿec߿kԴ#?tr9l^B_ڿ o'-^,?/ ԿIݱWBſxQ"U׿ υ4?dJ1;?m-ܿog@V\?[JU׿U ?@H}y:տ P?gLJl?ؿ|pmɠǿu5?.8V)? .τ&!D_"?NWsHv.?+%Gy#siƿ"-4?Ic?B,%SpT=пK:ͿhLì?W̎ ؿ9* Dο0Ͽ)5!j?D1ȿH`ݿpÓD&P*ֿ79e?0?[j 5H"n+?˔-2t?\lYإ}Ɠ?9ր?KPK!E) faces.npyNUMPYF{'descr': '7:?8;@9<A5:B5=B6;C6>C7<D7?D8=E8@E9>F9AF:?G:BG;@H;CH<AI<DI=BJ>CK?DL@EMBGOBJOCHPCKPDIQDLQ=ER=JR>FS>KSFNS?GT?LT@HU@MUAFVAIVFNVJOWKPXLQYEMZERZNS[GO\GT\OW\T&\HP]HU]PX]U']IQ^IV^JR_JW_N[-W)_KS`KX`S[`W).W\.X*`LTaT&aLYaX]/Y+aMUbQY0U'bMZbQ^0Z,bNVcRZ1V^cZ,1N-cR_1PK!EW vertices.npyPK!E) faces.npyPKqdipy-0.13.0/dipy/data/files/repulsion200.npz000066400000000000000000000166001317371701200205200ustar00rootroot00000000000000PKC?Jİ vertices.npyNUMPYF{'descr': '?[ ܿ^ӿM}?7xe?V+Ϳ`x?(F&?L5?/?AiP̿m(Uؿ3fŤ?xf?%?]r?>† /࿷\M?ؘM@?.i?>6ܿ( ??e?ȵW ?3Io /ֿR?P?ԟfǿ_?Կd6?ՙt?%W-?ĪAFi ?k:+?,I?oơҁ?zE3迪 8> 9?t ?~:?\ # ])?5%V ?g׿#w1W?ZȄ?XE(*]?ua1?s;激а?T>?)qQ?CaLJ鿎B?hSOI?:??{'k?͓^WDؿ^? n?ro=տA_Ui?A"ÿ6?޽ ? 6OӿK?gM==?^9?d \I?!# "d`?yy~1?/ ?cԦ迶ͷ?EZ?QuNA?y Nہs?;L JDP%? Z?cZ?j I#7=?]j?}ff٪&S*xol?䤇i ?@|'?~ ?#d■`&s?߀l+&?wC?sd)Gs c[b?$ ?Tl???$|E/`]X0d4?\6%H?g\? L\?k꿺Wո¿?$?b^??%?f_:a?BbY8?F?9j?n?= 꿾d*GJ⿖j3?o$0?&|?Ϳ7YPL? 8bIΐܿ aqNa?T`‡?sBxa:ƿ^խ&&￯*u?c+|wԿ$z}[g?$8?TͿg^biV?i?_<Ԉٜ?,MA?{lFhExǿ^^?/?9ݍ=?;JHp ?Vzۿ%_eĭ8?IۿOzv?gG? <2?P ?PI⿇pƵ5D"??:d??Åp˥뿝Λ&?dtO? +??8I`wcX?}nl p"?\7?<__CrcE$R#?h7z?]: ?pg)-?jP6cV?y?su9>:?+FIi?Qsm0?{5;\?+?gq33?? Ec翳`f}.HVC0?-K?C&?E^?Yc*[ ?^?M}8xeܿV+?`x(?F&࿃L52?AiP?m(U?2fŤ쿕xf %]r?† /?\MֿؘM@鿺.iҿ>6?( 꿞ƿeȵW 4Io?/?R忑Pԟf?_翽?c6ՙtW-ĪAF?j 迗k:+ڿ)I߿pơҁzE3? 8?> 9u ⿱~:?࿼\ #? ])5%V g?#w?/W[Ȅ쿴XE(*]sa1ݿt;?аֿR>)qQڿCaLJ?BݿhSOI:?{'kؿ͓^?WD?^п nro=?B_UiֿA"?5 п6O?K?׿gM==进^9Կd \I!#? "d`xy~1/ 㿓cԦ?ͷѿEZſQuNAz NہsYPL8bIΐ? aqNa뿬T`‡ʿoBxa:?]խ&&?*uÿ`+|w? 
[tail of the preceding binary .npz sphere archive (vertices.npy, faces.npy); raw bytes omitted]
dipy-0.13.0/dipy/data/files/repulsion724.npz
[binary NumPy .npz archive holding vertices.npy and faces.npy for the 724-point repulsion sphere; raw bytes omitted]
dipy-0.13.0/dipy/data/files/small_101D.bval
15 310 310 330 615 635 595 615 640 595 945 900 945 900 1230 1230 1275 1540 1560 1515 1540 1540 1580 1495 1540 1560 1520 1585 1495 1870 1825 1870 1825 1870 1825 1890 1805 1890 1805 1870 1825 2465 2505 2420 2460 2505 2420 2770 2790 2750 2815 2725 2815 2725 2790 2745 2815 2725 2810 2725 2770 2835 3080 3100 3055 3075 3080 3140 3015 3075 3100 3055 3145 3015 3405 3365 3405 3365 3410 3365 3450 3320 3450 3320 3405 3360 3735 3650 3735 3650 4000 4045 3955 4000 4000 4065 3935 4000 4045 3960 4065 3935
dipy-0.13.0/dipy/data/files/small_101D.bvec
[plain-text gradient table: three whitespace-separated rows of gradient x-, y- and z-components, one column per diffusion volume, matching the b-values above; full numeric table omitted]
dipy-0.13.0/dipy/data/files/small_101D.nii.gz
[gzip-compressed NIfTI diffusion test volume; raw bytes omitted]
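For reference, a minimal sketch of how data files like these are typically loaded through dipy's public API (get_sphere, read_bvals_bvecs and gradient_table are existing dipy functions; the local bval/bvec paths below are assumptions for illustration, as if the files had been extracted to the working directory):

    import numpy as np
    from dipy.data import get_sphere
    from dipy.io.gradients import read_bvals_bvecs
    from dipy.core.gradients import gradient_table

    # The packaged repulsion sphere: a Sphere object exposing the arrays
    # stored in repulsion724.npz as .vertices (N, 3) and .faces (M, 3).
    sphere = get_sphere('repulsion724')
    print(sphere.vertices.shape, sphere.faces.shape)

    # Plain-text b-values / gradient directions (paths are assumptions).
    bvals, bvecs = read_bvals_bvecs('small_101D.bval', 'small_101D.bvec')
    gtab = gradient_table(bvals, bvecs)   # GradientTable used by dipy's reconstruction models
    print(gtab.bvals.shape, gtab.bvecs.shape, gtab.b0s_mask.sum())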
dipy-0.13.0/dipy/data/files/small_25.bval
0 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000 2000
dipy-0.13.0/dipy/data/files/small_25.bvec
0.0000 -0.3347 -0.6643 0.2446 -0.9666 0.2135 0.6467 0.9431 0.6058 0.1294 -0.6945 -0.1746 -0.2989 -0.2396 0.6013 0.8309 -0.2385 -0.7242 -0.5691 -0.2364 0.6801 0.4600 0.9120 0.1145 -0.8928 0.2460
0.0000 0.9330 -0.2155 -0.8902 -0.2364 -0.6025 -0.3816 -0.3316 0.1735 0.9162 0.6452 -0.9775 -0.7757 0.7204 0.7967 0.4474 0.0906 -0.5791 0.3576 -0.4061 -0.6898 0.6492 -0.0438 0.4344 0.1239 -0.1143
0.0000 0.1322 0.7158 0.3843 0.0985 0.7691 0.6605 0.0231 0.7765 0.3793 0.3183 0.1188 0.5559 0.6508 0.0614 0.3309 0.9669 0.3744 0.7405 0.8827 0.2483 0.6057 0.4079 0.8934 0.4331 0.9625
dipy-0.13.0/dipy/data/files/small_25.nii.gz
[gzip-compressed NIfTI diffusion test volume; raw bytes omitted]
dipy-0.13.0/dipy/data/files/small_64D.bvals.npy
[binary NumPy array of b-values for the small_64D test set; raw bytes omitted]
dipy-0.13.0/dipy/data/files/small_64D.gradients.npy
[binary NumPy array of gradient directions for the small_64D test set; raw bytes omitted]
dipy-0.13.0/dipy/data/files/small_64D.nii
[uncompressed NIfTI diffusion test volume; raw bytes omitted]
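Similarly, a minimal sketch of reading the image and NumPy-format gradient files listed above; nibabel and numpy are the usual tools, the local paths are again assumptions for illustration, and it is assumed that the gradients array holds one direction per b-value:

    import numpy as np
    import nibabel as nib
    from dipy.core.gradients import gradient_table

    # NIfTI volumes load the same way whether compressed (.nii.gz) or not (.nii).
    img = nib.load('small_25.nii.gz')
    data = np.asarray(img.dataobj)   # 4D array: x, y, z, diffusion volumes

    # The small_64D set stores b-values and gradient directions as .npy
    # arrays instead of text bval/bvec files, so numpy reads them directly.
    bvals = np.load('small_64D.bvals.npy')
    bvecs = np.load('small_64D.gradients.npy')
    gtab = gradient_table(bvals, bvecs)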
M>?,&+G%&6ADUQBV[y8_VVc~XTT>LJ4-]jRlpA@5*8[fiEB5,JKC/=(21  <>'=4>&$S*1d],,21.013#15DfOWC?6~Gt;G`urT=?\Z][ckiB>ZGCS^WN}}R:oB2W,6+)7E!6/8C.Ffnyc'0zio{XdpNc`f~dz[[a{skFdWutnOsr^T{fm`NY;$/QYlefsia+5busbdxbf2LQYLWKqQ{O<jSWeQFUcV:B.K[JdYF@ORxghI~|ZZA-Y~v=Vt]["$WrQTKmSEWdtkJ'gnf:K % pmJJ,Y>Pbdl\jYON97d`X}7Oge8\ILhiHGGWX;fhRJQ02/oKYW[rsAV;SlL{f^>^G>sHSoxk65aWF3xLUrxs|g{]Ztgin{r_]3`Z|JAYt2PLWl]mTL81nWY}SYCTZ^SK2RQ49<]alC6_,#&l]MQ^_:n`sY}UD^KD>8_cA@, ~deXtoSHSfh]XWVX'3]hGbQOn<AI?ORbT6/BR1eeL]B\XE|/?*JYB.BcLUcGL3*FnU^rI>=PSb}\EbJjG!S"K8INXB,(bgj{necOYnbjZh[TXC7PLnp`dQ]i^oLawjBNcjw:T9/>?hk<SPS<(9pfHYuWI__i<B|RdfoSN=TVYaP}Z?e!(./52>{vW|~[hB^kZ/cvWTG~9b_0NE8Sn}|n0JA]OYQ]?KDFSFT[U^?+VH:([A+7%AA6OI]UvRo]*AuT"4@wV16#(.ENd|qtUf[9M[TG;g)+~~U83bZ=N[fshdVUIkinm\<%ch:4$T`4MT}iP+OaFR0@ XDK#OQ"*?'q+?JB9*%%iXEC+9ULVucsSI]{Vi@hT806ZaSHL[4376uWH;55;:PG$ G!JLA?LUT7%D8M5)'3;:*5!3; =fLA 9DAP7"HgO67Ms^R:9!>PoP%eFcZir~)&8jqfd.1L5X?/0>, 'MdgJ/G5J>40FECd8^xLP[XLS9<)=Itsqq=kIfbLMY>mkbaUC7ikl+'G 7O1+6DqhoL`yk5@owtD\xNWHWNBVc`C"7AMy}O<E_fQNz_GSRiZeufZ^erk_ePslhJLZ7Ne!=rde\Umyf~e^f|gSfRm^bnh?@zpcB]zhv2e{L[ptemQZfTkz__bBhp\4=i\fk]@QQK}QX`R>Lacaf_tp}) y\o}]hfJ{t{cAQ`jm|eWBjvvvO\>XpbsYUvwRlh||MKblcdhZjI]gXSZ[cthmF08L\g!NT}mQ6}m]b^|izoP|tOwZ[seriny{lyc|Z/VJXhQj`ZXYnq8PXesNrdRJ>K>-cQJ#Vhkl@-JQ761dno`*uaBbskO;xjGKAcyjIWLBozRa]myaQGdsUKwYg[kgQPibfYh89F\Q?fD:?0;UX[xWj9(Q^`6a`Ukin'#brF7[uf^VzsaZ\{geouYEaX]i{``n|hN]MawmY}nHcpY=N}]@pmpbe|Z3,[[?ekwu(Izmjkoj9F4raM3<s_AC8)/{z~ti`Z[RVZKnxYigZZZJiX?urE^Q]UIaRataVL[U>N\Ac7WZbymOGLq..5@1[|\UsLK<Pauh]2FxvETX&"Xk@B4(MB}zavaZvm\D>W~QRa|~mvB rGN[r yg17#VE%]ndA58T@E-;%+-5\I:.$92cx+% Yvq>3nqMYln`deHmUyn|>,gp{o\kF:^BPbfhg%$B!WqSWRN<_T?AW^F<gfc$2 :]lNg^_*B7DAAb^KGWIGunJko8JEkgG5;^W2i_W{~P0Yp~Z,((PVOQDb>%<II@2I9pC8FRF/-Xk_MkjNngYzb|\xZP{||M=~mZgeG=kFLWooPDNOvYqtKGZ61=Twx{uT;O7hqSiMPIuwtu<Yqql^QG]Ib3axslZVIZre^M\UNAG]gZMfRNvia|o[AORYhmS4QRWYIbci{YqB3)LZJzsvKZnghgPLxxYStf`Exvq_m`wjUQ]Z86^UWJGjv\dCZkEX`qmEZjqXiwGI'1dz}BsWB(NGYpW+WqcYddP _Nl}Zyv|uwg`LyvauoEEs_SWObxW;@dixtgk(:HaZ5WscoLc>FQ4A_[OJR11>OBaZ9--IINAOOBu@LjtihFM# `WAPdVLk3ViFL7iucys^JHXVVZNYflqqW7ZcT|TPmeXqza=m_CeALTGq><@<HBe|?HHBZ={[^%A">WBC?Q0~Vz]Pu[pF05CGdse"giAO8ehSQ}e/1>`EES?Le54JghdW5H7VZYJE7IV%Cbh[X:.'ke6!P+%5%Cy[wBkxv~SRjkJ=[b^1n9DKk\enNgakn`Z3_W?uuiw\JfpY]Dz8)[kTjkMy3"#lT>ukaP 6S]?1P9- 86 .pZoxnwxXepedYH?J~Pg\QufZPs}~9(SGyqo>. 
9R{~y{/CSy\dQ853/Bf^JfIH2;4&,?G3@OO }psjcdcxU#koz]X5&)C9)S%*>8+#@Rt:8XJ4QMZgkV)>#?&P@PQE2%4@|WJ!+5t"ViaS7WZ|cq^w_(D>Amnz'WrwvcO\9JiN<&WNS#%hQ'UAUZ$7e03MS4S>;XZUG>;4:VozNrz^Xo~UqxZX1<eW\@^R*$dS`V9/!7A #:4 aWTRr0/X}pg6=LZ=Sf5?:7M=,7K_@1D)79)$<<GZ=MI1S;!;\=HK^22H0L[Nfl\lU2X[.QVYHPa6JRNk<4fI2@<Wg_hoW9SJ^oB.e[<?SnQ"%>BW]VIGSZ4?=(?RG4VpQG7Mi>Yb:5D[IRiC0ad4DVFA*)Z'OeMKI<0Xkd$wN(&L>RFW:3'B^&7EG6'/:SJ"js`dp=+*4S`iZ%K`PL&OWLL[\P9@YM4:-"Ef=&^@?n%V[RnQES^Z5TH(Zd_aWF 7Y.i_50CHRP6,[jrH<aveM1_yaXKt{OiD[xMrk&?FC9YhaY04J\*98>9DICD2aN@D9+DIPiBYuH=7]^X<'__ac9Unh\aUDZtU7W>pm$/Q@UpPVe9ep0TbjJPeB]F_c,N']Y|OD;AnXejP=C[;+i}XL* KKQvkJ.#8tIB8HJMK/46?n~PjZmL`KOEUEGL3Zvm=@8TU@WYX;PA>OFUOQSI%bqfCorK2PcY11$gsoTJVMV;kvSF*Qlew[>W?KYn~rvimRD]`|bPKpEC=;QkkfWQ;G?.UQy~>;29N[eI?/B^de35<sWO?57 ;i1#$+9*Gw=R<1d|DUiDlwhiQ8LK;}Pv:5BBE;zifK=WHK^SIL: 4LEfgF^~J(P^AQfa)23DXAIY'XH-HL6En4ax~c;311!FGHFMOIg_kumVlt aNF|Tq=Sj[D #1BF"):TAU6B-G< 8T*/NE&$*'3_=62 SppW^((;xiMM!EkkJQ;EmMamg;?]-l;Sxwv@6(olLAP4,VZ2]+=]mT D:XgW).(*)`>9+#R/+/`H%*$MK6%5"@;:7KbL78~Q&1ckjXORaXITKrjT^\C&g{p@ 2>,>expOYMC=)'!HMfjP#R5_fME8*Fm->J[NJYNG)!BhTF[JCbc5%e~j;/`QdY5h{THUff<s[>YoD/':MO|q\_D-;c.JU6bcU8JXFCXa\a\3AQ`kkTKH+,m?^gmWOB~9bY]f;\p:Au_G[[=Xz[,8J\^__S[]iv}U^raih7+OB>DUMJt@2J^sRW?[TGgrz}fg+k[jeZhX^O:F>ZYO\^g:]ftGmvk`C@iTSJZrHONriH\vaa_nhV^GliB3loN\VB"HW[HukwkOU_wtQc[T?azhg{m[cHPSqNqxnV4HClZJ^hZ>BiOltUW`LRt{gyx^JXLNedQ7AduKeoy`l&H8*.,TAPt_{Z&A<9L]`s<ZPa.LFRkYef<cy+]kl^o~`SwYqrq}e)2>dp[`=LMIttwSOBvchNxp-#5pWg]OmQWRC>*CevmcC<9fk^I?<HQLWSoUt|Udo=TPnhVT-hNlb5>U4=P{A_Po`?VrmsuzL@HGdQ\oohhcRpWU;T\eOOrbUeNpt6 =4,JA5 NdXLZyqX<RMxgKAkJEN]\T:ZS0:^rnYI1GK>KrckWL^cEpkkspfY&GefhPp@0`Z`+&MbYo~cQ$:|zh9* !AoyQ"9? `5]fgfvoITgk|cUSoy]MNibHedTt^G3b8`myhdPXbfokDaX:Xy>7sr8;X9)!dua8E0XUW 806O?VQGcJnOv0GDPuE,arZUOKW7AupP^s]TjcAO9;B`\/6J??Sc^;T?+^GB7$H8G'`QO89#6DMWd{;K "RJ(^Z[Z'[O'OoT^hu[Uu`"uO8G}R8KzyqaU#0T^,KH&-2KGHX;BC,7D9$!.5Q9=9/';Z@@4c`z}zFjzB;mL;KXyZ;!=g=&&!ajL9H(Kn5;L'6B.=y|dc'Kla^~V>oxhnI_LtwwR`hLXS`iRQhWA]2UJduETLta5EULRLShmg6<lM5>mQ6LEF@8g<& gsWaE{jidJkL)ZdG_}n^\..L~M3V]1,Mtf]J^Vg`Dbvfe10JGE`iVWW/Go96Lb5dvm+:@d`9^I_KT$0}Ki9un[b]OfKh]CWUTDTdqgV`K)G`9VW@GK9I]RmUL@<8?5=KUEGIW=2YQ:8[X5(Fw.?]Y=FNF<h~HElB x|caYOWKdKP3|\CpIViq\[M\I$:UdP.6#Kp?:AiFBxyVN<CX|WbcNA7;}`sc(/H2daiNIfaZ>BTfNGp3'B7LtZFasfC^^=_d47VYGnhRT}+W_lj9MeU0<c}n]Qm<2eM/IU`UucNCUd\My4P79oWSgb\ZsK4ds`J#li 3)5wnVSu_JPXL=qi][MFWh\-][L?HNRFKJ1Fd\\T70LL-RWArV#52B6<H51B)/2 fw\[>,=NF!JG[M "Th? &@,2MsU`hM~{VSgo[N^mc?823SkRD8<MP;"6~H1:BS=aM/_M*Ajc`Q,_C7IY}|=3Ch@2.1-"3K3(%:2gP5$-5eh]JQ5NVvOFLH,TW\5]T<L.,<^V'8HM*>-4(48%'@,)8?#/@#TSaQ)~H/*3P!4AfxX"#6]WbQFR%M_jmAXF);K[[sV0BFfG*/Ibrwh/(%854wI1*0#" &E_D7'&4$4,<\&=7?$.FP;31D\UK^W&WP[Bdw_szvccSze*5aa<?W|g^Xc,A+;-AX/1+$& <84BX4!BO'3&Ogog9+?QVel`qobiwxxGwiizn}`ildVTXv~xkA5T\o?uviTaf;pbKMk`?QpG%HlkSywbjQFKYi[flPDJOT^MSUN5Znj!hUQgvu}ODP`YB[\8Il~iU(#YGu_Vkpxp^[ZeJfjz@CF4@P}U:,2PkfSrQR@fYkqeX>Z~}eQZxZJOflPlAQqVGVLcy|TN&-nolUoamq4 {l}XLMhd}lIjemMKOPs~r63nsmyq-4&*Wk~\5IU83ZvgtTqjRkO<ggcX[Q;6LNHS~=jp]@CJauh@AUtsI2|p]:HUrb`yb<>MkS4dk?Hd]G;Q[8?lvind 2V_]bm{aNLpf^u^%EvV=vw7=d;L_MYY#(\gxFmjGmdepJ"&N_upX\I{uV^qY`hqXPk\Dp:HWV{Ozo_Zizor9mhT[_eeuQkogQCewV|?7bKAe\bBXN]n1SvX_@[_jep[!^".:\5. 
|}qQ~j1cx@TzX_:\V3CgG/`k]4&8IBNkT/h`QSbqgwCBsp3a[1fa0PH^yj_DH[TGliEG$CNE:W-HY><I=U}uU`vheop;Ddz}nydNY<AZtvpkO*BbNFnuRCHDZ^rmWhM(_wrhe^72O{^L<hSRW'4Q:d\E+ 7]x7)BK$3xUDIb`DXqt_@jpmX{m|_!JdtJ.=cuUJ9!G#&MEgV+!Vc5I03g3AhA^QN=F@)}k=>3>U~ZypkV2}g`W]p|M{viS;d?SU`4?H3 =G>Jks-3)Q>?iL,)HclE4(9 @5<J)  R^dQ>L,)D`ljbI=ET7]g?`D!Q4UtbRiHIjk=#8YksyN1AjXV< KXX<@9^-,<*5N>hgb1386SQAL'7E2%E^7;b.9OGFEQR0."1nE$YL&UW'?4&/QL7EBWa=LUUI.2*X|kX+2:?;DUZ]dE?EXJ@\cGg>$*ZuU>BDHR=IZ5HSV_T*@Q#+LJJTHWH:3QT3EVTq|V\[_^G\O|eE)$.6D:AG1m%92P}hVUA6QUALWbZ>3ME>D+'ReOHQ1QnZOSFb<.NS\)?_)UhUZD_Kd_DI_"7Z^XDUsCnO.'=WOc^NinoxeDAHkeHg&.bi1+DV\K7bMW6>NFASUZ#bN=?XS_7EQZk@N5JO=E42sm;6?TL34SeMN:HJrkRsXPBQpQQapk}bQZ56EEoJ]gbfhB)GFDB<MR?)BD:7(*6KkeqT\iQIB^smbcx6lEWfLO|VM_\]U_|B[U91(3ujUY0i3KIXXMTP]HDOW@:GlUBsLssbmZ6F1#8EfAhIa@BNVUSB-0O_n^BjX?EC\zi`T^?AZZzUZTSF93]BnTRNN,F\hm@4DUB/Dnt\bEE_XXle8Lyk{re\0E<??&(11=,Chb+R?FLD=ugS?L&8&GkasLgR'kzEZVZU>9k,T[]+?A_a[0]0KALskt8.4FFT`d<9`]mvpy? N|hC:6A*(cm0& BZq8T{~rL43>'acL|gKOXQN8IR}R7D}|J5eQ{dnJjiixG,Oj?+ZPNHDRNZCDGQNn:kreMN[*/QbRDPSIMSXdrTA1=D)*We6-Ta8wWW@JaePJvOdms8SeZ!EtE0gigY)&E$,AM#=?B!0B'9T:18;';o)*0 /6_ R2F.2WvL/\9/4]G7RfLBUNJCCo\Sn_^e=OV%hCK5@;JXE=)'N;B.C`K 1@cN3?(L 3& @Z]&8%2ozMj[Cbw94tupQi^`xVK{ z~U?O@Kg<FS."+@PbUEU}PSPykehGNCLd{baPN/YhqVmbkz[sfi}lxxyjw_dZwiowyp_qm("6?;Kn\JX>F_DVWXX10JHYJ]nH@1'3lh/MLVDJZXiOThA?d*fOFURT1]\Ixufh[eFo`l~TRb]XreNRki_n<p{fs\EM32%6tfz'PDAVFNh6H]s}PLM221?UpkUQKC`rMNUa-"G7Rlq\WK:<DbQ]qY^8Xlkbl\?SS,A_p/Hw^A5 xx9'cWDjob0$)50CjxiR9+U@*ekbXDE`zl@YuJ4C{^IgFDZX7'bO8Q[@giZ`Z^f9NaN|ZVMFC+8=pW,6WU]VmCW[k:@dhi1C<{J`P:!MM';D7D;EIDYUgqA%GL@,PXwa29I<7d`455BTQa>JPL;WUy_@T]7T_HMf}OJRY5D]BJ=t^1QG0?d?,Saj:JMD^C&3OcZRa<C5OnNXZ8gRbVO[2CD\]p[_ga_bPO?=rNGkZJ\UbUrnc!1<VubheVRoNF`{Q-Qxzxc@t?^spc%^(%Uq#6Y7E8GJHP?\[\k>EC;Y"P<W|wg]+KD*ipDUKWwG3ULuJKPZkB-pHu`L0+PeTnXU=:fVWh_p_utJ|mK%-]A0)3_B HEWEoguZ_e6M?`cCOXVkct~w>z`Miiy\bM/nwlY~hC]lJzQtJR]kN`8mgoBKIb8\<UZXQ]Rc:9rydD58bXe<7"(@Rf]L]Yk<JXyvolsm" iQ<iNPSTOY\B)=*ITZ0SUBI<4HP_UC8),BG[-#2/20R+@:;asLW@IMTY`T:kljv~E-/mmC>Vaa{y?kaW^F<.lRF*A.Tf;2#<?;U=25.18.qB5?">,rL&F7EdG8ee: RqpxmaW^yM5s~awXXx[`txF ol40WCJZo[Krr/ Dm}bN?RNIZCV<Nt`KafbPoxGm_sv`^txt{mt{!bffB@cT>54Y\H-5JN=~mWLZtfjvVTU2:KD=+EE3*4Q]<2Ie_UVT[snoY8=]iRPaAKsA+h|{j4 ;Uzo_N6EaZbcceZTnnU(nXXym[xdSHXpY1]_r]wldnURU6@70da7Fc`j{gfU`QSNh~v)LKT]DVbjoQwo=v\X(mN>2d?;S\U^OtteN;"|WNR{kVbcs`DJ[Z-HlkmQgQToMZr\Y\]LJSNXyh[GN>0<CB1:uY3;-0udE0~`NMEItakR/FVjNO@In[H]]VPTkwWJ\aqiC^iCA9C`\a]SJGTLDyNI]l?/aX8os~5/\S5diUeB9.FV_MLGn^B2n|dGNVeVbeXZjoID<N[[;]u\8)17AJEOYSVO*/eqK7*?L=:7c1TZ=$;jsqc?=-TYOZWX1a^K 9=G%-~@'>fDZG-SsfxxmHGc\^D1ka_\K?[J5w)I`13L:`X$4"Ep>@I^a8;[D,8]\fdDbj->R/6DAEA,/33'  -{rpZKDofrYZUM8vleqq@-C='b;G\oYA;v[<OiM/CY9.-SVDJ_: <'#?\yb->d 8'4BK!77HW4WQ$5(;%&O<5:-jlDR`nbvq1:K']_NZ?lP~H')J]?-$g[9 $ UP3;X6 3?4P/#4FL1 'L+;25SiSMK!9P0ZV=HX?<ZsOtlx`c[y[hY#W}7OW`xb,mJL\8YnLKi;$1>\=[:G FM?:>,0J #1"07*X22$ 3C[rIHiYAHB\MDFajsrfekfYa(Mll_sd9&)[wy`rU4%$pkg:;N34 AKQSD<,1 &=\64,.53?TuT=CDomkBxJKL0<[;(CF1#DMLG>;Ui9`_;FQdI]lqrP=B:aefp#X[|Z]h{zigJcmnlax]SbcOoaUf{rld@S+IqqRYkTIxfb]V^a\|qCemd_fzRgjdMaqx-Z^na@:'OiuuRYmjq}^wNv]_NT~i^_Jjcnml>KCGJyjLDusP*[h/_o>CpyoJFa.'2+PXKr^@-EI>Tm6JOT7a~VLGL)eqsoothXe?vuegdXTT4L`apbrN<KPH^]`]P=NgWLCFGR[>ikh^C@IQruz[^VG\e];+}f<PQ8?6Ji|u]aP%|g~S<SqyTZjUM?dbzW)5]XDfjeu]KL2%9hzo]u/?B=rtU[U'G0TR!gZ&5L~zF\ReJPj]mqb`REdV^ES*>B#1V',voMWO]lgcFPM83 liU3N<ERH`@:W|vQR2U`SJwmqybADUc%swpT`HQIdUbTQzW61y:/P6*GUfJ\V[C?Ioi]`^fSVDUj`jueKYEnfWbn_mdYR&MW^1"3nWSofI4 %,!N<7T^lM$4]TRu}9*;!9S?>\!%:B}h8!.lwoyoDS4qwsf`SG8AAVnx^i5Wh9"aVs`IV`h4fjJ00H@>>##TskZM2+ [oG7L9Hd&B5VHI4G4=C8/-#fu9&  tl_M[FdY}M[zKN$a+;N,0)q"S7+,4NX 
2,)@>98 Y3NI`=%)G,&N A?(6J/.1+N<)8 ;A>a32'?TXFDD7nXCxaUf?9KF_BOR(ICZy_W]^LdD;RTO/ 80wXyW*4-$%`N,+plvhD^-eK Qwv]i}kst~nsi5Gql^mr91>MpU&":(408/<.6UfP02;> !]d9 ,Nsl{z~g<Gcmk_/Wk:7pWGS^K7Az{qo]A*\B_]_[?ZjoE-8\higigOT[kUKVKWSAA><2FLZw$&_/7YZYdpt|yF5cw{Z=cs]jQ&47kpdr`:6oyxujBFY?~LiuAKsk[J[m^C]f`HVH>P{uc\ra@Y]\usdcqn^\fOmc`MGMY<=Fbcn`C##3ON>']zO>ShV({WZTN>-`ozb\fM?tfDb|K5Xt`JaX=4.Gq8PeKOPbuavFMU=Rbl\PJ$jvWesueGsOKcS4B:N\o_EPE#2k`$YG=acHtoYKB6Ji`kW\dahfZYRAZ\o`uMLpWUyPFwe-GWR_`Q|O9mDD^JcjP3MSAic@?U5*EJJk$$Zsph\@ EP>5H)/p[UVDRceY,WS_r>?97I_NJYjnkww6Iq}<DomT=cYzm/9an}PMPV]QLZPq TfU'Lx]cg0JG2"=meR"0, 7U -4w[LE@{M<PCnXw49pRHSIW]_wMEgq?Q>H_vA;]RNMc6iY@h\vj,@P^KQ^~| PP4cq[P=3Pi$,Y=R8#)( 0k^yeQnsoPIkZPihLlGLkT;jG-Z7A9BCMY/F8LCdB":?%gQLo]]S?,.)>_Kn60(10$ !%$ r]Or8adqxwiLfzhmeD9':@"GZB%;A(0MQ%).:->87#'/K/-!-/5aS;"=.:itr],%?#&>bc-@L-9;>Q`l|r]CMQTG'MYqRkpeP({ST\LH$/29{:D*/6&196C /0&#V>5IF1#_60C<<::;)G2DelM#0ELWpey^@N?CjbSeehlfvaqqdJVX:DDYe<3FDFW))E2+)$M13 I#=*/&16ZW3= HOJSR@%);A\qRuh>2:{K>K^cGQ\WYWENMuks^anllr@VSnN[oJS;6trI`yld<EEwrnux=1I_]unenUbWohdbftZLjVE$Ilsr\eHa[[gJe^\ijTWEL_TgbQ<DK-v{lFfGfV)Tc\`OjvVIQZTbhgp~miIFXJqkyRMuuzs{{`v}i{Q3skWlDhlk]L>WmiPz\ytjgKJgm5dTHO'8/H2`Y_gD'YugM:>t{dkzcWmuj\Ad`Q}sNckKms^Vnc`WFWlAzyd[dXGpWn`?KvbX^fLD:2JPmskwI^GfYv|)NXyw5h`69uJ<LZJ-,<khUL]pLrzifsueEZMsNYn@J8JO`T],fikiXPAjJ:KU^B_P2A_S5LqH{@ZT:BD1IG@MXLNA1A>J 4CDKD_Ul^Ucd^AnVWaz`Z][GI,9QjoJs[\^=YUijTOR{u5N[l]jbNZj27oduTCuZI\Z_^IZ5gM_zi1!ETeUHwmwmy|v\meewqq\uOo_[Pnkhb2\ayx|jgdSRHdppt-.<BZX}n$d<aoW;SeXCJ.(4Q;RU6$-&1%yzd|symminmrK>Ywfo]dcEdvPvq|..kZwG(Tw|vy;X>UpnaKm>'ake@Q-G1JY3)%I1qlXiv[mfUQin4rnbM=ZmO?l`KETJe`8A:)04G=DG0,Be/',I+ /4&%7\6. *+"IRwD9=FHx\BL1ML_Y[drxDhhf``\f7pugwQ'/8vdZV$5OEA+GL/?0"PZMGO=<MF{~lVVS;Yadvb^DPt7i}mx~}u|_lzeuD_EYLK`h14*fRy;?4/APJKKJ]rmIF]`ljw`Ui[y~`1DHA2?KDQa]l`83TklH-[_oN_h[h]KDpmf_aH2K>lVmgfXg\\f_JZgISmZvwmH&DNM5<^>;9#?GGSF@G1TxprblY9 Y}TFDJJHP@PXwxsNKnZP:OXbKKJZX/S)VfBVor>ac^^jNSyn[|bRH_povhr~mM:?kbidsN9V0N.mY )JB/Rl^yi]\tY8#8__XH8miGnpRKKJ0@=uDYGa3]O~W^StT$cnT]u}~M2Ulz^M=An`Ac;UmF>;jE<J:AcQDh]taDGmBFUEja32dUNYjeMm7gQ9kOod|pU<6KFkJMZSX\bKhtJev[VNULq{Sr9pwKSFeg`avVYJ:XcS`}p^9+Ss\BvMbW.7Q5J_c$<!<=SO8XrjFIpN)tykW]]UAL_SJ:%":vARmVDgWV.Nmdkg=^5XqlX@R`Eh4iE&BbcAHIT3.Vc<7nnHhsBKmd`T* "8SF;9${TbvvY3PLtW}fkBAO6S_cSdZ(%B><JXFaYSa6=]kUPITy,bp[9Fktt2>]4NpRERVqMbX*]T%*8+26M?(%(yg^fxznP}qg^gzmaG_}U@?CXY28l~VQbIcH[eKNcRf2M4BqhN9EgE'Rr?=!-$(EL[XU4?9G_hKXE1F"YEK? 
zjXjuRTuajdXY_ddB)[OTS6gMCnR=S]Uje;8A/"C?(cV6PF("-vH6C>"$),9oY3JZ64rl^E*Thkj[IipmkSfft{_eTXxh:NlrzMx_trd=S Ww<X]:!y 0+<% +u1CE-=6_CF"WA"1#/BJ<4LkB]MXrahszsj_trluoY}ssszii;qvpgCG@3IX][SMo/E)<lDJ<&+mz\}I8& 3;Exc==-2gTQ[OV:j`QwJ5\YYtkkwYNhU_AyxUY_uuw}U7D&vixcm@?XK^lDbwU1@6@>UKW3!Msu^A\]AJ5PiDPVICNi;NbD3?iX)\ETb[IOH/PnN6X}|NZvjQyu|VT{U0gtxP\E8 )Wy[squDOF;jR@niOFeM+Jru9bOGZV98Z\YlWNGWYR]IRPML(MZTVLONVUNza^SXU`:JTOMKd_Zu_G;<AG)A&R~ETt[,!Y[SdwHSNK^^|yZ\8rgUmt,W5QCQkshoRDFID]y_;pSHc\ZA|[N;$95^[6hSP-?>u_NjhuN`8yuUZWXn&/YU=P5Map2DSF7kwaQ\6\.NIPI3ShZF5M?L*^N5k]}FNTOR7CVuR"V3tfIa_V=ZHXLj\BJoNcExO9\Iy8CNORDKiTQcZ=ORJUD7<51NA3.1!cRo{am>J=AGg}O4OaeSQObS=an%KR6F>>Edr5r[C@41f\cX.%N6"7\DG:<,>LQsr.M9<DOmc|?rk,*I}7WS% 3"_quoX_W_]6UT@F K?tw:3G.>DcdtjDIlaQzz?EUSVS+2b4"-.TR%4E*0@9OP;PCAiodP\Hd?BEP5&'8vO+01:T=[~WDks?9OTL3Durz=U[)WEIun[9V85F\wVw_cD29TPSdB26[y|gO6;('7B;ATA05Zh1'-F<L%AYI8849(/"<8+:$;7LPc]Q`IUL7H_r\X=IthdNCI$%8v_F@7 iW;&&09CIG5$(C5!Q4 ).,CS?>GBL72H[?YLb0 LPG9a"dN\55"3vSkR:<D_LKU|kk#JW^u\Q8OABVQY=!5L$UKR^S?6!5"I6Et\GX<,Qcm"12-)\xh/1('>vuhojzq[EOl}QfHz_qo[(ZKy~=]<&PEV9DM4I #+S]>BZgzEOl@PfsWqjncc[}ddi7T{|pvoverskUuv&rvr_RIfeVURLG~ZQoRPOhi}lwR\t~(kXVy~kO^q[]avR8i`zjhwt}oRxc_~uTiE(/Lcbj[Ovt>[u<ezyWegbpYWGxOQ^Scnp|aYmI-:STzQ1.Rd_t~s:cd;NmQdzfZV9GmsxCZJ8(O_7%Jf{P &%z{T=YMSiQWSGAjXUkfSy`OoLL>IZKUZ{P,2ZGz]{uZ`jmp_:=QPam_pXTiVjgo]e;>XJMh,L}cyT2* lmwdku\>Lp7txdVpD6PXNPZ\T6?WZ{iG?PTpxjPI/5XnTsbOVS"2F}oxxMZU(.Vn19; {aXq}}VbpK|rvZkH{qXelc`N\hdyK8VjnBg}URkOOZIUd\gi742<_^[wMTptlp;30&TH:38K~br{SYRpbierrkLV}onbSGAb|tYX6GkpPWXcERqdjp|w<@]k<_yE6YfK\Y-:paNnIXSuiS#?xE4N-+)nkyKCkvvrKDYI6>EUtWKH0NSWglXne.&zu_TnU<Ke<4PRUhI,[gcTytUY (LMXE+3%DLswSUT^Z,Qgipld^JNTnsoQb~h||bCwyU[~Y]# b11qX]ZD0E>''/A*1OM"11>6QC?3M/2:pK@Y.%C4.N;ALHJ:(I]i<0YhVapiPpJr;3yVV|uOOdOSb?|kc[& KZu1&%Ff 3H"kh+'.(CcmF7 +eBCYG6YIIcY0J($11UVhoSDDI`T7]gtg]B;ZkR[]AioBU{I[fn~jH1Jc37@@+W2A-WTPaYA*.q_{h[EqpbiSWoxl>\krD,`zcO=s^\a}vmtf@XihnIX] 2[}/Olj`]TF~vrdh`.YLnPW@Odi2NbGOfNSGjf`npJ]^?>UxLW-]CKba$I;#=OQUB?QJqb{P$6EEwqa^[2 5NVVB`__VH9cps]r@8AKjvLWSm`9S[[Od`nh@BNnO2F__pn]U;NaNYYfY^]aN":QlghU|Z!2:SNNAWXLF+buxt<5gOac}MIktjaI[nd]WtqY7;GovNebdiSP aELOKDiUQbZZFPFD6rI5@?7RbcLuN:AB3Y93FFjWVV$*"kreE_wuzQM_gHRf2IR,>GmkpUTwk><0Z$CWI427hrbFP20@suG\?"Fg.QcA:[/EMWmU<ViVva*X%MC6IonZ7B,\}a291vmsMICElYT`.ZS7S>TdefSj^LFheZSU^:5%1Z4R[K8C?)QW1D3:,.98OQGfZ==dagl#R~I+*OUeiNE&E-~G/Ctc`c=>\PEP}uaP_=>>]WH&!W;GZABmh7HcD/L1B\W=LHT?ScA1<@\-?m]N1,F' Ze55/*%=J,6?$Jt_]wR5Uc^jl1/N@ECS_]C30B4?ywP<ObCD5Q+ >UP^ZL'.(#,,F?8ONGQYBOT,, +)&3@.-" )3(544JF#BL4"&S3(KWW}\~tan{qjGV6F= _}V,&%#RcZ$.4 9=D# 1!E" !.*@M$G`+#*%-75I; &R'+" N-,2 6)(,6mZNPAOM=CrjKamYQ< 4cszb`@.RN[E=/D &#+;>:G1;8QM:J$&(r.:E I~2 Tbe^aJK`fbOsb'1\e2*E/Snj8&O7-K3A"NgPyk~LID]rt>weMI^p[><2DUeddhpLAR\WVlV9&@nO.<RlIPRs`^^_ykkhZjaZ[z^IDHemf{_gdJLfbYwLmV*orA@HJSpLOVIZ>i_lc-jHLXu{_eunFpV1KqzeQQN\fpF^[PM UxrKgYFgrlvulXb\KTd[OubSYY\tACdSJZmg+XhboZ)$dK(5[eR7C2:26R:U}R4P9U9:-HZSbGbw`G+Ln_tn[gJDGDBU~ig^G'-Dq`mnS<d]RG?=nc<ONQPdM(5B=0B1)E&Zu`~WQ-*lpKhp~G<M|{kr\|bo8TqeZIYh}VcW}}JU`P`i^g_bhbvzq`sPby0KMIMg{lg0UWeqieK[#;;\e`>JcQad\da_EjkudwBdu{zxzkWjnoUh\7TV`Vimk~_TpqfnauvXYx1eCD{V3XuGw]PuM1}K +fFf>*wwpFqhmz_`huq[ur@.2Z`LyyZFIUpsmpnsnnF0dc@cm\bRrXxnQeJbZ7\ud<8vq:5./zm4< 7$lYpU\l}tysjFFkeuvz^veFnYjdqZn[deUyshnw|xj`RD;nmq`gP1IbnTrmP:6V`\:tx|eV7(7C_OmDGaskt~y\0QozJ3;UlV8CaS[RG0@;85>HYkK8A3=u88ViLIDACHXI"BO@~yH9.>jxZjhuk\ps[krPu~i>_p51}|R8NRg\EK=;F/5-=JPGC^L3@@MSHZK4j^QCZ^T[w}[Lpzysjnktd}KULnfsbVCL[+_s=>WYS30JM;EC,9<gYRP~fbegwsl}ox)bfdx[Z`d]p1qnquA5ZXxQcTR.DGV>OZON\XdH08Yhv\Q2D>S\Gva&fmVCnc+xRwdkmme`b^cjdvd\U>^F1iYnXhc)F 
\iVQ=eny{ViclWaf[Z?TBbSIW?\BANhdGdaL;muvX`X\}G`gPbjY.mYpo[EECRXvT]WuG8]WdVGC@ejKWozZ_qG lZFeB5,hPrha<`Fy}H(>NgVs~jm^]Z9P]NhZCD^Q<HiB*5UWk2pyINVP;cdMdb9X@>LWY\FX3]U[:[AQbSl_)7/JZ[<cMTyo!OOGiJo}:M-o|aaiWGTFOrE]IU^Pj4G@KKbAcT:C,|xU{>,50I&jF4Z[`<M]<kag]G^HEyNCJ0::3>4O94XZk^`[c{o:PcEINoy:P/GF$JQ>WllNcp}'9./IuDJrDOC-P^A(6JMVK,PZ@FYcWHuIQAFXKF4+8U0UX+0VO&  *#)T_y|r{lP7Jhli?4H<k`EC]wLIS@lVK`]E,7?,dbYSHaQ$E=3`Z.:aW1HV^E(6/#2J63, 2h`kb]7<1Z93 8/!;S? UnfdS9boh?>Z[]I5ckUHKRGLYOVNTj2<I?=QSyTSQ@E1 4<>;bL3*'$?|_gZ-&"#$#pQ/6N"E#"1E()9=0V5+3=);Y@;8MTEZDZO9<E451/QCXf;CQL0#aUC=@2=>T7> 3Ab* $ "FB}e%23=$B6 #HD;20k9J6F'kJ[zT)-HbhJRS`MQ>gN ;iRM3 g K9'#99*#$%L:icKP*? F7.63J$YA=-(wopv-R{ifb[lNnt|gEd: ^`d59I!NAFOP*wk?& B;Pync\?hEO}_V~lVWkyyew^d?.~zaYfJen{VIwgYeg{smg\eWvt`eZipLMDL<l}w{i:<`s}eOzmcve2' 5g^d}zZ3]|U|tyijuZe}RJys~XeUIO~nTq_@lo]jniXVsocktUibs2]nuU* Px^yua]E(ocqu~rzixpu|a-Fi`oUznUa{j]iZWuiry_C[a@ibbjTXjnle~O_V3c|k{O0S{x{:#U}qi_C]qkeNo[VtuPz}9jq_\xpo`epbNSYe|yCCo]RdqZDi^gg{?7c`cL&/VV~Vqrg[aeqV52Kq^Ul_.0|lC_g]olLMUWNoz]nuts_Hb\qaYXJJq?A_uh?8s(l`F0GJL/6)Oc/9NJy07nrBF`d8XXFPm`h)HHE -_\K1"mrii]xjhxcgTjgWvj]hj_]}wojXWOi{e}y}trsosjfugKe|muz Xqs_>11xr]^C(<8yr{U=tf_{yr^h{{olv\Pd~^x}p{e_guYl{qkRrlmIfK&Hqs~U 'gmpbd0XnL.C-!5m-/C=*=|}\y^`hliRPk}^w8ydqdPd. 8<_u@-C"M&Ag^5A>&[=Odf=2?C8SD%L]fIpUjvt[~mw~|rXKs|c44%~B(BLLjP aI2OW84=QnHfMBKQFCyc?FUWv]z[roEXkg}PQ~aW{br[Qkt=ohz}j]y{n_oaD)lnvG2WY6?>Aj_2DYMwuosrm`aomr|nR\9GXE]IKTj4ba 2534QQ$%M#A6Q`)DA=0?RF>*"Klz!O].HETw93VG<OVPMD+BO1A@X8.M@WfTF7(<rgMp0]fI=fZ{r*ynOXsO62 RlTK$><#7PYQ[bVH//CN@G]TSBAXZKN:N^_]]}SSVA9_wiW=AihH:zoRNFk_KZ]|eJX=n`3FFDgai20NUi?'TmRAGFCA;\T@7P6MB%>jIp>QgL,KmcX|REjfI[^ReY_8]YZl@R@JNLX/8)ot@x|^[O,);STr>RpT\DWmBNfQh~W;Zu0:>_|_X^I;/4%4ZevtC0/B<B^KR^5MLLqqWRE`L4d^][lZPYcw_[7,GJJOutv`0>sA;pSPjgY_[FmEP;>043DwwPngMwgAjZI==7MmTP9B0;;0JLA-MmbSIZVc>5geQURXMEE\UdkEVE&HTM|M;^A$DCXfP`j-BW\Osgjv;QNILXZc^He\<CAMf;w|" ,15GL&B*@tP-kXOf;"K{G9^G@OIRG?_]@8WcdNlOT_V<|H0JdvYRTjtBCpycdkEWTVRX|w@atd[rfUwvYB=14M{o_Y4S:<Tx_x{)@@>tuxlTbyZU%:hKsdp[]jc[U\[y\gqPZS4^5Wl/;2FX`iim ?ZJ]~\j?Qlwkn.VHBP.'+[fMT}sFbicdMXnz3ewt|}Wa9@2HjbG3 4D8NSQ.8:lmQPC#6UjG9//(u`D%)1M\M?_XaOLy3[}_?0ZwU3X{vTZE,kaQE[^+Q?itu\GGT>"DF,,40JE75VIJ`D 0+SXXU@'Y;.yGIJ<9Kik`wmtkJaZmOotc5Q{jP0 uawPOAVq7NIS( F{Ao?7>E#OTD3'Mcdtl:4Tn~pf[mzoSIp\irdddikhhtsV|aup*8TaUJPIE?vtgaI=DDqxu^IDDLSjX<A8BN_^]GKpo>5`hW?NU=OG;wHNilN")kVz`O:TdGPofTR7Kk2$.eQVgqO#>xz^`QbB-d^k~^C9?JHXkQRgt}\QNPrWAfz`VE5FZdiwTB8Ln[MqB,\iI4ZR`qPNmcD7YUVClQ%DG*IgoCX{Z/5fog}TMM-_pm\oBLUqYNkT<]:P_?2E?>GArKjcEpeJ[c[ehMDDg`z_/Dhjv]O FU1Q<Jnrp7Rf8-Rh@ 4clkrsM3=|tOie5S.DH5sP3=>,>o_h\ZQD?1KhT3]`;SiuYXP=csjG]O2@^Z@KzQOQ@;?48W]e^]Z_[`iQ=7)>ZUKXcCt:6GkwyF\`?^[DGg],uL6Wr_5W$k`PD>Inf[g[]RVU+@;lK$Wl25n3\Pv_`qBbR6,~sC4E 2 (F~YaT"sA3@)<HhV]h8"@3;P:F^=\qfjFOjO<AFYj/)PmVPZCZw6faRjcOC# [S9KeXT[0.C+DmH-=5.3' $W[f>~MLYX8Wl_RIQTUaF7RIKh3sABb@,FB.4G*EbVB>49I;PTNL8&DDCIKFG4*E!.U#4B-/DJ3//-#zBRGUXx{^^I#@cEc}nQCXK85m]72: .4%"& (20F(IL/,@2!O+F"EMIPO.=0QVm;*S]6WSK'SMD]gr~iBbb;Zybd4LZ_knljE7gJ:IS1PXTEK@E+RDU_.@8))LZKdg=7PHMkt5e/04>1OIS]J:6WgQS].di^q|J`LWxshWA:$<6dm=216EN8_lG2&H*6)/;SY%5=9N:)CF8H8'44?#0!>@) /?*AI>B+&1795]O&74>lbBOeS#AmZlilfHp9)9GD{}_^g^{_J2'rM3[g4+zgH5?S:}W[Z)7N?AS?BJd\^ChfNKkiV:9@IRXy]dJ'LONob904MH[l:Mdf<?/4 
[unreadable binary data omitted]
dipy-0.13.0/dipy/data/files/sphere_grad.txt000066400000000000000000000056101317371701200205500ustar00rootroot00000000000000
 0.00000  0.00000  0.00000  0.00000
-0.85065 -0.52573  0.00000  1.00000
-0.85065  0.52573  0.00000  1.00000
 0.52573 -0.00000  0.85065  1.00000
-0.52573 -0.00000  0.85065  1.00000
 0.00000  0.85065  0.52573  1.00000
 0.00000 -0.85065  0.52573  1.00000
 0.80902  0.30902  0.50000  1.00000
-0.80902  0.30902  0.50000  1.00000
-0.00000  0.00000  1.00000  1.00000
 0.50000  0.80902  0.30902  1.00000
-0.50000  0.80902  0.30902  1.00000
 0.30902  0.50000  0.80902  1.00000
-0.30902  0.50000  0.80902  1.00000
 0.30902 -0.50000  0.80902  1.00000
-0.30902 -0.50000  0.80902  1.00000
-1.00000  0.00000  0.00000  1.00000
-0.80902 -0.30902  0.50000  1.00000
-0.50000 -0.80902  0.30902  1.00000
 0.80902 -0.30902  0.50000  1.00000
 0.50000 -0.80902  0.30902  1.00000
 0.00000 -1.00000  0.00000  1.00000
 0.86247  0.43287  0.26225  1.00000
 0.69511  0.16208  0.70039  1.00000
-0.86247  0.43287  0.26225  1.00000
-0.69511  0.16208  0.70039  1.00000
 0.27079  0.00000  0.96264  1.00000
-0.27079  0.00000  0.96264  1.00000
 0.70039  0.69511  0.16208  1.00000
 0.26225  0.86247  0.43287  1.00000
 0.67870  0.58769  0.44044  1.00000
-0.70039  0.69511  0.16208  1.00000
-0.26225  0.86247  0.43287  1.00000
-0.67870  0.58769  0.44044  1.00000
 0.43287  0.26225  0.86247  1.00000
 0.16208  0.70039  0.69511  1.00000
 0.58769  0.44044  0.67870  1.00000
 0.14725  0.27221  0.95090  1.00000
 0.44044  0.67870  0.58769  1.00000
-0.43287  0.26225  0.86247  1.00000
-0.16208  0.70039  0.69511  1.00000
-0.58769  0.44044  0.67870  1.00000
-0.14725  0.27221  0.95090  1.00000
-0.44044  0.67870  0.58769  1.00000
-0.00000  0.51046  0.85990  1.00000
 0.43287 -0.26225  0.86247  1.00000
 0.16208 -0.70039  0.69511  1.00000
 0.14725 -0.27221  0.95090  1.00000
-0.43287 -0.26225  0.86247  1.00000
-0.16208 -0.70039  0.69511  1.00000
-0.14725 -0.27221  0.95090  1.00000
-0.00000 -0.51046  0.85990  1.00000
-0.96264  0.27079  0.00000  1.00000
-0.96264 -0.27079  0.00000  1.00000
-0.95090  0.14725  0.27221  1.00000
-0.69511 -0.16208  0.70039  1.00000
-0.86247 -0.43287  0.26225  1.00000
-0.85990  0.00000  0.51046  1.00000
-0.58769 -0.44044  0.67870  1.00000
-0.95090 -0.14725  0.27221  1.00000
-0.26225 -0.86247  0.43287  1.00000
-0.70039 -0.69511  0.16208  1.00000
-0.44044 -0.67870  0.58769  1.00000
-0.67870 -0.58769  0.44044  1.00000
 0.95090  0.14725  0.27221  1.00000
 0.69511 -0.16208  0.70039  1.00000
 0.86247 -0.43287  0.26225  1.00000
 0.85990  0.00000  0.51046  1.00000
 0.58769 -0.44044  0.67870  1.00000
 0.95090 -0.14725  0.27221  1.00000
 0.26225 -0.86247  0.43287  1.00000
 0.70039 -0.69511  0.16208  1.00000
 0.44044 -0.67870  0.58769  1.00000
 0.67870 -0.58769  0.44044  1.00000
 0.00000 -0.96264  0.27079  1.00000
 0.00000  0.96264  0.27079  1.00000
-0.27221 -0.95090  0.14725  1.00000
 0.27221 -0.95090  0.14725  1.00000
-0.51046 -0.85990  0.00000  1.00000
 0.27221  0.95090  0.14725  1.00000
 0.51046 -0.85990  0.00000  1.00000
-0.27221  0.95090  0.14725  1.00000
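The sphere_grad.txt entry above is a plain-text gradient scheme: each row holds a unit direction (x, y, z) plus a fourth column that is 0.00000 only for the all-zero first row and 1.00000 for the 81 weighted directions. A minimal sketch of reading such a file follows; the extraction path, the reading of the fourth column as a b-value-like flag, and the b0_threshold choice are illustrative assumptions, not something stated in the archive itself.

# Sketch (not part of the archive): parse the 4-column gradient file with NumPy
# and optionally wrap it in a dipy GradientTable.
import numpy as np
from dipy.core.gradients import gradient_table

grad = np.loadtxt("dipy-0.13.0/dipy/data/files/sphere_grad.txt")  # shape (82, 4)
bvecs = grad[:, :3]   # unit gradient directions; the first row is all zeros
bvals = grad[:, 3]    # assumed flag: 0.0 for the b0 row, 1.0 for weighted rows

# b0_threshold=0.5 so that only the 0.0 entry is treated as a b0 measurement.
gtab = gradient_table(bvals, bvecs, b0_threshold=0.5)
print(int(gtab.b0s_mask.sum()), "b0 row,", int((~gtab.b0s_mask).sum()), "directions")

With only a 0/1 value in the last column, the table mainly separates the unweighted row from the 81 directions; a real acquisition scheme would carry actual b-values there.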
dipy-0.13.0/dipy/data/files/t1_coronal_slice.npy000066400000000000000000020001201317371701200214750ustar00rootroot00000000000000
[binary NumPy array (.npy) data omitted]
dipy-0.13.0/dipy/data/files/test_custom_interactor_style_events.log.gz000066600000000000000000000175501317371701200262610ustar00rootroot00000000000000
[gzip-compressed binary data omitted]
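The t1_coronal_slice.npy member above is a binary NumPy array; the mangled {'descr': ... fragment in the dump was its plain-text .npy header, which records the dtype, memory order and shape of the stored data. A minimal sketch of inspecting it, assuming the archive has been unpacked:

# Sketch (not part of the archive): the member path comes from the tar header
# above; dtype and shape are read from the .npy file itself.
import numpy as np

t1_slice = np.load("dipy-0.13.0/dipy/data/files/t1_coronal_slice.npy")
print(t1_slice.dtype, t1_slice.shape)  # e.g. a floating-point 2-D coronal slice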
z(r<1i]%]y1i]%LΰP*Ԇ7yÜ738Ü7382Le*wuLuO;<iD׹rcz{{;,ֺj]B]T.2+>z;vXqXՠ44uu~|Tkq MSզaۛcۻj{.흧<^1UA(j[1( ^ /JN2.yr_s]VZeÕҦVJ[D \Q+p 8Ws 9U*p7yCϛa3 =o7yCϛ*Gt]T躨\A=`6T=ȶc6b o7ċ qSxF<+\xF<+<"Ho-["^ qSxF<+cxԹZgL=3dx(ԡ`|j--[d[dUYW1ʺY9¬*xUxC)#.2źxRxF<+wmi4ivwcژJøaM=cܳ{Ƹg*pU2vج:le6JIy>|œO*.)%t]R躨\uQ.bF6 ߃ uP:`ۃj{v̺.`u?,\)jѩSQK^Zp+Vr_> =ߔz)7|Sof긂S?j O'GģAs7ě1:\=j AV9 .>dq!ŇeUp"-Ez{V%hJЅOMAR.N<6<ʓUOO?VS&)P?Od|)!H' ?d !"Q2k _um= RlCo9 WPlXǃDD)@]%%eUuWpQ%Jj]n/E 4%<|h "wI ow㧯QxKE 4\Q;~Y 'dSQ!PA Tv]%o7%p%(JP@\,!(ATY L \ T%hJ<EA} *AR{+C$[ 5H bTLIR1I*&IZKRٝTvgU'*jPjfV̪jBRLU;S0UԜ%b +Gr|pIWtI5{,Nxcg:<:?6JӪXSQ\p<{ǃ?ٙc$ s<䴐}l;q-u<&F+BІsg/^~yyG*GҪt}N_(Kw;xVxDE O*p"^ G̺n\M pl{Tm <ܼjy?MXo_!\xx"xp8^7fCEEʙjY'\|TfU*sB'yYUڜ0pI.:uף9T \P TU*.ץ~=_ ^ w]ᆸ)*1 E *pT.(AUڀ uVWīqW!n ψgGģ*p T/ ?Bc[j]xVů_ ^ w]xR8z*ϥGkVċ[j]xVxB<)a_ s-XW7ě+U!~/{z{*{{^mMk\tw?^+߸?:}CA6tR;*I/#Q xV!n w]suOwˣ{|ލO ψg;xQxE*WUA_ k*p Tp \Skװ6a;u \W<߱u:Kaˤ@n-mj]pGÃo{TmpG^ GE庄qO* TSE*۞T3 WxQ8.+W'ēu\g:So?ܕȫůֻ}e]xx9)[j]xQxC |N7b;GxV# W_ ku Ӧi6MMôi*m:wLҦct6ӦɁf^/ wGj]xV:Wr8XN'n±I!uYn/˶֛ wċM{|<\NxV8Δ"^>[|=J7M61m\M)*m MQiS0mJ ӿ,_9[G+ qSxA(]W:] =ߔ"vW{qWxA(MWwĻ"_7ߗWo |_ly IվůֻD7Ĩm_7$(!۱_wK'gijД-~޼#4ůֻ'b{Zo>JxV# WӧOWI'by IG9J[jyոůֻ7)԰ůֻ8߀NxBmZZo^Wj{Gvzzͺzs+۴Y  i۸/֛ӛoou/Wj][n^cķ[ h_ >ڷv.uGhlźuFw_ >"k-3wzŏ b]xZT*Ӄyźyh_ nOX] ^;vzW+~.xc~^ce&mX<Z;j]2_ïsԾïsΰï+~,+m낏Ĉ۸/ {b]p~.xS~.x~.x~.xk;jDzRźiumu+~.x [;j]6vz7?j]tZ{ůψgW;:7,wպ.q-X<"[wպ6wպ lWWī&W~^q /Z|#hgJ?1_P!6uԇ/߯5K ]Y ȓcTi01 ))AS׀{bU  Hw%P׀ '>LȢR/խW]vwv>}yk'r yR;_+~?S^ug"o^y?^^ vE+_; omWmO7kS;_kΖe{ʶҚyM{ikûʺ}yOč7/=eu+_:@?V.'&8QX;;\=uxKIANSeiy DrqC>|yW޻_mf 9O$7N{Z4yfxO֗%̕_7G2\sbo/i" jmc,v".> ̔폳@2^eȳ4ОcwwMsxɛ~r7_FWK[GGJ,י}iwxq@FJv߯n"x Oϛ #-=R:Xp{vqsaynqJ&_ۇ_uxy=<>_e% :Ye|1릝Ϟ_GyV&_]7Jb>C9ۇwSù8~>1"<߹ΒߝMz~)Up9^O jqz#d0 ݱ}tďZ|2>O-~\q\B{4Kqq6tr=_ypq]4x%>E ߝXI ڇ_/;!n:y^|~^2ɏ#̮.qߟиF4Q}xt,8'7c8a<\I2я??tmRx7ΣK(n'Ahଙyb49Ws#)qxEg{ e)5f|nj9sh7'kQj!z]oؑg{EO+z_sw~64[_@>GDsw]1KL|/OOdioq1x&￈P[u]CSာ_k}r?ƻ N㫍)׽zfʋ_%|oT9Swpd8X%]@p:),5NsUK09y:ǝ3y$>iF14Wy|^F^I?eqO=5#S?ͼQhQc/I~{|S͏hOy@S8,n1Wx^I5..ϜqwOy=W%o|/y;G'Fs^yw?e{a%UF{{k7g2.?zy486֏#5Bc}\8_7q|7~5[M%yw @1".8 &n .ow3?2~W܏Gb7LJ~x$ |~cnBMFU8gf(?4t{/{g#Ty:<,Ǖ񙼇?2]SǕvcUuHf_$C#5g/(0=?;t??[S{Z<οe\ѿGw$!_fS1J8<|2{y0>H>eh[4nҟsɃȟnl8Q2m~tF̸+ޘ qx@Y:IVΨ}o\<2e>fqX9Iqqt =yͫm^z N^1.CtWԯ?V兊g;W; j~k: 歌/5>'Ju2wSQ'ag50y\3rw;F"/|ΎI|O}>qy8x ֺKWW{4/]_~~70~z|7!Gϸ*﷼F6gΗFm^U#wjb9F䏧/SoUp.oqkqt,.2Obq:qM?5}ݖI|}᝹O<0_`AJ؉}yO]~?`ziX?UC=ǥNO?k3oyK?WQ4:]ڪPDZ!wg{/q^G *>SϏ<^=`vqrъ'ԋrg{eӉX#ӬCy^1#hsͿ-u77+Nj9T~'M( =͏۸ ď'ӨKPG0o1BcJFx+_:PLĺ2i:e'L礞-]~l6C/i>el!hNj?7եnx8W72Vu@=Tfćb'h-ޯ57;x)5)+|qǡk'wDg:~N=} VRa:X{{LJ%b7^ /-~:'Fcuc}l:^\gOlOkUZgzC%Q]tH>di/RY+4x/K>ksCfG_?+2h?Xu['ZO:yՇg@c7pLQcu_B,c{w^''( \$NuU]Y׳HF~䨰\^%j)qyE{{z4s^>u)VVW0P?1F,Sa~c}F5'Y?~Oݚ~<~MCs?\w_Ƌu?Py`=y"8T> ?5|e|h|#9~$ ny; !|Joįvt2||iO}#~> ;uD{⨷y>u|u9gW?g=C^cd@|̂_/N .#ߎߙ߈ףx%8CO ֩ . VݲRqrJurK<x|(2OgMī)i"ǩ}Ϙ_CVQ>> ռد<~9w5ޙg.ިu;u};W0: uRߴXP,?" v>KNgOA?H] C1k8?|Y݂rP89o$w-z~Ow_^-P7zOh{VoMzo6ن4}y;:rT8/qNJ!%؃ex4/tyy0HpLFxσ7(.K62jbi]]'vb 8jԑl]U}^2)I>gd_u,u ! 
ӿg8G:1,/[}j֏S"Θ:֋2-_N_C_*xw&~Gcu+,uֻn-|hlwݯ;X0BoM8x; - ƭ7\sއ>vM&g|d:Mp).خ8*[V-YbD+w~}~r&o$u&^?g>?̥ wGW.|A& 7ԕ7w]8|^gY?S=dLJS\GB }m}'\obLe_XIǵ>]Wϐe+3u{{'|xD3rS6.8|~$y[WKĿ_`\a+y_wUjGٺS'Z|^ЖO3+?Ѻ*x;W$g < \~t~ൖ-e iMோu _UWU 丁;6a[m< r\/oI`Wc亜p{ >KNj+@g @7u/;߇]z`kC-zVU;ɬq]駰ue>YwZ᳚܇R:c[}6Ve9AI\VҾf{RugpR+UVnA~Wwx x{d^<ǥuGNP;_x>z7:g;]sfحG~@st0wotF>&x\sf?V9_Fv KgRܣ8 et*='O֡7xN _OmȯG;;Wy8pr; ?"/b>3w/^Rl=[ߓ֟,"| c Qiݛy)Iy =Bu1uC9)A]cίNWI[ X8{o&y 5m &RL3_w͑KOyK+)dlzBwgdޝ {A1]H;%!qX9^9;.YQS@yE߿/E^v.BV#?[Óݒ$ׁgq$T_"}`ѿ)LJ8F.E|_6F9nޢNMU硼+Tf$nD byٺAs9zykLzyֿf?`_浖hL~bDN?%y|3_8wrl I̡&dV{]?܏J)c>Yc-㵰_f=îV.#dz{H7^uבlˏ$O +oߑag8N1kx'+rGWÆUv?xAb~͹:<)3 axq2Q>e]~/G ^~ze?C=oZuUճ(/jYR?qǞ~1_U[]ՋuORu۠WƬc Yn!EVʸFM8>h{`K l5t/W}/~2\;i,O -w(n5uG}K);oF?jWaeQg/܇6Ѳgc[g7W2~>j$[/>W>޶9芜]8a\s>WeH5wO߄~1M1PNʣw^~qXoOwԣ]>qw_]D/tos!Ӳuf/OMy|?<۪Uvz/~#oRE8r/6xIS^]6ţ/vWIfuHG>|e>\-VqY{w׏Qa膼5ŁfYObF_dWmyo1-, u ?^X ~}΂Չת'G-gRz=Jwu#Ӆ4>ϧY>tړ䱌w;]ʾ:Gr~R35o8SpUp]hqǵ[gބSxs!lmM`Wǿ>!8~ n!I۲ U(+e n+~>n9[ 8NvЍO>&|YO{s[?> Izϱ'a_YZ? z}Z9=s_Ҿ+=@לR2uj|mA>MJΉ׽9?0v\V ?eC #^ @4JԞ2ch]퇸p]jIU<v3ޏNvx4wMD }_ųhOY>Jу|8Xĕж^MSF]U_40Iw\zC=V?4^8Z>rƯPwQ7vĵ謾.X = C}4@@VN~_^h=ɞu ?jzܐäLuh;s=gBIpA>SOhܧ =:sSg8b^RV?f'mvy})j*k7ߒNywzB Mo@\{g9Ο~*.x7p7Ȼɯknv9>i<4u%ka%ۈ_x:Ž73Oݪ>lW%>8׍΃(!g%;u3Ӷ^쇻MsδfρSSU3Խ:^zG <^'z;3+;_ ;Te7-d'4,=]yR573WFqK~r%赫GW "~ {J~k/Q 5:K Ta\p\GdIul=跳@StXOE">iipQ2I*k;_j0H:5{rq|"ߎ~u"xδ{wQ > 1NOmGю;ZcT_}n+~/27OF#>7^Ѕ]r }| $Y-=oݥ?ޱ?Z֯ :ߒdU9}|M ;xn =EۮHTz3|Z_|k āoFO|l?>#/}GNntnᅚC_{3:O?&=#x.ȳZq"Q9s݌ď;:[0ZG בM9t<~MJQ;_~gMJz<{V[N߇;5w #oXuyU)~8<>'Ϥ'ڰ-CH~1g~q0jw~ĭ'F4JBW:.msGZqE?гJl 0\ǪdWO'L||yow~>gC\|=}R4w)!wݾby_8 zi:*>߱@Fi !77ƣz{pU?AgyCe~8 @gxvug7ĥ4`;zkEqsWρ]>1/:K낿,kS/s]܂ڰAny.S`LJYeu6>뛳wnIB_GJf}򶞟{$KyxՇp~ѿ#&?_&*".<" ,<'u9yN=T;ܺuBsW gvv>wӜB]>7m][&YjV zGV~peWOk{q_fܫ|K{yszy˝ढ़.]cxThL|^-/Uﲯ&t~֯ -97ݶCשuFOgӆK/b ~ 'mq)"yy uuW,:XEbL症Q~*xƛwO q|.FFcΗ?k\g>7?w{2ssvfwZ{k0;y4N9:WW:|':=X7  CuP_Z9G__8-,s8w/Sg.[g#;d2>Svv>:/:-V}S//ODӺAѯt}ZyNAUy݂]#ucY$/ 筮qy~kNϙOx-3]Z0 ?^)ˉ?g}}~?X+|y)1~ٞo45S/~6>2Rg>E{D? 5jW.M}.+ +gmܶ4pTb~̗v|*"q8Z4Uy2+>؅; pVZGO?]V͗ց=|Du'3[C+[{]? ֑U<_?x+_v`{^E~ d9^#hCiAO yBa7":PcpDL`g༞:QN Եn؞/d//P`3>_nBa}=מyү\/q]=x:#?N!Ip>>){PS(瑞]COogN}p 2/-qy`8H7xPYb9NtZ+A!ٗhFG䣪;jO|2| SgQ]p1 Ձ#jOhnM16w5>z͋?~yT 4nq~4gdވ~hzv\_/=͇Tf5sfxzGž7+Yk_ Ьl@<ܳvAYN>A|~vzWot:{:ǛvD|񺠎p&'<{[n 9W ]p_ۧrܕ? = x=pQh77DY3a䏫<-0zMT>/Ïm }c#O!ݺ 8HHk*|וn/ .xn75(.}!NC>o y-?90Lo'p>#YYS#~m\Ck?B\U$Qa"vP:˸?l ܟH]6p/w i(e ={tY_HM#;_Ns7l*nu`m1YF"u^_~_:-H?J{B[üۏ,C|%2>G̏}bjp?rzzhҟQ(D?|vվS?蟬p 툼>~ZG߬EcؗAs"%5'{C?x[_UTsjI^Y3̺b4irSUygX}#EQ>sp4CH;ȏt[uۋ^%Ǎo]9#E:YCCᇫz|rwd_3HPuJ:/}׾@_F} >VoH=~D!XKzBzk~;E57{i{ m“~~(\F?Oop?~.<ѲWnԑnuc"0ߡrEpm8_An/|]o'%: 7YO)|ca3>( K.C|&_/Ts7pp߸yq"ݵ_]}Gꦉ=yfq;##O>gf_?\'a?g<EOI~waO?9!=Tp 'zw(u7߭s~x/t;e9mc_v!cE:m^d ~>47%PwNA?̳#$nՔ9F:׃fN<5@ՉO?Siw W|g~?A4}):excij,\s#8LJ@eυ4v/>po%)' !x->=zK>>+ι.\^j;O 0#ݙD?} x Y~G4an$|E;GL6gbcLVᒽf5y~ήj3 ~9#S"}9 u o~ş!ak ΕbkokGNWԭ ,;3 ?82&\A?LJp[H?<hn/u{nwDEQFqƸPNs/I6 㷫PΆ>^ ̈́'~Q:&ʧ쓋+M/^wpȩR3LG~!UA@HE<)x:':ng_:!gs )e󟭌8bKUCmvE;~0&rgp:8rv|3>B 8]WJ:笴 ]/=^s]Cψ>E? 
£4ܓrGA'}kV_uo7>Q>ftKB1?Ͼq-^Gg0Gf/D_@<$['qɏ5%#x-;?<%kܯb+)\?Os/ըe~ dΜ妐oI:A~M( #e;bt?_q9Qϫ) xG᧙Z@E|t}b#'Tȉx`;]=>8}%_'EJOVytx}_V=D?ܺ I |?5WK|S'oξ&nW};guKC_C{ώ?׳5>` JQ|JႴS̽pދO.|G<Єpq@海bř1: ʜ|u5q0G.}o7y`%/ߑ>}V-MC] G7-yѹJ0VYso9܇%~(ԯ~G|ҋp]jECUx<9~q>[|Aapx?|/?0ךzf_9tRwk[''~+Gvǵ_.>̑=f O7gԆ^#Cj;L']x0WP 6b,O,S#|JǗjC8/%;=oߎ/;: s:.B„x_|y^b87 $jR9o77\=^"0u֨K@\Y\]?Εt1l5î7X|H=0ʏh =Dg R?J<黹7= zM=~5]31YGeğ[w WSe!>2˴Rs1Jn"IӢQhMC/&@п&4/@:|j /!Y &< :Nekg3=f ^"{W!"~y/8S%^P7~|(`?s\#uC諌GLP'$4m6x]cydA<$\Ӱ yIȳi{nx꜇[My\IS:xSL'򨛝qO9~f̞`n9_OMه͓[+;m]?/u#"1{Ykϣ9a(?2s= [c} cr`0Sj⊝.KW\ !ʇQu{֠O^LQ} j2K]P{qctOHoYՇ\y/ht5/A\hkyރ|VB3;_]D߇83ח:֙"ؿ|V|xZE Oi95T[x x NOT@\2p>̩]C}讆vG>н95;;ujQb{nk ?&%쭐~ XGw8! ڽ7ȃ_ؿT> ~I}1^W1ĿZ0[EN5rbp} \ NeLE/K/5A}%A\728iA\1z5hڟ.\1u'@}R:wcG׋sgGC}LzieO /^On]5'T:O}O.a{/r^ROO\9bm)>]+aw Q]1/{?e~ҡH-]"04"CJ7Hs9tt3HHyZkf:< =LWk] rN-hw?Y8EyF#o >5b{D2"b)1˫Uf8t@۹yأ`yg:O^"w-!#/G}~7v]& T_;Xg)4_]O}۩By7[o.K@cFdk;3^bo8Yr~AOh`63 |͆k_y%9Zn_ϰPuv|D>__\Rg]0+h{??&%a- LLoW O;g.8sz>ޗyrϜf/%O Ƌss#v_>4ߞH/ۼ/lq54EsA{ ^= B/8U}!)?9ȼ^$.ꔢﮰ_ϳ$pm&l=~(a}\OfO%*K4\օt?<_!s}gu¥W+/ꭑw'jsv*u^+9/Sp=?~u"=T㓏ѹHklCWnSƹZnJ^U,S#Pawiᳱ N}v#̾do8w6}W:oSLدe3 D؂8pN'̽8OU?r^K>tj`[AyA<8۰ģ*A?ipUX?o7ݧz!,XwȽ;Kdu/Ag)/ʫ[^ϑ(_@CA9Cϩ2+dKw)_l%Ϡ>|@鿭&q% \ αa Jڞ>5~yu^u9A<<ϸAs7C n ݡrcy# .k+`8Pd<oG0gUXIP[/s#;vyҜߥ?j-8?Kz">@t?x1 %myANw-U\MV_˵s'<3oz06x|]a/.. !;g߬-ars3 q=R /3y;[v3oϫ>(sI5yO?wԹQfy5>|TWq>M*ZW֕zj/~uXoDus\/I6';s,?yyyI #ηѿr<|v?ug>)^'>vM ?윾%|Fqs n'5{iW8\.=u- ?ȷ WUW$q7S|&!} Ptn{bӪF\u 'b`u^ΟoNy(Ӈ>xeB^}:sՉHdO.A?Jm\G@<ҹ:Mk_X3eʓ/]^"䍟.:x} O" 8Gsb4?3a/YE{9?ՁOfE&lABɜ+sBM~<]ޯ^ >5l'%hL``y"k Vsh抪C~@x]Q\B#XY't}M> Ny4:I?y:לS?\EaD\c7(_'ySo-mGq[oSbu^[}*)%=^#*ΰHg+Ncc\4ʍa:_䯥 |g?,*ODpx{:_ua[yz7P~Ae7e~Br긝?zXgϫ#ݏlyryy:CVId7m|y?_%۴ZOϊ? vus]U/xƋ{[ϠpxG>g_d0*^_'Zfx?A;܇΁:Ry/Տ ꫱W1;dwOBxGqd\TŽSnko;בq-l}7APɱT%}_%=y`(<=|:wxM_l1qiŋ֜L* Y?k}#i6Ϫ~AC'FWu\?ۇ>EAZz//>},hگ?15꫉[^ocl)Ygh-h^](N-%:g> ot>Dd|cc|4ټxCE6Kv\G:/Kyj>Cr(d'%[VɃyyms_%7Y<8cqP=SݝK cy.9~o@tؼGvB1k1/a^w*>' ~W['xm Vߋ[l }:'zz\$ɩkyVN/Gz+!\.c.3nWbS|ђ9p=YlHuݴk}`?yƳ.%gҩ:gBͣn^%+ƘY>YqxyCK/þL鏭Uf3COוW_}OM`yUg+gʣB#0.>__}~O0]'}~Os)T4[M}$'n^IYI}P.ʏv>uZ?}#|P9z=X>JUg~3y!+[ۺK yfyur]d/N֏'J_m)N&.9G? 
jOO3}yXΩާyշjعeT}}?u疞segTJő?ԇ~WKm}N~s=p6%S<(A7W[vU|ҹ Z NwtN6XQPqޫA ')~>*~{ŏNYi [\rMo+k>x\YWzNL:Tuą".KkѮi_mmę`S/1~TkKESƣ[ky^6oD{hYp'(_~aų-.[E[T53ɫh-w8UGsK4C+^`Tv//<G/?#,\bғǔ^Y$OxMsG?m(>ƅʳ1X*nTI>O('zzO_z(_nyEEo_ZىW>E|3WvQ]Kz/nHs?l@-R\+dk9gaJu!yGZ?Npzl^AuaӒ}׬#UO~A^"~A/ ]G{#ٟn#)ޯ,mu_ߗ +/Rp8 g:͕\ m; T]WWoķ.8n_g]LvWj~Nں'#S]є/nL.NKЊgXSFwTuHm0Gp,yyHY#y;}g" o.zsWXWy?=Jw(_Tpʓu.l埕fߕpaNPKSD),΢}uYʷ?X?L!YW W*XW4Uu{^+_s/nʣKl VT/I}}{:wTuS|3%vinÿS9wxO;>>uߪ G 3My/)(yKY'$I3}qz*x5;ǔ}s[AgOOATmwدeY7mBX[M]oޗT^}rOesOsxx<͌ܬ<Z?A }=(C=h;> BS__ǿ>}ǿ>}=;6uПuw ~aU|wPp?0*1ߋA'ѨuEX[7j7 |Ad70/zg|8uO9_D1=B^UI%PXU_ǺOQioL \&dY:Ζ&P̺C=7/> 0QYOv͜ZtEss)j\ M9llS1]g=~Cb!_xn]ο1gi,K|zVK>?TGBʝ0E.CJ[>π{ y֨p<;><ٌaŝ۳Cuk"_q晡yvȯfǜiaDYaC{W؏eQGw;ޗP=uE(t];n-, /tߛkF:;'azh7 ^/ tb_bhg|3 {,z4fݧɞ}{g 'w BA/t߮B7PVB)u/Hs~V/i߃vr^Ĭп{:5=jY.H)Pt-֩WewIϠ}Ȅ'V_fާXڰ}}u}?8/ 3#3/],Yz?Q?w֟Wo`G;E6s s=g~]v١ϲ @*CynBN9ru53te|[RB,F]Ћ9!ϒ)ɛ׮ƳCW鉰<z^ޥ.ʝGo&k~|JOЧ+/'O8g~{{"8qS<CfgC2!M9??TϽ5V̭;)TtNBc@Bĥ~bxFzO2|T.& q_ A-έ?x8O 3qPq'qyaz7/~Y/8~ے#y˻c'B-Nȷwj8x1bp`wP%em{۪0/,~~/\_{0anSM89|i,ZιG/}qC?A;?2.o|w%;گ`?,!~]ge*{IB>O[%_ bڍ0{.9+AwK*~١r}8~W\wn'>x97"r%;#tu5зW2f'xOԂ=zK8vIvuB8>?W.yړ qp}#W탫n ?ŝPZE=v*yi?GJ Wayiɡ'?1WQW#~z?޶E,xʳ"CY`w΢Cp'B31=&6n-$ds3w>˴wsۜ#.> 2ε8Tv;ъYX_U $l%#/'AB`| X^)љbI(.(rB宱 ?X BG/v?s1zK̇wg,>c\+#.;84[ǹaGGc?ox'8?a]aθv }#O!%K|iXDظs{Z}zZ.{3 }Oa.?ʗ H^i->O_WY~uq_S !I}O&!}UP;x[ʼnl~~;y ] \_6>x&NU!~|?SCpcD{_ɏ~k7+ߝ}&r*^8O5C僉SY2./?"~]vwS-D ?HzL7(g?zF=),>Q<쎇<{|[w ɻgOF\$byOĕBYo{CosM·סgɁ^yAڵ}`T ,{ >̹""^Vz$*.!/A3/+dxaOOqhGY^V`4ecugXwƿ>>oaqxwpCm\|K?6NL}eџ*s txkƻ=}<RX4퉵Oyw.+1a~pCVo(y|hG=v?_y* u<qec oӴ׭];7= *¸-({mN>'6 pZ?1o~L8(.x/nZ?)GNv{qZO[%^TvC{X| H_&^^*z[-<򞌯}!cu_>,cY5qQ옻NE_"|` VϘOOQU<sG lJ*^T|7<5_,8Oy"w0uW[̽9K? q헭 _,9<23@WEo…6R Ѡ[6>!_|P=yʽMخëgikJ<%$Uuɇp>,z@WSxD^W:(Tȼ[h.PyqQy6yxCyX HU&,W/C+|X_yk~͗O,Mr'^^@~^ vx[C>G?z$_v,hoctӞ0>}Mh|>wyg̳_u߬}vhMhiRœmż/N6e_5~q\^C93s'KMɏpp?͎,E[ *ai$7kiµ)/N1ޱ<ۚeZ)W|&Oc.?[Ws|pL5ZT~SB/^JyoBk8Sk#BquF 6r`'׃jS4Ӗ%ϡ> ~1l`2r/o#c~Sg[sS^}6nt\GyyAOH:t?m>q[*3oa"Jjof܊:yiAQ[eY-cW}ƗlIj!P/*C$(E}2rY,CŴ6XLug-#/J ɱs{ߦI91r+w }W!kz>N[ nr2]|kzjh|:>:W K[oN{ԓ09})o϶~FMZgϒ+/?$/OCujS=Q}>DmO>׳3bJQϲu`<';-<qY_%* :R4v~lZgW ~FW#m[Zg0[mac,gsidp.<|w~] A9Ac|Io{gxNWGy^7w2+rc'TaXdeˈ_ XK?}dߡ2ɹhm*KJ<,'Rr-)Z!ɵo`Tغlej 1oegqq{ng9q?+j_5f>]Ky?z|KWs`iȻfU=A|v|pu~o׸߷|߬EFOQ{gF^4:Ӟ%[ aK3rȾC?,yc`T]༫'Fq+qsTע|Ra-ޫ?<9A8 }=d>[C>Dm^qx}Q Ew^Y-xZy,|8oX{}}4+Տ:<Q)yz|ݼ_9Oytoe2 )SO6F{3}">dߨFvAFޝ4{ia/qyUq9'qu+.N/e4+8ͽ፟kg_כ6Ix~fٸQu튿wOW++ާ T֭SpaϮ?`> 7wG\4'p/b8O%\\qagw0r}FN=y<y~<~e^w7=3Iȫ">_I0@x)Pp N|iF{\Fsgn.eŵW`fl*[qOM︶t|'}Pୠ5vݿyU3#΃ەryvxw~ھ êķ%_,^|)_. :P"UKV<'Z>y .DP6lH)wֲRAYϢ<@F>GIo)<]xck0:_۽|ȡ{^!;D/i>)Ͽ\8`xP\/ͳ7'ߊz'Gc^w.kwՐ5/3~6x| n?b޿OHǼ_> {}vNO HU?:o_c3p|%Ͳqz92򵊋j'6vɌ|;mc~=ȋ+fo9#A#gs&5{]\eL| 5{: [B?64rH䮄owCVggr p~~`USs޾)}zb~l^& aWɵ첯Eo{cw;\3&\\`/C|[9JqW}#(GP [wƾ&[oI/\dPIvNׁo xHc Cև$xsOҽ!oy;B-#Ă6̼W{q\k~70="{\s79W㚿}f}Qȹsski!|ݼrw>+uZxp s_ ,HkݸJy'k]Fރq ;\.с#%ot$U˅U彀_Rܯ?X* :;$:S Br7ڶ_C-u$ZzssOyq]F\.4|s7mG#m1?֧u@7MM9u7'6{JVEIzo' M |JnX~xV'g\Qw+cSexU_~+Ͼ7}~l"5p=WWb0;v[>#=k~ו?qĻ?dC)kC@]>Hu}^<}hN89Ywb DlfK&_#T|dyቿsZ8LWCg3ć6"!"N<.l%6Fw/}9ǰw׋&~A?кPTy#.Stjb%:͕"F.}}]t̜ƎjD;lg ^i]k;J@w:~]979bPztI~M{+%܈/r|~yn{|_<.(&uq>aXwclTWSPB<)nyC}w\'iعyѼ}Oug=|#7$EyՓ1=ɯDu7ۯyTB?`Lc Oo&s& z!xf`76xӡ;_.!j{xP~&/r&L$8^sޮW3;1UOi|=\/B^|ZWQ/;oGumq'% sQZʣ^i ?9ߊž 뾹'? S>lrͅ=8Eۗy_볡r|Z$;}Mtr_4 y8`QÞ_ |~qsˆ _q^n.? 
_)k*߅/ O;*Gg sW)'\-B}E-3%qpд=uMω 1b,u`uu:LSE3y'WMq?Ձ7N~vSy:#Nyz&o,͇~MA߇0o96ƺ{AP݂+qe4t]?I}Vׯ 䇸Fh<F IEs wn ^`vvL6=cgސCuZ瀿jYב ?roӿp oTxP;[v:^V<8~x9EGP}5)]Ao:}b~i_^ܰ+!Ṛ~/f9-a[F}Ixh?aM}؍!q=KO:fOߍ?zx`'Í|/P@{Q{ ϰ̙P_gh^&/iT ~i/sfO HO=^o{AOLtcsPu ?1oFB_2弪 {"Wu0V wУݫ -V]^ Wa9SA<,^<CX!>O xGUS ǡC?w3ښ Y4 ܨ_a5_ɿ΋\#cjꔘP~XUwݼ >a=O ~2#,ʺdɟn\~SX^wN=d '&&y9g'}{9/ko7T >Y`w^S+7l'r vsBgw\ݮG*_o "+_ux?8rx@ i~C|qW΄~Sqk}q0uAfw#,^q5)<qSWۻ볞zUoP y9Wv>`]kjN2_efNS WުN}btCw"c|=? =y4v`υߍqaRsZg_y:&ׇӥ7CX:证;E^ml3\=BL ;|FN8W _OLk&╬N1 ̼8W+?'źֈQg )Cs~R EmUz0ϣmG#!U ԉ8w_1s>ߩ_.+~Tݣ{t,࣮ĕp.:4ACGWuOU\rgxix1/2G'NɈko(c2\GzkM< }&9CZ/(^}y] K_  >Ld?jP>UGQT4ǞǾr{N'XGad*oற9,0atȓ0A ?~ [H<ݺvxA(}t5O @'bĿ#}3α SիK&Wx8a'U/~ \Qv?ڭ X틵 ~MH[ijwsb7!NO ?%O@K y7W6()lv#^gdn <4 Ԑ`wZK0 UUП!p.yD}8?h)mU'>4 17[~O$Ҍ@*yqAOuo?uǗ=s `EJA7X2 WeM g?c?8߳q*i9.:DD\Ѯ'>s_!^]r80z0TWjٽe!I#|mWF}̼xZmO'ء+p-AVnA߀w}?`+yu-M=?DkUx܊sPyռsSVA9sL\.͇y՚Źy=O9wGg{<e{guL_zw^x?t|ZȓU*dIp;jWg؇- yr_y>yҁ`[ݭc\| ;3+ct2|.mf5Xyi mE?Di>KaGظr&>%A7V4/uEwé>}nAa?\ù]wa[ WۧzwM/Zii7,X|ܺų}Β/Lon`'V@Bw`HC X]L zm:HOٌ:اw'ۂ)-ﴼuBHxu#/̼'%m.gC[S8\}+*7ऐ_Oau؁na'}8s&ܙBU}+"^\%m@cbmf7aC\뙃uf߄Ӷ5􍼶xB˧ׅ^v~5?q0bnu|"Uc{n|xg0>@lW},o^ }:q*"^8L\ke{Ɯ5Y1S) !R|'C|LņCy 8Wϩ7i`OXU~8YtìgFwSͳ{>БL{e ƃ7'<yw^3Ϯ9̳CA|R,$݆X{x`+#]ʹJ?bi>{ySA?S'0e=0 _D4p"(wXݡ?k#_]s9/L(k쾫/Bls8Ʈ{_gۅNK41z{z:[ߤkAޮp?ֻ$6-1}ܳ9.p|wn^_>Ѓ\]q?ׅ֥X%$s~={'bo|ix ̷G|x*mQҳ8 #.wf~.ȿӁF341AsG~ <*=yqDCaq~ܳN_4ۦ}M.prY!*-+qܟ{@$أ'5y:rXJ7RW/\*cy<:OEx=cs*_캔 sܟD_Sm(4s@<3>OI*~dkSޗxG~q57k3BF;6&?"Q~^f߼C ּ)`z<nnN]KgrqvaZ/mJ ;0mr>zODз`g4ݍ ן:pWF=7Zyu`5wYOf@MﲯESU}.'^ )*g;ؕYU^\ >L:`&_7Ix)~sAt|x_6m>: \|by1l>?c$oGQ\0-f[?uz}Fy9[ObarfK Ɠhx g>d>盾FߺO| auvwb#^W;P9dßfe̋ Ҫ F צDskMx(ZE{ 1ˀF}?ukߧt|M<\Ι5~~ͻ#*p{.嬃9k3.tx9#/{γy]LSa }]n;*o_6nr, ygL 9XFx낵ϣy5]E/ #v)g~ 'Tqc=yNC_bOEL(8޿]X݂;efy"ʰS= ΑqWuOq_eCiEq*s 6jZDЋf}}?,#Â|ko1^4ZR?յ]ROWsuG;n "F~i݇L4_Fo%#iy*k>0P<0Yg`=hܓ @.B|KZ~6pe䅼b_}oEy[ssWn d.2-\G{EGG{欸NO9JT˕yоĻ[08*CFsܬ+ߟ;}bD~<O"bg[#7W8B}NUwG#rG'M{\{[̺1xaK~o>T,SsԷhϽ6 |qu#y7,P}z// 1Yq3݅a>Jw"*B|1˰:S[¸}g?Fqq~so]"B/LrsCg԰>3՟y ,B vm!, ŵ^_kг QV6cGP'G@_4 hmg>agx,,ּ7?{7,߿xt>_sTzO!{쭈?JC|O: ~Z8u01h[Ðog.-('gԦw&5Zybo2aL5#V3RǬ{N"9 mc{?60B͕.{ys/=eЏzO{&y\߾"ѳ%0)}k22N ggPWhdgoJ}{:Ǣ vs;cYJu'K9y18? P giV^ lx>srj6;q=CV0Ǖ 'G9=C?18˴jE7=G?"c8?|\z;_{ s=[yցs[G'/19TSCʟ_}Y;ntoA=F_Fg}ˬqJ\~3?e3*iWJJ"#YuM17|t,'iu {Ƽ| w;2Pp[RݧTo3h2?:p7^qTצNr5xb^5?0(|У.ЇMՍ|'wB#DށT zv[-'uGܮt =}/Es$Ϻo9 Mz6Ńg^q9__iF_=6T>0 xuӅ ßH 772/Q(|DrwĹ{iާ͹:?BxEm<<~F,޹UgewCdP=v[t$΋$9Z6|49Y ʹq9= 9bw@r};w'n ~Aq5Ce><'JQ os¹=a'#o;L^L47}P;qf`Yi\fPvm~ht֋xNԁ\s 77kmac_}`K4yĈWxNa >I}zs59J\"w5ToX/?yw .^T( 4Sgk[AY7a?oca9 u΅/pK =D89ޝtޑx|01>v.㫞ӕ=+1xK7>>"y/>~}sB:O_+]3 yUqE;AG5*h7G ʡTΗ;y׹):wTAX{8%$ۏAvηP6ֹ1n ϔ?c/4g1&뜺j/V1h77=!.uG,|BC8Q osϐciG~p ,O y]vJ>6#N{ӏQO=i&7oùd>\ >w=t9\"xDsr J:=F_F|?]z7__5̽4bwcqĽ+bI)WwܳX(:GQչ]G<C ͝x/sq4W|2ף%>_vuhc|W'} '׿dwI-lҼ@u?n@XoaI 9Cw. _b?\ЯJG,փ s՟Tc$9/H_^=JS8}sn~Xg򟚫<^Sӓe1Gu~Nԁ1Q3e!דKҜsZF)Quoy^@jw䛶k"+kvy9tuPQ7JH)'=K.P~ٹZs"eC%[&":xwD7x Io)oT<)}3 : tt`TwI~4$8C4d׻}Ӝ[?|B;C=]7G)'ˋptK.+=1RĵPuNuQtb3"˔P9oεs@\]A80ov<_(^<й/WJ}ozy?7yw:/8_ǰ8AA?|zogR&'Y(_\ikz{qE`*os96AޤKs<7{/c'kI+(7+"*扉G<!o4GZ'k_tWa7<"y2Wxٷ%%9U 'st^2q?↮s6Nn4x̷u %>}(gҽ+@Wy2ℯWD:Lj=^/5~̅|h8tBM~@r8X 9L ^!wu98Mמ/Y\mk+a ^:'yC_)_6Լç:-5?*ĝQW:_z">NM`oO~w΋^Ox|@d?3rok=/7MS\Z?Ev_X=Av'Mq0?rC;~ onϩ۸Λaf,9W_~rʺP{*r{;cU~Aso3bϝv 穻\3q6 TA//-(/&?/Pgǡ'C6E<%LWC ӼϷSߗ>>3#̹1{}q;/[<|v@ޕJ7ܔH4ijO/lz-G1Oy-4Za/?'~ L*i9pq 9L;UX'wD<3'~zqoɈzb39:Ps[?Euc/<*tEo_Qꕑ "~Q^[+γe)rUGXcz>2}숭P~u\' a@Oڨ'0s&7ޔ*ϲAxuuc=tn7;0&?%=8Oy"O91ΣL9]G!i貔nY S$;FvǗ.Z69O;.|e^oW.y - aE|" }_!/iM2gQE}UVvAb+gQ@yYowUNϏZanF(]= de]G;?ˡ_S@s\-;ꗮrExC;.'p7Գ +QGT7ouy$K͋_&m\_Kil2x ܩ}n6cy>A͓/[שy=:CA=NލL׫g?\abފ oڳޣ_\/C68G~]l*. ?X. 
էZ"{s{XN<꺥'֯;#]xA-v^RW5՞#AOsea^:oAO>%~}P=S|K~}-AK+_W@}$_+d[@m\w>A\*Yi s)75ON<)^&ֽ{z^\wZ;N >]{%m_xQtxcq%wʛƧd~m旑#hj$vϞ8鏕ߒ(NW=^ѼCa?KX?e$ǣB퉍i]۹Ѳ?OIKCY|Gʯ=σ_~D甉_ܗr#?8[N#}eyZ{!{:VޏpK7HG3xͳpgvW=[_.%|/}8q2sw׈ykϑ躵o:/汘ϔ}?:|/ta#$<#{}dתn SLge%?Kw/M}hy]s,oD;h^0~Q_+^q_|H9ާ{)5#oߟ:Ï득y]s+/J-APr,|ͣq_Һظxk՗>_X\!_Xo,ƯΜh+ɛYφ}d6+s wTQ:2kOT?%ޱVa懃?0u|Wd\v#lfcO-%|9Ւ?xk s+R>Ac3?l^uyZ]6)~Rr8_{(qi*#_*'+~BvK wwW'z!ШЧeԗ<ߝO_<nHN lj'5WT#?+TIdGWoRg..CVO_+ݧpo9U͖Sނ8?Je Yq|6Z9R[cq9O%N\u=}$gz]qxC-;"A;\'yh߲O>mF'RJ~U~S른ճulq3e NB=nT'E}0ߋ4TqÞsMaT?|mʫGUkk~LfZ~fW+֏>Iky[Yl&r+i%L3GNC}z?xN.<.)P]}ؚ'eyS}/N~]DwU~;OvCy=5>)TI=ɏHԷ2?o[]Cڼ VyDӜskx<^C/xz_=wl3{ : +gŗXJUK겸ok+{>ʧY\8H9ws^4M8[K5vo}NEu"N_>e%6&{({uP:SiKz);t=6L?.yX_>נ_ ΄P9ă;՛)&>Ru磾)~_~r.{vԪRC{`2tk~lJ</U*ߤQ=nA }^:ς\:?_Px:=Cۧ8Wj oھ1+ޗ_[?yď>z ҹ™-(';uO]{^ol>쇭e>JͷUH~ꏕ< 7jpp_ПjkSd}1S yEa5`>3H'YB'1[7UG`GS~\|='w*"䙼x/{>]| v/Wg:ⓔU]I?D;0zuPv4ѼgGRz;O|<: 5hXߨyf~#(w82~|Wz5^ g ?_ǿ>}ǿ>}~džgknLب0>ލ[FNjW3/Wg oWț%oGǜXGvE>t_nB>?[e艼Ho>i!?PxDsO=AUw:U;>^/P5- ?16~lOw!O.3Cy<6ȫ~|3gh9׳b_} gw?AYνzCb{-uM1$)rݐ/xCyFmb[83rYOŌ}䯊؇!'vr{cu?\B>Fi |rfGӋp_z u_χ);6|}Ћ.iP'b> VNrPϐ!!w"{%=t_EkY١`V.dm"E^ hi6⨳gP 4+,Wca3je7Yi΍qT>Uu)<-:g$L3-fRW,hP:ƑΟ% <׭Ov;s%_}3M.⧐N1 s͹~e_K1k} ױP;X5 r~ IKϺp];Co4kY~j/| >D3ԡ@~΁:ޡm؉E0ߢrH#Kcݩw>yD@I-7 ψ~?ܰˆDs>Pw{.XmQ~MȋGe]x &O ?kny[ĂPy?Ūz?-o\}So2'?cƆM`geཀ+\<ڎ}Qy?+{w%f+'c?V _wkB.=w㧝>3B"^=u)QaF{8Tqۡ稗'֯u.g0hF|n^ԫl~Qw0[سK:9sZD[ 3Kmmzr?`^ܝ 9uu 1hk\~O w8%!w cJ݀΃_vΈf vFk/1յ% fvqO109>,xS;WV_~wޗ7K;N@}k7k=›s`>Uvȿy[[ )Z3Ao">|' {,G v\gȈc&DƹsV!˔;~0jc_~[ߋU.=?.ݺBއ|=6:˷}` ~JX4gYj~>߲O~>o,ꇵ7oF!`vb?ZOq]2B*>绵F]ۗ# T8#C"7fSmpkfcشSfk|n`q/9z:?3nC;vb=C[}ŪbⰇ[|,?g)tօuK{m/@ۘx؀z?[=:Rz~{%T6 _A\s? L˵;g/>%~bnf91ny*̄}+q?־\ޜ+D.7^>yZ`2א8s ۰ϫ؇z/)!5<ެRl4)a! F#>?Sa<_)Ͼ \3Z`ogA^86L upW7\̮`?~s:]wrV|-j`_Z-}?ҧ)q2OOOA;*/|A mCi+ᢢ8h17µ r4DҠN9qVxiG>Bia ~w98rKG=둢 Bk>Ns_\O\ْ/}w;)G-}~IX y'*"TV+p䃜'n~}؇WTׯus0^nyM&~ _ ;27jLH ] C\]⣏Ox]xqwx)Zw!e2ou#ߎd:ę4np.}ﷅ᧎h+Q}7Fیau`3C9~]1ɜ '^Rd+3vqԻ5:s[αه0_W=3G`۹Z#zr2=F7;˸ֻ;޿7]C1!J}U[{Nc"MTd|E9z roex]B=2Ø8ySrB|930|/gʙx gsL;ͳ?:N cX`"n50 %ʆو?`¸__-m]Xoon"o_]uo؏`[~%XO=ym?|'F=ĵNVߙ}ʫpvSӰ%C*=u/I84O3OL+|`38q㜓ݰϽ1A2KB[y_B:ǎ$?i<3& ?rO|v3#uI ?`ʅC :$>{(iw~oDk`HK8y瑾O%3!䐸V73E`?"&߬oI΋X3/9?e.)V~\tOrV?%ሏ{-`V]1lWc7>Zdj4N7'.xZN#4T< A/ ]w' `o=A[~ f^ӛs7zzWsOԂ8tnİ<5w* enQ|">LT~=nf_8K^Z{>oW$$\o8HW֧iė.Ah/[|s˱ixb&Bߵ^<:}'S|~8ՓGg:a~^_)z3BGן憮':qT^w+d̕?l 1o#j:a_1){|(_Ω(x$MMu[2[{S5]ő|#(OFxֿ]3#y+`/-+u$O`*%`?ȳzӟǺz9?vxװqB]p݁8~,sE]6fS7^sN!Xѝ$ Ƽ q6r]y#^cN8Uޟ/8< /;y 3'S"15b~1F9s>'Aܙb (w8|qWX}s1/T}1/z ˇ0/Δ żh?Y|7CbⲤ9Xiq3g.}s,ai]?AkO}hSl[y?vt.pʰ#oWI:nF\ 7>m4Ly/tcgW3a/\cϡz㾙gtKn/kROWlp@X>_S5h|,pef~C;|ʻ{bjr6e^Y~!Рڧ;9pcȍ\{w^--5z<̘}|KU~]~8gkFfכF[yuـ?S};47u*/3:Cc#$GN5̼"a .Ch7G[s[3}ob&< ckyx0K?3ַX}X1OimA?fxΤ߷?w$Ǹ&px^g_z&>}_I<:Y9m! E߇=%εtQg">r].`Ո_q?\r!Z zOSy\'εJr:[O}Gfc?Wy&uw.w<uPyNvXkOk-C8o<yأN">nX Ͼw!b!9nKzU sg7l"BUa;yG[>qM]4܇聧 ޷!ޝ9֩V:9Sqm O$O'f}2n/N8EƉNso0ΒXϢ_ݿۋwq]<y?誰g̲|OaiJN~0_ X }:q, _<b7axQ:׈\$xZlΫX?ZvЧ 'g!?1~jQ`=8i;o81WiԻ]-LA={3n!:bBϩ~ݨ5y$*qW[fT`gO?} du)V#G>xw] ث٭2p#6ñ9]e8mKXn3$[ @Oa/G_6~կ*a/ a#oɍW&>,jqW=Ɍ|n|d=-Q)ω֛EΈ aד›'9P[xab&$b~O|n;{҃Y ֙uQ/^8S!c>@}"4!o=q>)>7vF +9ΡSzgQێ3ަ׀X,iN\̭*`SudAn뷕80],%> T׵ϯua\KvL/K׸:SWlk {["q[rb^.Eo[o B\ny֏B Y7x7s0!zOovD}yKO'Ro6۰:?:.$s|-Q~Sƿ&-/)"q|}X~Y7zia/N}!D5# ^H;]a7rRs.q:[3hnz {ߥwoד;@6(z F{K^'2ж1Pu8S\m]m!_ƺ>L <`/>C䌼XO+K9\ۼCzw ǒ]n}q}ggaX:C''#^:CFAΦFϊ>΃o sm @؟O3  5򫹡+^Tomc{7C`Lm}bgτZw\OŽo=9 T}7ПSl@J#׮-ip4?6qA[A||n†_-A fݘ |rFWaWȗy|ϋms#?*ofwᔰqB+9_8a/BoG;S/ÿ?=V3T4h7~-Y1/=]O^yī:x=(3@_FaXVv?h43 \cɌ\yOռnQ|yAy GF>=;7v 9_8h ƯЃ/2ρ#|{/29_Gog;y)s}N{t48GKB@4Ok숍Z4g<#:)l۝0 z~9C!/76g1G7t{ʪS>wyr[9Cו>lIa9tsV|mv񭭫a}Euq>s9Y3s7-Tp |C5N/okȟ++ . xݷt6]3^Ïr] 'Yr#C YyoړgsW퀷'C/7_w=16˖4/~)_DOq:_{j#ng=QΣ)nGvu*dzKllC:*ۿ7~Ĺa`|"Oqv>ԯ3gv? 
p:n?}kw<7XW1z=w˞ߛ8}ZV| [ șo[2]]E`k?/28=ys}7wЃJ9=/vD}Bs |6rJ '8"1D1|wa\);F)':74iAzN {rm?:xxsѷ>Bɻ<[֙e0nsKd)ظWm? }hyo ~!v?l7<,q푃; `1$-fSc=~4ρOvC~E_z=*|ߓ<! "=Lqг/k"@O;8q>bףOO9~/.j섧\ 7: nFkYcgg\*^p<䵽q"(xr7n9$ ?͖xu}A6/Y3uiGF>B|yN˳r.]{ʳJʐﭹb3{ެtU򆸟<5yt?b]8?gəw `Zр}yvμ^6Oٗ-a#=/ћ@xnM,;{5z6$9~ow-Urw yʹa߮? C/^is;zK%zt< ~}8?Sr#^m|Onՙ{ sw| =٤V>s%{bfYdS|'M d<W~;?oxf~͇'uukx}Gmy,[)cqu oԯnuI}GdVqqc(L'!33|RWwȾ+t?zy݄oy_+y(Ǎ|{;i1ƍ&{qww7_ܗtq!^H19wM] =رsx0G3s`omsGmX#L| o2\Ǽ'Z iy~V:gSq埘uP}\5&ډz8wWrOX{ Ƒf;׿5)l[/pt+_ wؼ #D}! ',~,Ya3e=%oo`<ųf&A}__c47ϸ,?^ ?{*Om>ԏ?/?@}'4sP=Dտ 84o>_O_^.#kwߋ\z*ͳ~䝼xg#s;:Aۦdz}Jv03`o/3 o( J8s;"pwn{<a]~z/܊|t,O_׭=T._|Gk-|ʻЋ {\ T$Y\_գ x+sW|YG'w|WyfY޷}Sg^Jr:,َ/x^`?.҅ Ab߯7>~3ˮ#6LR=Q=\;/Y7 okwlE^AsxqE9'S@=ң_Q7ι1Nzsqk7b>n3IQءe13Sz6_oC>߄\+ք};# #ب#>qm?I <<_R]s}|rX?Bn%<ގS>o97A2rS|$/YO_|)G鞔xWvA< ؍Ԅ]Rzw5^k]L I|?`e^Kt8+2/eoMS2˶ߒ}}j!}!-OE_ j˃.* hz6CvuXvE٭/V~]{v_,?]_7 C?:&_5j_=%,zU?P;;@nrC>K,xDnع;𹵑a ?v+[>CV>,3ߗ"Ā=+/Ny iLoC5x aV.}$+wrW l }_X݂͝'PlMWRquQ|]'1OSBv^@ӪbKtKCbi˽~g͍Q掫0x :g`T=p'kQ`WFľ}{Rx|XR~+ /Y|2!7 o{þ\#^ž}oҳîR7;L \e$Lzg? vbym>{)!=?!."ʹSwE?h}Ok-9 | a5:Zգ{sF/'^%1Wr8vOSװ~ž<+z` ?S-pwO/m ~viWޱ{ / Ջ`| IF˒_cυ};v0=ҥ|[W |xZڅ_,;)m(~oy?+,~ }]w~Bj%x7sr{d̃houg>{4yXg{DWΔ+>_}=s@'wC)94PWȫ ;qH{':+z~6|cW#Oy?U_n 9z4= a?BؗۿeE˽? ĝ3opν]sZ+R/=yd<4Kg|f6s2]";z•'0p=~}'>k eA\_={!؍MM?ma|,}+hqhw+(1 6qvBJ?l;xlvO18XoPZ=T{&; mV?cWӓnn_{(]ǨRoD !?]{sxcB~iЛ% <2INODYPrs*ދgL5#Fb؇_co5y'OQkWT( 573?W*C;J#.D~˿iOgl%T}uΑ}לHOoHaݦ}xmmb*u9 Wp虿sgpð}dYs1tޗTꔋ?P&e)<[U}D Z[݁6#O- ϋq~UI]OZd] )q,wӜ:g"q]wQlF G_%M ).ϼ,W}]0}^E݁LF: rn䱫nVu]{r ;>X`g>|+Gװϓ列+~4x+U$?=CΙShN5oƵ9v~N~Jm_:Gj޼Xh.[O> 6,w{^!Q{g57H#똗nG3 a]_*4޷ X!ǹ(y*37b.ߴ ;j(9SȍzCx'C"S  s#=fW= 7O 0*꿕~n[#C=e#Y#~?i${/я_r* _7(?~w׷Ky@Ȼ͛xZ.zpϩb;,r[/ۦAq}lkζިKc/kFâFNu~Xَ z=%ZA# q;+a1ʕ vb]% @>}xsƍ*_˚OȻ, {Tl%~ '2$G;97 xf ɟ+QǝyAϪ]W ==ژ1·ȕXBi+8g\o ._ qEg!O9o'!N+7!Ǒ9CNz;ϽlO ^1/{lk>U!yS_y}xZ5o0lmӘy:QS&~s-_9(}K5?? ;#1{quL <$1(-Z ey#o~fWV?6Ǖ v|%-ϻsň"^^ݟz,'ක ?wBoՀMlOg^?~)8323 4Pu]uq2.AyߠG$j8 ~"YCa/~ZqWVÑX>׵;d ӫޯy7!un}~e\k/y/\>Gz9G<fc9@_,$b+ۇ[NXT?w.DoXmD>f@\}=bXVrav U!,w~ݔ[Gxޓ"=.v&G|<̳z/1RravoCӣS, m7(g؁ ; tp[=wwUqwwTh"⑳8!?LԙCO63usY*I|7;:GKsG zT݇RB<<{^&Ͻ>]9,EzNÆ+7g9x /ݬ_ ֚|W ߁C|Scp{) ź6D`x8 = vϷg{o宇>*Xwm[~L 4w#l?_!D]|0DjD &z(э.zIV2ޣFYkD,Q.F}3{?%ޙy~^u2G4~.2F!q9y0M;G|LjW>l qnt >`#;OS=8_ŭyi$^ )xZrJsU_ƮkO?{& Y!86zd '|yWZÆJ=ƭ^y>G2GIщQ6/mQ:WzߑjvWu yW^n/.}/IQr,a{CXH>dd^jvu+CWkp#^ o6yANn'Μw@=|:PH4TlA!8?`lmL\$N͕ANL~#zߑ?Ss}Ov>Gbi(&?zC}T4H>%ֻ;H}ӛ  ?*y&ۅCO$qt^~Y!rz!~_%Ua"Ρ%*XcڞsW.KW1ܿ%YqCp˛'$7IE}j{ =ч8J?%|oT' [JobּW7mw:F2 ..0ךhK2Z܇Wb77H}}՗w{X-⒟:]S{-oPl3~&RS[fi?8qߑЫسܷW(Zv$u]7;s}S!o;vW⡂e?SM)5fr.s/n95sV7Xt }/3n_APU>+U:c~$b/9\]?geP_FGbGT@Ev~?([;.>~[J~/r}=7#pvX3n=OWO$_{i/ /7w\WOZ՟]s^%ϖf7~B z ﺧ\-14Zr%-qQwAn?T=y9ڃ٘~Vћ+'#(^yX%>c)|\9赇7}IGNDO.SxyIB~&-d>(MZF֮&9 y KefsKE 7F;kx6y8Uouv>WxJ?߂Mnu_oo(z1~F6[M~Q걊7l&yʯz)s[}_XP;ߊ[B+{ t?f˖eg>Oߡ.}m~2xH](o0#^646~6>\lFqĩ+˸үF7"ux)>w[͓.Q ;?k ^j'eXdNǎ|!xZ}v_򟻗6Bِ~刃 ՝Ѷex_w/ܡۭꑗx'#ⅇet4ϣ5) qwovb]Soj?z:U)Hr>:KdMwٿUn4?N><"z -\^{F  cYx1,cq~k&-4MxAyTtuW6'?R{rv^c%3 {@}!?jeVO}N{־H?۸k8[5y 8E{= v#.w8$u{[gnM*]{S0gB zg^eE"ɷGF|<G+߂Qw;yyf툺y’xB;ߒR 9H-|<]I]ZC|:>>},s\\zDqX&k#{qmߎ.qbfH}K_9.T!8lG{08[!ޭϢؽ!#MK~ :4K_fvuDiQpXrNW< y 4#07'/Uz6#ޑZ:OzWC޸{o;17qj_0rqOʐWL p3oO["/W+o Lt7^ ̝% `&ǃOYܢq3';x1S^%<yH$d?M~{!]o?ڤ}ȧxu0./?)JB''=OKqx>Qn/}F[❶.ML??ғJ]ϭ)2O^UKGb?uvwgJMN\?g?| pK4 揈;>hGǎ]H6'NeهgOQl&qJ!;j?1]_%o@}db>^L}1~2ފJ߂/ebȿ)& o;ׄs΂ } vy)c&vB<hTm G_z"~칚˲OMWX$? 
gGOxs*c{+}|aB''=튛Vźoܯ~2oX{~x$5(IK ܔQ=|8a:59>R%s=qko׊8[o;$IԖ=\xU_fz5W0([tI 7DKlL^`>N|?=Za48ʈI<+5ybM<ԡ _G`[$|d|0̳y~$eO-kW-V(F`z`n^%8LLW2עkHVG֭:Y4F~ǗBA^/L:J\gD}M<5uc o~4r~m//դY⋁GK}Jr-uW"~hI*ؿ%EH}%ě"|V{@AᏨ~nM'8S7a &u{>Q\Ǥ5]ps;\/+y^α'̳ _aPwzl|67rWѧ]W&!8Spз[}zxJ"w@bt{*?q1cg,Ds2o|Ws,DUsjCxt>}8M̷\[8;vz%}P繰*]۬ ?-!2>dϨ׌w2M+GM wsa~ vQ#v,~ o.Ft saְZ5h߰|5/%|KN|.X{+x9qcC{/쫑}znϽyZ1\ó{:_ J؟b^^Y̒={jmG%{c>[ၶ[/Q~Ipxzby~M\Qz-qP?N&+~g?L:YBax&?@.U'|]@2/|»ԁt@o:n%/Cx] [gaI{Èeg9Φa܏>XZ~>+ 8{HK2_믳z48} ,qsrpcHkirؾá4*w*gƾI*Cq/^wb 쫳f GnN% I_,1{+ϥޞ:'هfgoǑC3Ouo}1+s z!#y~n+|.NU ݷ{6{W:1Ԣy?bΔwHƏyטoT|[YwQySx׸JN,}|w[sȍКGÔE$Y[;cs!-_R\m).{ŗҏIֹ zd3[ԁ_v9/۪zvN@nCd3wx9n&Cdw{\#/5dN}[-x"`Uz7$q^7]&qG3Zwu:Y2z:ewM9? _':﫹s~Oda/bwEˍЗa`ww#o^Su`WWֺ󽵞~VyyP9wWpo^wǧb߅G]:͇+7=O[4oQrZuWnk*/LL/׷sf#ju7!c^x#{4*.ŽUs/8D7~ncuSp{d'ԧ/~(iK=oƲWwz _aHԽy+sx_)?$k\q|?TtMQ~?Dj1} uO [㛲s%.&^`Qj}nz24_7kNMG>og_tV~S) r0j/q1#~sg>/_>@dϲ/::j/_[#D#B.enKxwLNq~{\Ck[b_# ^?ﰒϏ kQŹr:~a_c5P'3)o] n->_tR述Sܜ ;b;CbV~N]AnkMKOq.dẓ¯ǽ|Iq\rq^Rc?!{T㚐ޚ[Q_Wry,G{Q?r}KI=ۚE Yz47{˶>d?8 }e>Gn&t'.؋{"5/]=A+{L;#?WCxs8sm$9A,hoI|kF[O~毈??d4:{qlܽD%{DŽޯyw:wS$8`dKmcG<ISdoƑZ _w߯}{y+?Ժ04BD>/'Z7qqS%S Oq㡸wmmY$\O8x#EݐAo*A~)5E}|J`n~?#)-43=ǜ_s :yփϦq.k`E[[菿*T/P\Ρ[u^Wcŏi?hpw3Gǎ} NiKO?+}+.Oneyr"{F Hu>GϑNC~*s x=ї\7Xr gdlG~.E^7\7$ۗV\@KG83b_{5ϵǬ>HTHMi}.J}+t}̗ ~An9~eEsk)zcGwIhnSs;77\WSf.v {>A٨)zq_>eo7<}o2 ^}M}! уȗt~Ey7x:) ujS=#:SWQ _.FŎɁ`.JH67ؓizxA|)LU8)ПV FW|2'1!|WT>E,Co-{t~ǯ|Isu_H؍/s$Y(''dqo-qm.͇|* 98 }kQ\Qn?go]U>ˠ#vPYk{~'?սY F־ \9/<)uEփ?1OR|/^{x}vL Ch}F3庞iE>'U4OSݎ(Do1r,yk^C<םkV> ?uKl~p\^3y(͟N"2O C{=IbO%^W'usCl?769ܞu+&Ohn5`LM|Mo= w?^}98+Yğ2jl: .Ty2=N2D2,x ;&{5O3oJ]> pZכ~>% oNӽ$k>.N50z&pTO`k\ȹɾN.qAIt~YSg qR'yBƸ}qX փ|ԭ1ȿʺ>lr9' '}EUI_,3+zږ%+ ¿}s)~)؇ׄQr?Od.%O_/t*ILq#u̠N>]oӼDGlu/XT>_j/7F[/W=>3loO\̌{S<=٣I5 A]\[ Z7z!|9u G^)qTȗu>Z]a OlʩuߝPKxlG;O N"|Wp-pN߾|/y9Ujhw]28_cGӵ#sغo2t5/ȫ _Wy^=jzWxx Wq&pKFO$$~n/ >A;Z{~aĴeOujFb<;vW#s|;Z ?~>}DYj!yOssw7 p7׹R?;^Uֻ)9!t-sˣ|Sv`(ɞפn׼zw~.9<0)n*8q!O"Y[xT⫱?%*VO&yϗ>꜀>gYcm'౼FO>}H r$ygPǓ9a;\sͱ G^)OqYy!s\U#"+!AjPptewkx'tƿ.DX֋M|>[ -]u&^#ͯ5ѸR O/:QDR_ÈZq6nKޑQ)so|[x`^qc:E #uZ ;z.qɕ/C_g !:+_G7{)j<vF۠)sc[5FPY8\5/=uy:`Z*n؟:5*d`'Ǐ΅{)Ty$x2'E#"۹[g'[ܸ3#v9#O']<Ɵ7Qy-[񒂯z|t[[W9%eM7RB%>sA}NCxorK}4 G+-}2O}uF]_0Z7T95gyȋʻ4/E X coRN t~ǫ-~F\U[M6'7 ?^}Ii>G/s֌ة#vEpRA4ޱ6YG~tpݿua9#4z#uj{A‹'4oo/]&vǿ*GpOxvRz#ϡM>38y~ û|8ԽZ 6F %hR)ҼV5ikI|$se>^'{fo,OU_8sdoү0 3*|;;2 912>W"OC1W/  UB|Ί8W ɩU5h{$s>VU3T:uyMuǷB$Edܦd&S&Df*6]+[fLm.ɘڦ(.vH"yydk{?wsϞ~gOk/g ɪgB|"TWW5߯}(Zst9൘a}(|^F*B=wW/axnBe=]>Im _zϗec˓?_=|Tۀ"oyF`̽\36oPpz-W*-N)ݞ\H[Tyj׹6S2}%G|$G|$G|$G|n-vUցo_x^ _)u*%G3*|o ,f%?u12!a ygU^&A.{ܠ_9)^74AzD.sgϷqqͦy30fNYY ^imׯH7ׄԳW|)_<̩fbɵ{vԉ"3l2yGI7j/zq;yw˳L\ϹEH~7-w~Yꏝy7n[d?kN{T-7@p>oɱz_3jj0ti8;_b=Z&i>۳WC>{F|r]cOຉ؟ϼ 8s.wB?g,%_)uǿ@,O}M߿U8̾LlB^~ޔ-D|hg,? ~r!Wbs@.} =}n<!7o yYqy˰'/?s{=mLs\g:.h,Ə՘o%|'K&pg[H`生坻п I鑅:-/gkw^Cu>|zz(oX OϾ/y_JJߜ98u'gQ3ޠ$uI9Q2>8g9|dwP}Mcn]=xC6(7ᩞ.QcBt^vOe}[{F5fǃc{۩t76e3yxfӢ0yfrN;x&VJjmgp` nQ(-rE-]VJ|y`Z_\^$W$+\}jvh!u'+*%OBE?k _RG*Uc]{}+|nwXN`E?>sv?F׌qϧvnLK쎅EYw}gd~o]tksM_@N3^ȉ wcLj|z~Zޘ7]q6߫hVgqݴ̩2_yC ve أ2 ܛ׍a^T7|W}oF- 䟳/b? wkcK+w4N%k}!!}`FѣYnM;O:[U'.nؙjL^J?볍},M! ߏ$>ď^vlXorD?BgUvzrQ8Mv@WE Ocj+1؁ߍ˽jS?߻[xxc^~TPl<-o|^| l'5k?]/ SwWI%+8U$jTO`g0?a?[CmۻLҩ=;@R_{(Lș b: X̥̽} ~<~+?B^z${vҳWٱM9fU'Ox߯/T/Vvlkڣ~]w]zUHL9|?\a˜e[bدCo`r-mCo̩5ߡuk3RއnJc,tw{#>o_{sEwzڼ/>|]5c..zY>5$%R.+w쨲G`I0! |w~\ԏ[eE[N'V Bo7)TUqY5>!g^ָe|8/Fş=duEo|}5#sEčf)1k?vݬĎv\s󾙘oYIzx5\>͓Xs1jU$ AWOĉ<{,[o̽z[W~?+љz{CQg^C\ Mw»_o%g?(~}'9oMķ4ʧO鋓赽1@>Y>I~o{|"hoB^"WЧif* yarz?͇'ٱg #׵2N_/xKbfv˜ &Sw>.n8F{Ļ5Ωq ? 
ow:Io%]Iy Snf](/nמ%>,XOSio<8}2G9B^j1t:yHZGtWs?;'CFmCWPP֚Lgީ;y2*OEk8yJ/Upr a *zBv# Eƹg"Qsʗ }>C 7ɜֆOZ AGxOd^跺w γJwu8B>$ Ɵ4N?Ik\Ssjӫ9Cb`^*enD"FпQ2C]YO^bsSu Q4^&W?4 DE^V}|8n`3&KŇӹK+Ka/?BH_/r^9=%s.-c/?~S?®.[a=.з'LZ!~Sr#)> [/4OiM|C̡2=W:kyKߧX?EJjȷ8 ~daDntI~fv_ϗ#_gA}#3FB7 ۹Q>xuډ:C+ɕxj޲cEC5OR5h4[y֠S(oOXsc!q7AK}IDydN߬"ӧC*Λ{o$a1%ϡ1:sIT{d޸ɩ^:/zq~Ywxɼ Rkf{ L\si'csoMZa6} w  xG3/Zlz8hwtQyq\CRrU+|yِ''~+cS;< N$g=2>  ѹ\N'5Z_uʞO߬CC{oE^tw )1!-J !%!8(Cww!R"c~k;~yח98R:_x7ũ)ۉ+Cϟq~G$uG$<]k4-~CF|OI~'xQ z$e#E#F_98a˺/a 9^οEگqۈ+jL/ZzA#W<4䬪#8YW wU_1>+O!Gb%8pC_'#+VXߘK~EN_QvOONQLR90ħkc#7[ =/ZqK^wԛ}n(`I ]ZƷK8DŽuNQh]NLpKfvd緷0JB Q>FY{k \j"%⬽̭TWvCW,A|~u|mf$c*j_r~S93O+`'ٕ4kmgA2Oa`+8V^Cz3:ԮOO|6:_'_#5;YB߻r=^s959;Rŧ)>?{n{Wm3v!Z_gm_x|g<<ݖGgkC?.rIފSKK@? &WMǷk ῦpDg3xg36Մn{n ::5=B~Nv7[isRrzT<鯅wZ] ֙Z|pZ X7iNHn'4;GH=VϗG7. wۂW׸rhCeC5:ӸQ/l~En_lU wF~^?^o4.}hhr$O;+:_ \!wh _B+UI\|ƃԇ|1ʉ.5kշ4NjKu~qk;lY{'و9zF'wÌBtм}t눇J}{(z@WymOr͛/d!l"G>6O>}ڙN;U[CFF.̗YXu^TgnlM'Yp|\m2y5t< i6M?] s3Oo|ཏdCӿU /GNOqA\^p='VivZO0yqknN`_!uA4Oi\PfFAO -=7@1C~} pY}_ qQj<ĜАy y/|#iF~zqoϧ{B8ƟanӍsb"rym$҂ |=>֧m}eWS"Jm_ Ov/o[nEk|䄝[]|.ǸWo'1sIZU;w3U<17>~4f-uG(rw~[b5h5yR[+oc85wSqb':|wURF~1O4u4_fWP~]Zgw};HXw3*5td_-|ؾ7 ;G. ~ ˅ę"eDd|?L'~I W|&}sWgw@6~Q_'{ >^~'=[sO.]彳1Ջz37[é=ֈWiPy%_寇o9SêOظeNnJ]㸡}7G5|w ]^e?W<k%Qd֏Rn2MkЧq }*SfbiCLޠk}xB/P!uȵ2ݡ%YǙ??fn+t?=wov#Ό z'^a~OV7Ol>gvGoT>؛* /gQ@Qu=-WHFq涌+ꞃ?d|OayO-cHȋE('YkK7nr8$W7?E?}JKVXɗjqG/K\]s: '|?f;y$ IZ6kOKykی%}Th}iO$i|F.ɚx{Q7AO3"fa(ȯ7NGӹb| ەyX|taW#~7;*O"N^׶CCӛvI>9w |ЉǕ93?8ECSsȷoaoVRc-YrCKFBl9瓛GTƼߘ<пxi緾*$[xӨkse>wWaؾ俱`[oOdJ^Мe}#W7gW{KNJ0y:Gnu-/Xts~7cE햯O s/=>-&;WBek6Nߟ:q mo+k쎙롓_ d cp[{ݏ"' 2.qÝ=R` n]\| n eL|y롇}{Ƚ5xB˷#~G\ 9-< @!mFޕ sbߜ=Iuw_[ Y'+r2 qCCb'I]GPcauC W_W<ϡsQ^~9~ } }4^d)#IC⦤I|]{L_޺Maoc.#/Boߪ~N}ICO;:ޞ\jo}f~x+ } 60;;]t\/M]_X3臭GّGgz3# y6 tgRYT<ٗ 8N[ɷ#L|);30'OK #fUa0+Gsȥ9~K|FAލ-]K nq69/>įrEyn>=*rg@ouZrJpUz8k1K|'i6|=m~e}?<Ҽ9G?}"m[/9:u?_eO<>m30#+[S=>(o|z$? 
5 G븂]{_0 Jk/!Q{wY}Zr6zg: t^2|1pJw>Ao~3~(FE5٧&9wv;3G)c'_4W&C>L8d͕Zq3iwiN߱R< ^I7݄Za"2/skޛЃa~(>{ң$t>:ՇD 1;,dFf@=z677j?-iNv!,:7?}93pT։&%ou-v֩iʗ\w3|~_gO﨟A",د1O}zv]`Gf.V͆, tEJ~MzY9|A'+6/}zzyr|_~Ov2v[`' :eO{h6r|PpwOP N)@_VF{Y#\뮵=՞:>HnE"W/b߮>}}6N{Gw#^ks7 !O?<,N؏7#nZ{7/}]աA>t `?S89_~ ɑYǀO/~nx?DrP*f%5@)uy[u*Ց϶}+aQF"#[Ux~>X>a8vՆ<b?w;iйo\_u0}.y;`tpWښ GBjmmUn4WgĮ>,%8/R0gH0J}{^^Ez9!vEnT6NMYǨa86ydG54(?-319>zxtaS?#clS/th_>S×1pзզF ~?mo/Y埜}c{wٻC67k &S/S:W[Em| $@)}e఍7?ݧ}x4?{se@WyQn/z%xdNJf{/i3tzDދnWbNn~6|\p.S GF5Bg,)U7z;[o-B/Q<2{$Sv䞑u tޞq<:݈K/}]ҽ>{Oj7Wssp?'܎>nXE-QgrىLt/B^] />#rcsη,fbOeIs>rUu܀ as3җ<4^pILKrܣа?zOLjΔ~M!4=cgl~g2J`^ďa]5nFi^z)wX|0~^uCW'sa:wm)Sw7,nƣy3wbϷ܀<+6B)8RV?&Q{Y]+/h.6zpni<歍FbW\}_1}GEǤ?>xP_1 _* ; "7g_2$D?tifg~lL'*u掘t_\f^$"xy/>uB:wVVi%zH߫XV-|\S.pG\["O样ǿ'~͍ls33?'ĺω|}8|i=!s n!?#\'Oi[:QM:;rv>5ϐ'Ay!k|9c*O>E^D@Is*۠bgݮ5&#OS*6|Zυ#J'd':W^>|0z|F6M!Vp0ڟUOD^L']B^/ybYr~!sݬ5o n'+!sxߦuo2WW ?GɻY|Xˈ_FG.5zy*%0"_e]+/OvnpF5g[!M$o۬}Y<e05m tR#vOs~f^2ͮ'}|Lį?GhC#/տԾ^إgF~_rn(ag߈; 7ƯOv\п گ)|{9QNWO꺟ϥOtyj??̏R<'O@ݍԷK/ /EF>R_hx?g5`UAJ0ú.?ÓVo ^m:;FuuNi߾5j!X5+7ľо:/B79*Ʈ8\~[?GZݳONbKRw/%/p }V'\n!9> ʆ~_ڏgƺ<<WOB /->鑉u9r~?;;mJ$)iϦ+Z_ˉR#eG<׾2@7!)0ˡs#ǵ0Ѿy J?sac~V7iد ʰ`7ksPz>Y]i_&ތ]"d.6wۡ/Sg}4:gf{J2/6dps|l+JQ7-J[XoZa]ڒ8zԾZ荥}3=bs&# 2ՃI[rt~8|w@GG'5@ؾإ<I=k_jeY?iodO>O{'st·΋V9kY~}^t\ѧ-Bw@9fGrV@ 9t3ԛ5Z'`'=:9/uxUx:=$`n+|aXne |Uoh_ z?zb$f֛k]#t|;u/'Qu0}iHR?!!gҠ0;/{Wuk܍}n ݞZGp!Z'kgKhɃya[z!J?%䵛!}T?w\7+f>]z2C_=lGG"jjs wϵ<bVm zq(,\Hw9:"Ӻ:v|>o*~ZH NӥS!7)5/KY-qO#W}|Ls%W!q ͤƝ^9x{O)ga/)ӿ[DgH\@^~ͫvi}~)׏6CS< 8ϲO,Η/Ş |p~Lb|m}:aDɵϹaC?l5*C|hPQ})t{" ,Uȍ^erru:&~puނ؏=$ 롟{5;o+@KzYMˑ]zڅ6Tk|n]~y\[xj^vP6.䥙*%t)r2!~A_`)J_k}ӡgZuB]W| ߵ: {#;x0/ ~/TC?CO}y:wF%ٮ6)*ij|?lhty"u^5j" Ҿ^R!SF9|:}#}"uބ5^=(vsG;:uR;|E_7;Go%gaz -~gI=[Cy/`L})p2v8YW~&u{7.tv?sވy\q(z GN̟|%+Ͽatao%^ 6F#0}S)F Ns~}>c#y2Ҝ]v+^dK?Ro|~ƿ_@)<6nNGi?R B/~/zahdikƨ?_F IF7a.pE怎ssROGnl:K=+qz"A!%Nqg$wERo7cşuo}[Ižn_y5W=q_ZSsF:ua{˔_5aI:uϑǔ9H_RsE>\>Sv݇Qy1o8x6lȻIȂ9[BN=~,ͱ>~H_ sG26ا+ZWӡCq ql3S'h>6;j<P+혧d>S{|m:T^9?H<Ə?R7W1r-H|20(!|( |NLk5I;23>ϑ?wڑ>-u uH9_1_8쵎Q쫑ǘ#qN_tW]VQqi][4&$NstK|: rU7On?zmn+uBw/Vo`o)>_efo@f9KD]9 ]=~/Y<#SK?q̙|i18˰{y=ǝ^C_߯ }K]ُ;@o:b/I>X}**#rH6?G=*kƱȡ93IgȔz] [ 87L/hCY+jЂ/1LЊ:*>,Ss/7-{"yh[I/a?*}u~uu<|Q Cra8=o4,DhI@ꣴBШّE^sfF2뱇m"SbZ SyKY9βaMڷJQ=JKQAτr$ߟ9 {Y Fޮ1[a/m\z^cCzor{!GLhӽCﮇ'[ܖ<]{د+`+%-QӒ ؍s=.vīGW=1J㌡:Ibf/%nJJ+/ȃIl/j{a9FB_Jշ!Ϧ\ \XnG;[Bq+_ N#As#Om#j?ƨg?A"|;Lԛ wp싢e _CʧW ǿN t.}RW'bj^y+Ӹ(ꉼR]'.C;f/Bݐ"?V (kgl,z[p*؛i?y맸;Xq0\&$/]p~g䅥1g%A:?W~R}P{cP~>xUS)\fM vb,Kzy%rTw do1p2'69߸[пS~j/&ҟZ~S\?~&ZHZ9I2 qENkzB底5-|3 ꅉ^ >p? qGyΊ_G/uϬ{<3viʅ&N)86D^>uI]]BXB(ovc% CZUss_R7px1%﹏s4:W;; ~M4Tsˢv֓rm+^9=#~w'.E;/9_>-t&}|п9RQ: V;'G<@\fvVJTogs;ΕI2ĵ:OU¹iS$uGܹ96!t--_/s֡O>{I߹w"e>CE<8P/`oӓիfso~{KD>JzA@~7iKj3B^/TOs{!7y='2E=ͻ觫9w "S'&t4'c G'y/?C+a ) ;?(/W :5ȁ}WO:']uyG)Zw1rax9AAɈTuѿ5. $\f{ʼ/7xH#e>{߀o!B<;Dޝosuu+;Ú2wȍ >6%=2e+{ 6oI2. 
cZ쑂7nA/ޞX⹕zBg "|2uUF /+}$gg%ow \~t⡷&}d?a}=1=P;ۏC_}B |2[1n?;f Z;<~,}f/'Q'YK;%+s#k8y2wOOWW8f6}X5?sf@nJ_)&-G^AW=]eΝ}% rfwx?cȇ?~)~F1X] j!o?orA,y3ibZOe[u{_(oO>4t>N֒vub葸~'7d[#5Wɧ "{dݔg?e~Řz?FƊоZε\_=w AƮ}Ĕ|=sa]e-vO1}^z^.Ͱ{>C_zZsнíɿI:J2AoY_\K7Ks S'}q:^ſϗ9m 5Z; ܹ!sZ]ţ?]W4:|A~EOVEt(yb#9_.t0IYod[k;.[Mi$shKio e'{2E2Г pĬ fSQkc&V~y:o^W*Yg z?T: 'u7:6eDnD!?~pNxuy0q+ϑq,e;|:Gj gOL(ر71[!]4~qVʼ ym>?Jty4yꜗ\zL=&SW=tX9\⣂v&/q~0:,M3YV`OZ>s ?lmqRxݘ2؄>o:?̼~::?_wCH(wib5J?2\~9432g먍7C4ע^MfNHF(iw$Əg7ROs?_A8ϟ"眫3KNZqr®|%>(}kn>=cd}~2Y?R|,zOӾv |4WK϶e~`.uERog˹09Bݯ]{E ڱgf?C7@<5?[gkGGY wx;8}#ֵHM g솟jzB>=9Dly&zT ?a5KoO}Ա/ ެt^UMSRg _?/K" toX~|Dk͙wYh?1+ᇯZ;Z.H<, ~jyRRd#hཷCUoy:/SU֯Ɉd[R7H>O-JS擯KG\ #d?'t/hZO_"Mn֯SUEޛx$J=0Ϫb'y?I\f2ѯLrA7;!J|>4!px"?F<~a>"o{+ ~5΄ؿQ:-qAa_"s6:Y$F 3tѦ?k/o S{1`sRvbH^G v{EuHfvJ/ i~-u2Q׾9Zy-T =r߂>A?A}}iU m>oJ|VwYcΉQX1{t)/n/-|%:k_3YיCoaŬO|.<#u>n\XN.v''2YSO%|cᏱձ;5n~jma>Ol?+zk}Gܑä?jAt&(5GBԁy >]ԕ,8~9_̋SO+j_PwI71te?guUyJv sUy {?E_}5K=_ ~9*}v.&+Hʾ~t])xN队*As3o~?~H m<>| ?d QyEJx>9]"/5mM|BڗƩ_r'kC}ظĽ}~Kԭ=:-L=_v Z?4&Zy #g O1~G:Wh6{_i?S(1b4~e}.s8B=SQ]tuJRҹGV'IsO_V_ڜ?)n+ZψܾT>gIEjwn Q}4:Sw>/}U>~~T} T(Zږ{vqT4ڦ]鿤~>5&:HkoI3iMs43ΎOx^53z_A?޹u%Q)vk1oY;~"NQD:ygc_~59}VbRf?z #?]5-4.5~ L?XB_wex~.}}bW(W;r:~)p:o?goqk:J䩥H\<>B{ԗvT}AХpTo|`l4G=:?82 ::#:x<;Yni_N'nu}Z- Vx+煠k&s~З7pC7OsmEco%_f˲ ޅ_}q_>9p?[6#k~y>QGbw~VϬI|H9J_[vЧIf=Fi:;<h>ʨW}}qGԇ-$,rf7MhA֜?pk\C/fsLCK^ꊷbqa;v[)jRxlC dO,i,VZ9d /u2[hL~<voQjQxbD𣬟veNS~SL |>/w@N\+,/R)EZnй'U#;`zI)Us}>~ ]pO刑&>wV#||<5`ޏ^ΗVatuYz#]ыRįeh'dWtoeL3?_ \+̻ K3N~vk|Z݌۫1bXWOS*#]e/{i?l}!u̡!xrsOra=}\>5ʏfů^=W2k_?ǙX{o'p$/zNq'?z >ܡy4T¼rXqo$>mO]%|3g#ټXpg%-yГG'rĹ.2H>3+qR;0ֱYosJ_qI^,ϞW%>`m!omS`B2;~k@A_o_*zz,r^))]b 49tygT7Eݰ?{qtfG i7e>`j=@?JWS Nuco]{!Ϋbס~[ ~tLpJ\;I_gjkNx:z ?/އS"-~WoG͸n9R?b-;쿡E~%Nv2>B~"BS sf/ պ'vU_'#7 x'f$N()U'G_ ~}%Zy/!w닞>KEqZ`]u}NIƹTWv]q?.ѯRB^օ\/b'i}9F9 ӏЃƵ-Ǿ<&-xP8i=i 滬|Y[=g Z0a~}ܨQ }n%p>DClk+# ' vk\P{!(ta,$)zh=HM9vz⇪]0wjXyZ7גּ6",D[Fc+qUPϤ18rPIo Q#gPA կҸphCe0UxG4c|S4ʢPBzݬ:?GH|"gP4I:Bo׷s&$ONQg2>ܮ\׿?)rCI.Cpڗ]q :F:G_l>ZWH}\ NR93ŃӧEBҼƷ8>/0^CL- QC毯ÂWwTqFڟH-xPtWp&av)s~\\@akү[dZ1ֹ&OW|1dGXw9 dR>y961'_"v E3PA\=ȑ95gIp_:bȥ,#u b'ɜ_sCzcWNZފqܿIu~-v}Re9|7)4yAT}&şPKޑRO5H72&}{썺Fxq!/s&'d$D"*}4РP~k}{j7'H||y~}5WϒQ&=UF?aV{>Q +A`mW+|9a], c Tyj(Tu :xO"%K;, Ovћ&]!AH^>2$KGTLs%SqޚN:ή9CnB?_5~kI^YvMNZ'zv>#dNso#+xfο8y$wq'y Vt#%zP[/ӣtov6'<)Q]Es܇ë*/^2 Sp5F|jC;ŔOTO=o&| 8憞נq]Ou_ %}NYHb_xz+yCF6Cx~q|cs_\ SrzOψIdtq8řh{ŗi=.Uѭ%rQ䤛ot?Jtn^FVVW{G^؛. OJGa &7_/z33Od]ȅ([j>:r:yj i}vw*D^H;+B[SZ_>]Z?Jke Tf7WCqACj-TW!/^$\sjeLuo/Vyfgn:K|FZ*rDwZ?ԓbxrswIN~g\=?<9>W}e7ߟjoNCmǮwEw󿿔\ֺy{NoEؾS t俷fÛD}%/}׿_}׿_}Ѫѧ~eWlx7!nSb3?t}8ЙWV[~е?:7Ϗ:7/%>tz޾2ďɯs̖яwvvSo.l}dmH;ڏ#냍?R'y3~oϛ>KǝwK*\Ǭzn|nW.܅x?9^k? L>gS$Y?kH#wgΦM3q:qz;(N+ M GzfΊys]>;4)?dQ{pg wg9oe"ϛb]L -3Uυ"3oܞ;}\lυM?֩`]3ҧYD2al~-}/E?ޒr(^~?=+FܘNwLl(z~t'9j'י:H3KϊKG]^k wFmܛuG##.6/`Y8SrccB_`JAy;COvcT:\e(MUǐLJ9EˁT}m^Ju~Y( O5zp˂S; O*wxΌy "@.܊88>?/wB,JMx~)G CNx~H_mEACmَ:kJ}22s}9Y,#g \Qbw ]O\+W~`3K?g݃Sg>H_~p-W̟t*m('KMVq#8߀\C俢ww@ޝ$?^]\ O3Gbϫ]e_y1׀g{M;+8߽ c#buLJZB (=JH*KĠ .%].) 
,݂K(Rg>g?<{g9z L \l^7z($\R?1{Gn~i O2(G?pfìWutf7iچf>e/9 kgf$~﯐y`{ҟ}X:GvGW~)}==Nuqz\2o^ku+GٻK6m6olRl8^x"/şۓ6K|aM>YuۂSG3v w؋24O2cNKߣe]Z{ z)q~c5 [^7?|uǯf˃ ^swwu^ɩ:\Bn['=yW?o9>'z !i5BCd|oq2;~`/אַSq=r9/;Ƕn.~ >U\Jx$}t;9mo[1 $uɋY ~k7ʛC/5_',.!>38W|$ۆB۟~ŜISixam뺇mP,G_MwڳJ!Çr_vRo/ :j`ѷ>:Gq7fgni;\_~r!ו 2y^Bnnw,9~|)L{s>|8'"3׶T~Luvno;_>n}6nȿa񜇇\z+9Z V:uzW$ ?p!;x9s#/=R'hњ?,vJy$r84wހO̬\O/,&Ex @+9)tyQ𼇺cO-#U.YY+KsUY3֢?ytW )7gy/"{Ⱦzs8/^wWwYޯH6~s0hZ;"#u)5:'G/חr?>><&|Wr>72L]aP>O3qzه3 š.IfENm1LqԾ3ۡ/Q}kIk."W!z<1~$n nnaX(:UmmU$^OpY珓 Gm!'^(;F:`q}w-G"F/noiG >}Cߤvy#}vu{ b$?́?ywfX9etOouzIFȋORpD䜋i4'G\ƒzg4[=uw;8=suݝs1nks̝Vu[s?柰|PALWK򨚗rJbZqby7W^p߂u؟M$~>j;oO{#bgjJ|ftnX^9' 돝7(Jn{c_G߬&;OXK?T$rl\ r4z-&N|o!{؃c_lws{,$EǾy}On>d.63{V>%?,,n;z4qnKoſFau|Ʌ]+$%yg3r>mOς\:ȕ{ZQї:{OةV$9{̌dݔBL?o4vW3 q?9",Ɂ_񇚯b.J0v-aAQ~y3sލ%_;\Ks<5bI wp/= (Y޽2+S\6eM}p]<\pPtS]2~x}|W}첰H{| ~LPy'`NDQk|\EOMps1g槹Eq]3z>h.qi֪t죜++^: -|VTKxIq䅌neV[t#[:%=xc-?# :+.FTEjO tD鹢}C o ozOtUo꾮ʼg#xn qϦf44>GgU\#:{?6l*"_M?#!ԝssm#wv`_sJ|tLw|}}>[/{b4^Zgm- n{u:[6(bsip%8H~r~ݟͼ$jc/D g>GGVKPu<zd^AQL, q/y?x"=_9XUg{?7?>r7@y#sI+ Rg^w5bo\#.C~JO / {=~ AX.~`uFpnQ{2wOpGB#ۑzfg-_\b=պzd]%yK7c*s9~'踰=ͧop+ $y:MO_kčz} uOc9?b(Ϙ~!u?V+_F&y+i/%i=bǩ?>UEAN [5? ɹ9X#,u53mpؕ#S?:|])#ih#Crm<-Iݡ?`ji\uʙaY6.(0>m'#@?'O`ʡ{+qBlܧٿ&v1sݩٯC/ɜ''P:o|Qs.޵RA_Y3|ġ1w9;/7#5~l+p([4P*͂I2׻}u ~[XBu '`+x!N'~!izNay7Bql" Zϥx@vV-Y{َ>=lqUfƒ^gvOcmrS)3_yDn9xr/8LQi@yS>vFOH>Aq9*r-nR韃 ‹/禡:̽Y'rk~#ޢx.]g2dyv3DPKue[7\u(1ȳ٬ppokڿ{4<$ğX@Uo?p6]ƋE'On~d/sҝ{'/ps`$\du|NǨ&W''a|N]j_;#n)*q7aO@|#T%}Y&^h곞=G^O#_ҭz!ޟv{}.7K?c(umR7+d+5" }`})/Ch$E|ߖs#_| o^;j.Wyp0uYYCN|fK|I spop1 ςk( =z=KӜ6i{~kct춇*^E6b;ͮP%}Gne! y`@vd+|ʃ_~w"\cUaږ$~L=h[$+!|NET~|EEʺa"< ^ji鿳_{<2ڊs%xC`4޸,4q ܡ!,@sV?U{s;kk&^[޷~U'; -T>|ƧS0\.#`5Xj-q#o^NRM&>K];QWr_ovǫv>df fݲ> _>b]}Q=cT{}۽?ſQwXt!:?u߹-]?uI.>3&dYYY?:ާW<{MOXC^?棒qfyUS_Gg%|9 D㟆6C\̋Z6fsy?y&ߡ*>*x`}j]d3M뫭d[u5 UBժzY;vDH=ى ڧBGqw|G[H}пH%8}ϡ/[~rEmp|񌣉 #<6~݌MRg{?Jc"k#y_eO6r_{OR܎qפARC[bԍ- S 9اwaݝ|"~`-e^>6AyeeAľg~]腃۰CF[>}z"ψR :pY ͏ZKkg'۳A6`ʆsbol^yqf(qS{Yܠ~=k=Mo*eJX/Wֽ?l%oM[̿g)ɔ Qb/lk̾L*D>Bղ/ QqE )#<$X V9,~.س#NgwrCqǣ\ #q|73_ȃup#'szo+c5˙~VwEߊ#?=:o/vE+^PC'54/p·griߢR~=szj{55j>}yF3OY݈}޹~OAC^t1CWB O쥏+i_B%_x[ Aikm*^S_og ~ث5Xz:Cof#yϩ}{GVer~[}|kE7J scCZ".v*S?)ш;9{;6U)^YP7Rwo׵õr8ąz%ysFgu#GO?|5LrFXy(mtmI}t!oǏaFi|/u)}cq)Y!?41/@(6c)2g>P'.u\J>(Gg7c̈́x֋){ mO%μns޸;76y#vDBuoؗ"O`WF٭z}lL??q:x]/kg ~b8/9ޛ:|Nf;_'{qiá~\_kd'?y]~Rȇ$9{c<_לfU=(=I#yŃ7/̦ccUi^D<6Į;D\yR7{MnߐO$+:;b0qOк`eyոsP|5qy=)>pRF 2Kd^GVb__zӫ:;5 q= 83-*rW$.y>?wG^n[S8N5K)n2חぜ݈G7]Io8s2'/&}K^+.N 7N/;S\Hc_>.E.7O1{/]}9r0vٶrek+X9K5d*~qO7O59r{KE>u_uUk?z3H>/O>z&r\q={Wí=xcV}GcJ2c/ >Řxao8;>8aeF/ #z>zրp6&%.#t51=/ҖߛFӁ;~ĥZ~k[W'Bml|~Q;Б!Yγű7]n8]{Ȭ˒Gȟ_d;[3O%q>_ڻ[O'Ϙc9|3vU"ďęݧ{h2Gj%gp k]o3\K?Lu D\(28wcϠ/t_zs%E:ryx5߭9;}JAw錞XVw+G8r>=:l{}sQ5ܧ={_sg_^G <$DĊy>s }28KsI_3%qq=O[/\O.wJ0q+d߹ӿuT%^_hAy?ʹH@ڷO뻢/oh]y-2r_ o3vAH|:'oc7ȵS_ͰHeO{Mp{Sf+B3xo/ȃ9lr!xL#Gc-:co?-0W-s |';pͷrޢ9~;7nEo|Y|(pV}3ʞ_%G޿lw{C Zf oCI3 MA#Kܱ[8) Ѿ7RolKEx܆7AVGS{ù]}=l*  -#4MiS=,Sc_O2AW7a~/y ^[U9 zjp|C.`-;_u/y3ūZk\Bpee2K>"oN'z$aɉ;z=z?#spyx'a8m ]{_dlفߑxji6.<k~~<3nݟWf(!z!%|: wγa/(vσw=C\=7:KR8B:N>g+xXF9R͞\ۏ~'?-s >kR.?82;BW!%GȀ\9iĞg\+5>|:^r3OM/'52]~y:zFk _;Yk}@M:~X3u'π/Oɨ~7i4~tttvB} x~C`>&> `7}H\kJv_ >eB|GIN_.8xSWqkΟװFHGG"+bHw7 |= |rV숑ww_ȝENć 0 0wzYVA 2ïo`d<~zāӷgg.ʕ4y!pm~nPÏϛ[^e=O?bHk]AF\`-ѲI9>$"Q=zʔYg^Zom |y M'nUcC?QOԏ#D_sp 2'#?Mw;0N<:9!8_tJH^XSҏݼ5RY7ΓlyX]kT|9 rKy7/ Q/[:,|99 i&{z@ZɂCp^=oc~Wȝ7uO/%\7{Gmݥ#׽},k[ucax ^,ݨ:FO;=Ov'}qO?:'xYC㽟M:V[ ݧގ}(q?}$ĥ'FlSxĥs=nv?oÊ?oO}/}~N~9۴aH>T>J& ?d.'ִ3i>ىu-șw6b/|0' ~fq+LIl<Ɨ>Bz )t;kQ؅w"`A|AѡsዤYώq.|`  {~y0y)K ؀~a{Lݚ>VpVZK-NM;B ;]؂ Ϲk_?J\?<"D3q?-7*_K{L]hȠ~bf9Awa$_'OᯋW3VynBؗU@Rw܆;}<,'7s;KR>Rp9;͎\|v}TqA.Jvkrqv鑂{+&tǮZ}t #cd*8N .EOAF:ǐ[^c)bmM-swOMG?V 
_6]MW\/o)]C;=~pzhkW1_n8ҟ|Ήrצ>Xzߏr~{c9\'_kоfG y@uWy{鯡r4qO-}.L{mx[WOIrЫcH|[s7Rq۴Kп' Z~ ;vR:zVkg5dƧ5"0'vZAyRg c7R4>-~*,q[^:nVw9UV W88˒ïe}e>F=O Ի GVWKУ9t{f{={8̅A|[Rz2G%n32フp-~q/Ӵp2g$8Dh_@yui!;C2Q:q?`v}˺=xO {-ݯ@OaMU]&}k:DO+[ ߧ>O8us)6,Ǥ+l>ǐhٝ``N|8u>t[lܫl$s,|0 *tYtY@߇߉Z_7I[;6L$yEV؁ 3!_!2n._ ssQp?WّCbRgtlDʼZ^ݗ9qO//n>E{ _ Wzϰg4GAGGM;̻߱Mo g"WꟜDfGN?\MKNC.>~ezg:)"v);t~񆓟ڏIO8wkbw8!|;!qM{RϢs߂sl ,Ыx\9`n\~EoZmwa}&N$J TԄ/7g.􏴒> 3k,t=u ЅroC Gӯ(P> wxxr} &P'sjZwp`_X__ڿ z}ȼkl#sԍҟR{ܗo꯸'%>< t|M Z8ˡaȗ{ u{9+C e:ەz#3 N"K߭}CF48__uLjAHݧ4֑Ovr5搯4Ο38LoK-Q:PB?Vo.ҷKw<='<8cc~E|,0~N.ddVȺBI uܿz!qzs 9c9PRSg_gMv?̍v~Z FӨW.XC9r_djws̍3 H=(P33~YӠ׆?$vꊽ13F~s~6^gcnW.Rޥ ܗyC/l$q| gsZbHHC۲K~C7-%?S%N*C?LaOQw֖}#A_WD9PyF~Iw8̆s zcȧ!z%Qǵ*v睃s([|YiOXߕuڑ ӝ|0}^xH9Mi?dGfϯVjsPoo8#TJ>-bw;#RWcRU__\3=*|\}z(]@_>NX}S >hL>e͖$Gjܼ߫99Z˾z^(4h(דy5} \̳ A-_O`/9u/r&9bk>өlk߰ f=#5Al!z_O:]bD8# ܇!$yd? |f݂sj;`gm3թ}c6̷ɗ:u79eZ4_(^"K}{ܲ |oh2gY{ _1?~ǚUHa=ܒo!~W=wKZ8Z?Yy~}yC~ m4@DfzGS"{/7) dO{I~=k9x@i%+2d_} z4pph'~~'5oEF ~Ȃ=48 c +@O#w[} ~$oZ8腾 Gޗ[Vȁ鶼x;9;){~s' {hEeLDh\Ź Q5b~ɓm6UX(!zہ>ߣϥÏKӒk+qǖsd>9Uoiӛu,'q#fͬ_*}ݗ޿_+sFg2y;_;K}$we |5@(~wA*9uM1$ܶ$9vcˎg10<ˣq$wYGqS^iY_}| yTBKiRc?ƿ0?+5fEa<,ȁ57$?uBйK-{skc,x>74Q⍸7o;0ηlbh1ׁΣv,?4\O\/\&Bm7~kO7|^/; SwEN8B$2_)OGn[A2J ? k&OF^[2PZCHye6OjD9zM=쥏 TJ>3C࿊hE ,q:+*`ܣVT{V z<Iw': Wȇ=>t4 ~CG3̱k A"F>ɻvG\І']j }ͮ|?ZgBL#Zs'}גl`K IV;8@6ۼwsz>{xYySi-1!)N;΍}=$q-vz;f^q'ր6DEv]Yׁz/ o(Gw_FWC_G#;[/?WZ˼`C?Qxu<ٱ/EDߌ~>"/5X+N6QxEn)Jeczy+!,n?Mӎn,< kv>M^BaWG~*K=sk݇{wϰD&/|qɏ8n%R`/VqWjO`6zȽIKb$;G4|L^az0r߳/Dz?_9;ha go7gʋ蜐Qx?ߜ-5GpgL":zD~VO ->/Kfn&_a`}SPw8OF\;xR9HOatQGVA^+ ;=c#OpF7w4 5lRya+~ 7u,p7GcF^q1;5Nug;`?UEwP s4v̯px؋uHHaꈽcSn>4Xwrߣ>y>2digO ݚ sHֹkQXU?;yެ˳Fn?>@ȵ;@~JE߃:-O{9L6CcG/{y 3] w4\Ǿ_x>F;~_x`駢x@ٖ"uf:0brqK߲KEiWkoy _3)>Do%U8[(.P:x 4g o:[B7 {Rq rUeCQ; ^2H5_WwfYI oպ{No:4F278^Z:WF- @atC\!^$>쒜qw? O9 :˦ȭa/38HMO9y_y ~)ka/-;zfvG;;3җQb:N~Dzݯwd}'LPάtW]4KcAg.Aw2^JgLqY6ZWiwl8=9NFJMWd~Y~IʛU~<Ӆb@fY =k=޼kI|Tz?Swz$$~Eз΃:鳓υ<ڛiŮ֔fm?GN~}xW ۄY#ײ?UFo_l~!|~|U;XoۚT>kح@U*U _o[+u;wϫ'jsfջ㡟GXȁxvnaRQ1뺍[NO珹GsAzqDuW䍠 ymo:H|םMT䭽ށl:HUpNؽ{LN ]J_M;TgWx//jj܇7 SL魞鿦>e^i Uy_gN|X XrZn1%?('ЭY3}.+q+7zfٓ??Oԅ>:L{l{ꡇgG; E_\\oo!u'(:#GzS0;y:/}i<;{K+Y^]5Գ\z]qAp;lw.D|5⧣fc/ __Ǹ}.'yc{.4o/xIs@׻]Iy] Wٿ% ]JW#5qw309}9]/L!<8>}E{ ]˂.6!,vS^3F>Nqs5qՠ[+R8=z,/nP'+Y :}N˖|kݫkau%s՟&N6t#A2to4;5,H9kDps1[O_?xIS{iŐo܄|0' PTk۹/}MHש;g~iJ2got~R6;O,Cp!C??Bǂ/vN@Ϋ qm.YQ~p[G clwe$s%ol4^?v{_s<FX=j.|QaX<9|)Mu.$?L-F~GT:s2o'dшt;\nCfST#:VhǟG$r7I㏣yvUg4.<I: / nFko4rzF 3a=闢uN܇Կyc]䮞]r8rw ɺ++_#aG*!_go46-ȿ,efcweBSb>j쏫gC#כi _g`)K =u2_ۈj1Oo@Q;V2eުm9kf1BՉ^auekۖPg6/"5i܏~ָU _]kT<٭m+]?2bvDT^y?vIyǓZ7CTA?s8s9nfsHM^[?zEK-}5l$=48MP?sx7w B5Ȅ4NQ)ħ^RjO8ϗ5g|[% .(sPty%yW gV@xAr̩tjσoN;޸q;Bτا;߁O@vcnʄ!zN@H=S&? _|RkߍƟ~m=FFp n#uN_3ykd"~MjGwC̬@ƌò^݂|h. ?ͯ=]k0ljKsoZv>=] >IL|8]~Mٍ1~+2i| e(jup#Pdݥߢ=2q ;DM Gq_ 1$sߴsϘ]Rk vl%Bӫsu+_U(/bŞvK?<N4'&qT!<-qv5 &ȍXI'_t> :C -"W2|P [,'[-yrfĉ̒ȁiЧR'7+.ggow/ŏ8C/U`za}ǵ#2TYL]ˬǻ/~ ?%}⏉_K_-z Ca g~ۊKzFOU,>}p~F8lƑWZnӠqLhȜ ib7ձ\;!w:b\s\"YD-<=zN+9YRx!Ϭu<.ȥ' "ʾj:5c-E>Uu9d9ߘJ]sK \b?//q7MK=, ?wioJ| ZA ,08ϛ~/ϝlo9nZ72]\?_"?ͼISdmx|87=,K]Vhbh'8 Oq"z>rx_QRF|hم S%o.rr#[h4532BWRo`̍^>: Ov* ~wQQq$c߄'ٳ)4_h ̻ĿtQ)rf]uYG^ Ok<fDusrYSSyNM.QƳD,DJk'7CS^*UW%YUqMI_G=>ڷȩ~QG|P-oK]뵸}|ǿ[>#RnڍK=ߥ? 
ar&iSQrɗ5FK`ݻ@.[\eE۫uoug>{9wW3brPFJ[;-s--/< Co&Q}wcw/ڲFT VS^w3;RootT^Pi:u.@_QGG-s_z3*y'?0's͝pWxkuC12T[Bx0toT2KjՇ:OBZ}4o='@:g?z_7o>:K峕o/N%7p_bǹ "Gۊ}=i]y rqwK:k4\_C.9G{'q8f.2p=v؛j/~ Je؂~짙X9cj ,8zy>h9߫Ҿq 0:"o:>a/q˳; Ěa,<'8~ݰ+yh<7RO{dҵmWSw|7=}/,<2;VGoDV4u8I7)vq# @~ʒS\us/*CnoaP^&}&qOŏ)ڒ ݣq~ToXocԻz^Ӿ;t]} }՘]* Ku[:~cOU+qeO>^b_H~B:_S\y^c8^vUy^1`]<f?1fwo:C߱{2ijKޫQː˂?1b"~3hlXB\[xT{<:n%{ռ h\Q[kݐa-?oƹ9<!_wɧثG%;~ćyޒoxW䇗5ةaݴ~zouJaqwKWH\Y3w>;tӞ%uǁp-ow8g >vx]JOb9/'v?̔ெ' g#gT)1qcUcgطfK bDMf:;_?t'O۱?)z~D.˱nr|Ƴu;K ]qkkگ]`}W#'A76NJxcIX֐ɚ:MJ//پQ]|ɑw?/ܓġM/o$?ocsev痺O;I~ĪEiQqF'b_z?=OΥߊIc7wxoxΗҸ\`YG]S I:13L<í~(x ͛ CGw"~n ?5?bIz :=]=t>OT %^j_c5/,76'g|lR/ .ԘtT)!o"&_ѿcޞmヾyW5wH.wy#7zTj_'~ tw즅51p|'>eY< ;AAO } 9}xoyQ;DP{F!%Oݻ|"kdU} Jyq5Г='1W?Kpƈqap?*]XÑW5_'8ɌȯS7O nݾҙmjW|2ѹ=:+~^V?qOO]溢5b oߌ707 s`zR9Y _^\'y_~]Zn}a X:+qͯ(Iy/X.~W鹏?t, n/HQ"u/<SG*'7&q!dθ{^ՏQз1^C"iBg"σyYͫ\ZVVxywAic߉ݥ eSON;sDW☚uedՏ5N9ӓI v9%?든+(yX'zSwY OQyi]4wɋ]uOs_~>& '0~z sH>LqQA{=yE .sld; rNp0ng`Iҕ0Sz˱TQCN̖y^d0q}S [)NA4d;%x-nϟ7s#vxD8x{1<7vn\'w ;zDJ!~Fʼnugm<)$rOD4+xt_k} Nk>>]̳F~[GsvȇO̩S/9^<OZug^-:"^vԛ[G`H~_x~<5GPMڷR5" CqyRȉs C{sqh~xj9a9—~"} /ģtワ DZ$c8^?.R?懝5~![[@Gs-8c\ /\g.~,Dv󿿼$~9ϠnBǃ=ō "w|]D>ژzS'y{?@񜊣qku2aA BO:qXtZ8ջeɏ:4~hUw2BfAy.)!u3 Ǵ]E퓤ts奱!~bom^Ibxֽ߭;h>y<5לm{~~Ĺ6ݟwsD/nz]+y|;kW6ғ'[Yl}y}\;}П'Gq%7ۤr?2o|NעQϧgD^b=N؍>j?_W`^ѝ=I,'<+3$ߧ= V7'# Ez.y L~o3e{bT;u*lߐc0ٙɃV,#xn;rkV\8ٌ f=[y*6VyUӒnj>y_/~:WO݇1GqJ5c*uzKcNYb,jg20'+}~&5Вtk.G4C0h:>šǺ+᫩u8GN1 u?eNmjp}ņҏ]м\XUN*ݨOrnaSC10:rg8G?G32S\s]`nY~{H׏us._7\Uiޚs|ɜҏCb .o3@7? "o|E O}$cGnpV}Elֹ ݺq#v,=x)kyjcA!Few{@]k o8E/O?nq2, #?~k[Ax?ɡm^7i{{߂>RwiSW=nQ7jX[vf~Mlg?G~~޿yTnWC~9!p[5_`║~Y{'C+* nZ<>i84ه/sۅȾv]+( */zY*uYwh>Y 4_(? ;hSf2b:?v>~>*DvEo`WUc%3(-8ؗJSj278RrA~je\͙)K=/KGxٙd<JȬYZ `L;BRI_{d z*OU~[f~bػj"Wk$Qn:O'1_Y{> ? |R!} DX(Ĺ>< ~ż@9z)/~Ab\fE>Y- {VmQ}%\tu8A?_;vS y~*'9Urr @oB7 lm 8pB%N 觾{J# {=enĩ~BmF_6\l߁H=N}eh Nȃmf\dIW˜裹Y= ̣>UJvH7!x;/*YcvRgu3sכ;!YCEE.Y|q7:qߣy?]z]X8}Yyc\U`9s?}+Tcw97{f{촪|`fϫ5 y\F?ɬ8mPH$4_x{ȟ2psQ/"W݁CeF:a_1/~~ O1Vq}fx۱.S9=z.rJgS}^QWTm ra>D—٪r\xQ[CS^ƾz}HECӵw& r(C1L{6_ vVPE:3wrM;Z'-NVUO^_բ?^ No'Rk'!7`o#H;4Q"4n;iED뺭g^NK(nӍ8FT)pktӢR:cWQ8 :8i:QE3nNi\<ɽ_-|o8Lgq&r;g~ٙ}2?|Q9up4(ٷ2P{jYG?쁓ҏ<>q=OތSاI,'Kc5#O1_`PP {`e`% ER!P/<}9Ի<.b׽ina!/: ^8+V8pdswr'V/B~e'}ߍ]~V/`D{(s)Ym7]wdBƑo5ߛP7+x3OaGf~iVh_9giq;AA:^A>֔awEg]2vYwτXZ:e/t6Q~$}}=Z~}Aq<{띸8Tp 8=Uɿr޴+ݍO<~Vp|O;yy.+Oy6 sl՞xdג}ag_A?70i=XΤ+WsmTUNs\?o^W졷}>Ήo#h?ވ̟INjf}kqq-ٷ p伆*)ح"^ >_5 MYP즟n,!^tw e|^_{^;bǽn [7.6^Ns;ޭ sA>qd՟\g?_˜KGk'O幥K5\7 8c F&2"v&~y!pxcyYg,7[|W{/:eOk73؞%򵥟Nʋ4φ7}!soK'uU`̻r!*Aʧ~G~!z-v1x?Ճei۲?Wr/g?K\[q<챵nةe^Nو .V{v&B/-gΨw/]sb6 k Oq\/i;]L}Ӌg k^9/JylP^#"espOt/RyG\㷒ŎPo<0L?UOx]܇}I]ȹ~w&]mW?N/8~a?a89[~aa7=9u?z.ߍ+#d\к ~prQ9#=E^K>xQŽDZָXdkV{v_+oOϋ w$ okiQG;Ϳ՛z|)xY溆ɯ+>-gL;(.)^ !o36b*:wh9 ^F/fpd5SW#5O|~x&_`>5'uz>Q^4SdP^Ry&=ڊ^2g[/A.s>Ikc4IK^Ľvm^Cx-g):ųFڪeN($ mW̞7פֿ9ApS8 :?pG2@ _YG5:;RXeZ'˜gU-rO&u@%s ,$cfpo,I zo=Uv8V^G>?sjc%5 Sx6#ܫ84\ENw]gNʃ?q#J^u;~H:)n:^%ub?-G+:}*!|aT4#R缈܅k}sZ.a^Tꏓ2r*xQyJqۇ9X~ԷbRbwz*yWYsh}B0~{֜-?"{3F/ǯJ^O>כ{+ܡ|d!oҊ܍^i{^-mZc=_ʼnQS溻S Wk+ G3x~Zɛh@ m~?z>Q<? y]x{kc_ O\(}9ު^mm/`G4wqCwV _+9*oζ+o$[.$Ϭ<56=NJJ>6 k?'}5v: <9W7[Yً|5_ď7HgknԽmI}G =b`,~n_7Fy.,޴>_o#'K/- |žEq!w}[mvNs5V FiY|=.M ב\\*^- ^ Z{o%#hv)Go /ڿ "mGkr+Ε [ꫴ6f$kf/om}VX=Ѻ$oi=};zD1%g!6/kǯJטG}ۅ$vÛcLQaJ]a/U[+q/aY84}c3:s Nlg^sf2'pa&Smky{]/|>-ܖWGoyW|v=jݔCm}\OS:=`_> N&4iH=OؑV_+ĞpaQty F!y<~]xL;kewF|#y7ea!;^pO-!][OꁜVTW$}?<5~_wFJF'{=yzVǃƿ!n^?/% {eϷ(ZqSCtOԟk}]W}C< ܯ9 q7M뛱R/?8csَNa{#yzO!]?oX<ü'vEuuM'/i6;`o1oߖaz? ѱn[+f Ϊc_桽3iS[o89أJz>'塼.0k3(?׸WW$ٓ ?&ݪA6z+g% %/>{w#N8 y#udx+xz3mF%z|8©c>v?ΉxPhhq=UyjWRWÃJD;&^2rw5x miv|\t,1׍=Xq5s*xy_ߜBG[˾@3s+pvaqӺ2x1s׺ /zJM0\i. 
k6xET+"#p&GIHYgxܗW'3x1x˭ݍ26l 8᠑_śQ?CƸ-VFb\ݣڇkeB_rቅqc-.RUܮu7f ص'w'Ïj]7_oF#^ª}MFΝ[wjfy'=}Ȱynݎ;ȿwcoc|oP{Ҽo]=d^/z59ms`\p}/GN7T^̚E0Zwhw?WFg ?C}U_[=o N\ԫ#F~ݖ ;%"WFά@M=Aj|_mr}Ay.!屢y)음wfht=/>lN\bsYmuܲ_y 2W{~]*xfCۧ&>7虻&ww) F/ә8T/}?S!8c[\^|-8޺x^p+m/[!霖p1ɥ=FҗL[Gu9‡jЭAҟsDY3\פlȆ y5w%V4kݹN/F( |##.Wq[wfq}#_ZniݸʵJZ)Լ+u=f^;ӟN怷3Fo$k5g_Y}B ΀_ w`R2gOy\rZ<{ d]G~s;5׼ Fݢ͜@=k>WG:}_ZWD?WV'%_͟+ͯ{挞~uIyWnQ8/PV:n|2eί=_ .q3>H孵!2OAq¡8OpQH]Q7q%#ߋe^.|8Zc=r?~b>geEn 2r׻GIԍF/~,ys}= z?=u+b) `ו6~$c|C/>OIKW/0q{*yL՟ oeUs<_ g27Lց~ҝ7gϛ@\pҗ;wËM~@G|o 2ByQz屴Fɵ\)isp^t#;W"I_76"e qrO=N ywT}M!=sGѣџ޸;K.^j`h{A+M<ǎXq=On LgΣA/'9y|UoBbR KUpJ!! rg@%f"RFXKBp-Z*HBBHŪ:繿dM={g<)8s]<ϱ>Q)>aFWj*z26{Ϲ&Gg{}4.l_$/F@5>YYD>*>GmDH~e5ѿN_Ki?{>\}}ĿkL:~zo$^}|vĜ@k0%x"k_;|ʟ݆G%8>4]^־>.\vι (>=㼻=뀾u~gyz:oQiY`#M򽪇v:tɼnX |7|7zWC O3> w+a#^l~[9sD<s=;1ywoǻINġ9M%o= c:p{Ͻ L+]#b0:* w?s95gM_nHƹ9t)? ]ǾfHifܡkWګ蕼HEKKZ+r/=Iܠ18n_yھ|=>ZɾVG{~Q*|$]+9܋NgK{1KY~z|TޱkpRp)˛!/yʯw:־b3c7I\~S_~*kҺu͛w;0~U#994;ݙ?Bn%8] }ځxٙu<k~&MΟu~N{Ff0:V]+v͢߅Φ/u\MgYݼ{5}0rfG~zyfvUrWo1sr~u՞aq](ukAQGυ~§F >:=a]M?k'go\h3 ɣ:?sLڠW>5$yt %BBI}WoIy*VٿWR/;-IkcU+zy I.tpaO^Lg_/S6!; Wo dE6|r m)>AF8 '{W۹hw;Uf]Uei'͗pG?6{F5#Þ(/|sxLCIףWCSzY鄯.=z ;deih'cYYrT{ u7·SjƒoG&uQ֙$@}uҗȝIot^]G~}|AښqB_Y͖ߓWW靀_1yW4/[>{n'> ^)sx2%'.:﮻&#{{=W|P0~KWSg ~z*[}Em >uls\)׻8|b_Wʡ_EF/d/| xS;K)uSŋvq^ٷyr` үE^cږQo#صA_FZi{z}]#ic+׺9_\[ Z_ÞxsA8ؗۏ8Wۜ^R'`N^TKO Ww쉳k?0bEl{iA~ǣ<$<<yo Cˑ|~35gIE౎OTck?wC xQom}P ?|1YYKd&z$׋͙K4}+ȷC籍">L`n@I.~B AGyסr"_G< ˪ͬk'~8$<mgעZG,I$O=  oFT#߇ͥwC'/k6b={z84A~?:V>ٗ8bץnrQv Mգ߂'yw_SDw>Gm)gdsD{{{] v'Y{Pƿ4OSjl2&ȇ =HꊇS+1xo/(/ aK'9Kpa{d)?NAߢg?%_ǐNW$Xxz{01/I຾w9[k & % = =kuL]؅nv\^Plr(ru<'smLJ#[_w n9E|6bOhT@E.]u}/\S[]tUy~iyK<=[kj{M88.Qrs |ʺ8rڝ yF++zJvg+kvVJ<& 9r> /~N\gm;+7 y19Oh<ٓ1ӗ{3&%os=؂?R5M󶭞%κ}ijM#P޼&N䭻u_%0j!~4qϛoM5?y$xcЏנ״{i\촅7y >uEE۠Y:.zY|BNqݍ=e%dCG\Q\Gu~qgFw{칊WZ s-Esz@D|s|$^>\kۅ6NI8 b]~&W/.V⸾>{k^ p\-jgZcr^1kC}CsnշAOK8쿃I~r:۱F蓌S32u ˛UDgoBjpQU}zNFosY*Cу ޙh/fqh<z'ukkNpF9$=?$x.p33 7S?{{cQbJ?#UO|5xYXEŤ51'=ۃC+{U^ Buk tȹ۲cy!ϕZ}[^w}q"|1siNpEһ`Hßy}79Ui#YC5~h'{CA~k :\QL3sO #2o ~g#g=^MFH};/ĩBk3Pn%yޛ<'qXϮ+gmHDx}Y#A_9r$%Z뿄Q^ yeaKI~.Ӻݔ2EAK5s<^GN~+?鋥uMZuy /7=sV~oZZy} u/:Daǖƙ w#C󓌦8v_#CbVo&~OזoRWqTsYD]m3G-&"f%OvM~{λq;:O GLB $ߜ!bs~ޱjI3(=Du)Ϭe Fnތ}/]T [xЇlj pW!'GØz] dPvѱc6u=r$$dy 2oK#yr-}}W^QUi_m3ġN7Dwg923u1u zO:r-?]6ΣyXFgI`^jbNVpɢMo;WI\Q4ȜB3'^筀/B/$Ҋ|C䯈Wnao'3b/#ouڻyĊ:ЪžG[{/`̯/7<7)۞^\G.Z#ukE~K5;p9t>>+r*A%D ѹCr{etDZD ="I6:ꗴڮxݩ~wfO%9ּNn<:;| \3-ҟWho5@?Q~&|_MkT=.\=u_7yC>ҟM>ӽYy΃΢gtx{)k9EN_[+u텛XW 4d /p1O{$OǾ6#V'yuٙw݂;[.\Vc6Dǡċj{%9 k/Q!~H=rNW}(Ӥgw_j*ÓƳ}3'ʺ>^^m;3ڻ' =5}_]n.wbw|%b:?V`g<ž@Ru:OlB/\zu}k +͵п7r#s|qF0MJ}#չle JO_ǮAycRyٱDAxGs. sv|fp|䝸qk?ug:JyzfH _/V#o$\-4b?Z ZM<iQ?r6Auz 4%#ȗ$V6_ys,E.y^ sK?}E?R宽}pR`rLzN326]agWyc䱕Ïf(R/xor.wCɫ$OoP>(#뺠i}y V8~7z|I0I|‘gb.%n@籃&yoaKC#B^g=y9xgoL?@,<?+wby `68P9tptE*\*gy}XĹ/Xq.yv_Ru?͸3Ǒ~=sq.W㯮5C*WL$%Wѳֹv9Hf27Fd }]ta&h u!cCh-vɽ[>Fag)?B)L% :g}x/,uvۼ-#̵ytZ}ryI/kj? ]Jw5kS=n%6Ǟ]3{9'¨$a!R):z_`wE74s[S3 >.xYwQ1g+sSO]> MuO+!{:tk/Ưe9u^B'):Qjt>YKi5\~cG_]1E9)mƏp >ީ!OCH{iQ}Gg~ ,$>)) q.ho }HȥıPGu(CѧXwF͆رRW9~/>&$֟8UG˚hڸD_k΅4g ml*2\A[8X=9ao96'oפgsw: /|uZ-(e1l5+v ?Zok~| G:h_t[C?IޔQr~__{נ̑A/MxMA7S]xh{?N.;O_b䍹˯<-ڵQW=|\hB<)sX'ZgkW {brr}FȽ\/Kzg|$'aJƥυ܉ Þ 9MjV.)3iBī#qN;װ-Ւ=7%Pߐ[g[ <'x _Ҿ 2H{~:g(z[ܳ2n;?ݣ@?]k|;yq+! 
ù CKk\] b'Ksr3ICRt%r^~ 8B}-Q]yߕ1M;{{s?̀nʨ+ w;.b߶;8?`_?ǎ|TwqD~wٶEsj#ix9|x)xXeQߒ9FY_~2mƳΜ.#JEF0&Kq#6ߩ/{$_9%wG@ ĵtӷWɜCw12o⛾>\zW%-HZ'}u>y¯hꪌ893.P;ޯ$ˍ|5>a/H9t}AdGMK'e;ᓪOħReu<"|{ \n)>s)oyFP(뭝]w;rGIkI{+9^2JPx7K;mzHɧ~Y:9S''q}ߧθw̠nsyL<}Eh7q xMK.~9Hհz%Cvc# 5ԯGޝQ=7LEǧv_w#g\GnU⯳Cwkd۹ب·eQ{:[hĜNO)7}Yv~o7Ї!6 Zꭠ6xvXB{}?"*s;Y?>?:.'IFWp^9.iC/uPG|ICZA^eG>ւV5:g;Gp7`y)QKJrGS" x`k]l%jj5u0'wO̜`:z\_,Li}Q $_f%}} q.}}3%k{YT:R}g5;~X-Wq/~<ƒ/8G]WUב!7}<7un/Wd}T'o$+ybGGdέq|%2gy7?o6%vOⅮʭR]υcڿSG\Kދ+nڳ[GrF}5G~=ԇc#'A ѷVJ|!,z!qHloo*[L>D+C^^(uxqbW&.?RHQ_~F{;edm;q+\&bGXC.r<+}|z@+{M9 =|-5>%ħH]Ʃw!Rr_yLLs)us\|eW9<׹;#uWmnm݌}<KJ7|]WDˠxPnxD}+z7g"_p|(|4tI+ž~ `#ꅟQ_=wz1|mwᇗ>.fQ%u;):-1cٲy3AuҚ7e> > xzۨ ގ{MAY#sԍȇb@=w&oV~hܗ*3W p׭Hz|68|Si%Go%.^u&W|-/gDVjޘU֠~_UV G'^>˼6w>{A)B\,wCx6؅{R?DD =oۗK:WzIw*ԋAĿ:w͙0r ?%ݸJqďBV1fT(r7u;\psg}|s5vi}atс[f0+V~}<tczƤYqcB@5lj߄KN|uc+]`y >jg߫\y_?u{>u79,\"Kݓ <12oOo9Kb+NȺ3ADODޯ= {~7Oq#I}v~9SLMD=S 7$Oѽh)<\!ׯB~-G[K:Vroʗ4m%soWث2yg7/陵-/2u K2o |ٴ/s]1C+m}s!uI=_[:[a9׃~ڇl%qrl^Gկ\v/i7MM?x+zLM~/uFH2#:([\!pu3 l>2LӃy{0uJ-/<{^?!:ځ"I#;{Fa{&p̡+A裩>FKV%n򋧝_S z|M/ңqniR?zKwz)qA:@k ;9w`,a] v60}2'2޾DKд#kC y|G?׾@ [`65?P쮑u WGK瀮FBoyρ);y/{wX~CKw qMm6sOE\R߼2ssK}s/uiE=̭6BӰ^ϛ_K#`ޠp~-u{Gb,R]Y!{eoCg~Jd6_AN] (_߆|:@boUCvs 4Sz j|篾| ? 弧̢#%N|LAwfG3ռx;nxS.z|2^I #|v;#n?q\'ܾ̩4W?{)C՚)ԻJ'`|Rȯ&Ր"?3G,T2g#{qrCS;j\ %K^ƣ;t>SV?:$nivuf{"ϑ}1/o* ltcj:䍀_I> 諮W ugh &O*{9U7ד>:ϻ:c%>yz%ȅ#NǨ+t: ~j̑AN; ȝ#y_J_gZqE'5X/ h^oKs0Id3!Vܲ%%|7$a"~MJ韩k~>YcG}+ur'AwJ唧_˺;Z'(|v혝1Ho2EӘ~;m|}uNunUm_&ZӢ}MV]O\v9"NՎ2L'=9}Q|~/~{`^ŠvϤ^n+~8y[@bW5Fi$בwc#2bBjKCWzk/ұSۻ@2ѓh||pU8qmO¨ ?7,]u.o?~?p?W 63휛 j<|M׮~$8!uon)=_ KR[a/jj,GO;|i3{xmn(MѪGK9ѥѱSyaOewUG;0Kg4݅/~ a,}o_O.:|5G\ߑ|aww~x-zC_/U5}>_MGrQ>:vawğg=/={IbgnnS#xZN>Ns ǥOÊ4?H}W>J$cM8̳[Ns]ى3򉽲Iyw#z՟“N^ww}͍4p2Z;uqX=g[;R$fa7gOGoJb~0ÜE"Q$K{YoC $OK1+xUjO:3{H}ìrP>D'ߐ:J+aC7sGMiR'jM,}"R5z|cSU9R֯ĥ3Wj"O5YBE__]d/ͧ}Wũ뎮溗Π_>:Q}i_细KepPf)8_(֖+yU}4+`W]L~Ώ]sIŪ=,jU[/cϚxAuqrCTť:w_i :7ckۜWJ<v ЃҾ"sM=~ 6.[п=U}t[I .9[}չ}Xj6>uz7msG^^74mduE?h<=k_v3suaD/fO2Om}&}n9.|~6tˎzT+oj5M9eSb?߯Enu_q̧%쪁ta_9/_?n]i~kԓA.,G޴~$h.zPɷ} |ޔ.EyiRށ^z z8݈SkV;?- 'i-\n۾҄A-H! h;ɇ` \ITG}вy=]jzWк|3Aqh;*''чwa=MʀޤO;-ܯc8vMQpE8ߤ6NkK~||Bf՚g E5%J)1';zv %ځ}q_'jzk-N?zwtV ]q^Dqơ%<W~z^FNӫ )8ו|t.5\֎(.[fɜ 3s<87 J ņMKoƭ(/s_|VW}8JN4=|s>=T Ϻ2yZ#9{9#tx{P˪_i_ğb8TqEOR5#7.)I qɴ/N&~w瑹}.Np5 \ 7ꘛٟ_7X_NπϚ'~oŬMm3i<$W?}%;y ]^|?ağmV#cIjo+UMo\~ygЍQlssS7|?)K9>///t1P{+&v%2d?t~_.wfS֨]Z>u4̷藖S {Go^}xZsn"$(ȼ1gO#%?ެ~쇿;4OE/c&tr?2J\GO"1K;#+-ȳϱ=6^8r{lz2FPgoL**xz _jf;{6(o4..xW3vw/ޤ_X:9{]O R7q]\WҿF/tU*nR[~ϐu Cmu.U>ٞ?|Wܿ#Eh̓8$^ N޹8~P7I׹:_NGV֯\'&TC:# >AjkF1 \ׁ|Z^4E"9Zu;2ut, ~Z״ogӧO$_[5<9(qS>HNϒg}Tݩq{A.ד]5z-y/M@ވ>PaO`C|>ɟW7Ǭ噷 ;C +VXK[.rռo\:vMuկ?/b)P/*|h]>[4?I5gkY׺ϠsKKR~βzC)U0!׹\/'uD$'ҹvtM^-ܟm$~*cz5zP?Tg!2:r|~U5/F&ڷ819З1'p+eO!=C簡_DHU?>ŎsE"}$Oࠋ˹\m_H*'oGG#$ݶKȚ]}ɹG^ٺ91 OS早=zSy-*9_'8I(S`l!##-Oa@~0b/?o+Y{{~/4D?k߸BeGƓ{JKi5̮NVK}D^wÝ}׹KzIݔnϴCwR;Lإt--_\AuOz ao>Ec>h7>pvWByEwf#Wg5}/clGϾf^'ƱӸS;wfC@'RO7?/u^\:XqYcn%?UȷC']ȫ=s-4"=GЋ)ڏRyڷچ! 
)r[\uH} )vHd>/I݀~h1t'V^ߪQ_3s֡s!$svק_Vuq<늽/v7" B7c紧{^4x0|EOj\\$pdx<"y>:{z5c9L?ȟ_~".T:}PG7 STP'G;$[⯾K7i^U_}.x=fX h܇ӹЕ 𻪏8u{}L|3^ ߓyvGIX/q{'j__-3߯w9_qoݍ<(3}sį_O>%My;mtшuٶ'5/ߩJ> FybTi'Nc+ȓ ۑaN޴8K]Gh^yйLV?Eڍq$OGG4&!'!9W{E~UIM^5WUy#oA~\];pA*v%?YoYuH蟤zGي\!O .|Η8I?J_Wp+9tksOt[DoΐΊW4wHݕK1jNS2yg'R7+_hAvq ryHgugf|g}ǜJޙҝy>\~ 鷨Zs< qƣ7QVef͒8@y)t韢~Fgtz\u}pkI|.A[;3]cJoN=k#=R.bT9.d߆~ Y9/ >9_dASv3t_1O3 z>@fIEN w]>xIwNC#?$Y5i!eȭPo~v JV?vo$./L~1*/}m?@>qo;?E}L/3 KQsi7MlҼ'Κ cK>^ڗ˗u2WC=;S$Li[X\[A R5vxVP744ǁBn/4~$4>}W4nj%_9^}Ոviߣ9yK0\UC\u'IJno ]+:I{7 t/???_]g=;MU_j-_7#t䝽<i]b7zDya71&zEW{Ҽ|x'@O@7!|*u#.r{uIQ[Djv챨xCڧu QyPq15h -uK=Q'oQy-E/^:TU֞D?!u:[Wpy9т_Rg[{Y?FwbMp>w>7VDUp5y%V+q[:H#$(5#֟HΫz)c:W?^SG(bΛo*j{')M I9w > վٕk OvsH_] 2o_J9Qv}&uj=ٛcH]*C?q9ڏbn6q |=ny~|.xҺ[5>jTQ#;["ߥyJWZw}H RZ[ *sGFa_$a/V³Rl?up΂caK2J8 稊:0'q6ͣoA񟰿$ϧ~ƥ?ԛRl>+vRQt.}ww;~/DK Yc[gNgrW>g{ SεDuEo)^<߫;^ o s0ENkV/C[wlGW< ʼn|*]h|ș;txK ˡ+K),}-Gww!mxEڟu<: ONFW{oq@ugo4z~ЩؿWo|>փ3  |2zluݗyy 'KAB@.v^j¯\>|j Uox"#Fϑc3^ꟅuVbUJ"r$q?+q6D;E?8]5jNߞ畟<ԍqNssy@t;Н 黎}: ;$M2#l ]>ӯt{΀f'sռGpW>3To@gG aSYj@ | /nV+xpHqO)57ub}e\Wƕqe\Wƕqe\Wƕqe\Wƕq}[Fk_*~_+W)}`ⴾؗ|6x5?\f&Av| ~w+/L(%9iك81Sz?ćz?ć7Ɉ_^ZCp#W*ck;5{AތP]myy9.Qgm,G|VpÚK_VkL<]S̨GwY-4kJ=H:})7mfQ_7?X@u4NL.Xzs7yO۔u|7ŔmԳ4BCost5??3-؅jO^+K~mKL\ǝ5^_7G|26xa0n$svh>x?B}ԙI(>s<9ZzqK`?sQSQ|9~qɧ'K|53KQo/~<䯌cQ1z5?+t ]6Hk|9hBwuϙ9So.)vɬ?铙z&z{C˖FF >u?5Zl̽:c߬;Q)XI/K[T;9ϕݫUs,+a?+L;DM:N7B>C}7*=&5B{OI=Rŭҿe?ݔ47oqyJ`3Zz9ځ W*|/+w?$^iMGig9ԏ(?M]C/O ß :Qi*1??U{󫱾zK;зoXs =}mtۢHN+s@o~N*YK.?:zSdtSHj]Z0:R6~x{v x"߬߰Oӂ+i^ΥzεIת{m{rf&6JgV]7!{q8ꌭz+FKxg܍ S#g98:Ⱥ U3(j< c'sZ ;SA-? 2)QtW‘Zpgm'S?vBM㠿?UM~2!ʏ*2=|-k=8J:+;:I:G>MGވ?ʞ^mB=y sl' /eoЧ6?^IG/_ j̏ǔFn݁ȹdf_0XJE(t%R*s7!ֽ+8/7`?=ΜBoR>`%SnE]Qt)wgg9,{:f6>tc?miGKJȇ"bP?;]šg\GX=Ff㡥˖L\ 8S(}m/זoz>Ov(ի~m@z1h_4.▹T~"w2+_|+z_?;ɽY uqlпsL><]Qq ׎ NAu[ש*m}ot <ȮW~YSN\GOG޹uZ ¢2הA'9p.Mb8 r|śyUJ #P@O };}/j7@W_D>NUȦ/%Xp^OieQcO_ϝC={:]i$ކJ%F}8oc爼׸W}v,Qȡgl 5_ga|zW,o|?s4UѮ6s`%೹?<9qD`1 b{jCJFsasz]μwF^kssr V }2=%,f֩vȻ~=Syo^~7$؏_z89,648j-|yxPEY֛'?h:vMpyЗ͆/8ҾŞؿOW5n~4SeR~~P/}rNp$nTgә}ݳ0Zr7K=Fܣ7*}M5gŗ'xߌ(~lc:~y-Sx/$K⽡xt4G~oYvo>%C#xα=8FA #W ~8^;r0V};$Gһ3<: ʇP4zWI% oFlO"_ =83;-8μ(zӸX_;yS='#~7CMdN~Qc{N<`Nj WO=.x88]c37ș{դRq|5_AZ\j<+|JoR< jϛ NGO\g$׵+qWWa?_&qwuˢĮq߆F_~rJJ_G;]v,%qq]{7r@&?Bځݧ'y8fh̹25 B7cx~>WρH=$Nf%x#ǮF_h`G;=.>9/"S)92_zRG<_hܣjGYcbt)?^<_*nR&jMXГ=|n<ޯk/4=q-Wɳ9ȾWؔOQq9gWA`Ϋ^>p=KO%OpEqhE3g<pU4Bj>^ŨC kI^znV:)UNR;W>e_+pIUЇGN9!'>a>א_K{'v'8]m4?|~p]M~y#>VmMOVFu3n3~bWΠxq8_va|skcZῈ]6x9~5~ԚC}H.CL\wtJL'R|g[/oAـ;?Hn3_q-kg6=.wĞu?|K8(-=r__ ~sܑ<;v)ysoX\mSxߏF.OLߑ83}?_-{J; Q#}ac=uB9ynYCT[]EW4tscIyJ^َCH>o W ~{OA==ī"~Q9@xoYCn}%z֞\0n 6֕w"mKA͟WHݒ)os@wV`YOKݰO>Cn%[G#2*~}9f\$a+%USr Fֿ>H(u.e~ǡ0[KaA)oHm-J`zg3 3:iu}b(WTjGEs2_ cqSc 堋*6/2-]{<-52HBWckK~;Ϭ<Kk^7de;.~s%y_fW`CT>̼ÜM'@n?Μ?iz 'ӑ?eZ$l?~S$Ĕ5GFk$[+Y8%-($!Z,%ȱlaYJ)[TQywO~y;{~F3=X %~< dSڭKgX ؇Cqy&WH$r׵)zWP|Ąu95Â?Dy]-_uqi!L%Mxjge[)x)|bc\kλ?y?Q*~;:?Q~UszX?9:I\Ru\FyC[MN[Ldvקl<[7n/ ͧ_[Gy۱.R7[7C{*+8 \jr`["r/Z ?q}.w^Ow; @-S9)O$xs^i?@)RIyzgpàx=}t%.A.H(7r:%1p~_3ܝ> 5p%< `(wRqԋO3̫ا9I#=[PqJk_x+v} 7'w^:#VҺN(?£:ȯď+uR\P,7y~KTx(R^7^xװMd? ʳro?~y7fsOl5yWd^cl9d2~;ыA9wz9_c/_lnJgeqq1H|dCsȉ#gu([F֚Eȹ5)Nur%Ł3 ͈]wjcCTlKRț+mqč_{l'ѝM_oޫGk?ڏ+_AE=r$>܏cG9u$upj\ݵ<. S!Zo~|HDk؂\N)zi<~+|{ 5#Β!u47ܻ yhNmuuq";|O1海Pstˌ3cq }9KX=$<&ص?,xm践Bxv 9_>?qh-^GfePY|_㗋?0%}Oc{%-V.zfM[jX4O_쏮Ixꝙ<6컢}Vf;KCK:?9LJ3Z4ou_gn~>k58ٮgT/>:@0k.5f@o'k>cO >qȿw#~F|kAss }~c1oe>Wֶ݀^%m~?'l@o= yYy0zO+($} j_$&:n6ځF;e:i]7^kL?5f-#?K:{F.ታ_-3rd^0!ө qڂ+zU6H{7 nqK?gޥ||. ~g]܊8SW|C.e? 83wC'+NaD>7P;i_G֣:gF [OPTa[g>U5"fX!vZe9 oI\WS:?<*tι bW~ױ x9O N+sΫZmK;Y0MⅣ[{M\o컃<79{p*o\Ȼ+;s__#N~ⓅܸZ|^1A:Ћbuc׻q~+Ms? 
l?ִP*"N|}}(h8RSo{L[i{c)s=oE~n3KNd^n 8ӄgU=TUqW H_4az݌{뇵7#ԬX}UD3FyU.KZ _!}Kþơz OvIWOMEixg:zw­ A^&F'ϻ Tw$j/A]_'/}7;zwRR}DJ7rsy=xó~Cӗ/y_e$Dڹدп+/XGK25,E/gGg#=\<7sa'u8Z\Y/F6%_ޏV~g~q7#NF˱n- ugh\+Ix$2oT1|ē/9ƾK𯒏y>)E1d>[2{'/ZN3Z'_fw oCru``-w.TCFNO2>aY]E7%l8%SMEJTj8V:7 :S}jDgW]* ӫ45_2Fmnp}Ǖm;pq=CRDЗ{y-#qu1:od^D4SڐWI}/W);zGլ,S e2yzYԃ!/wHsWgxⲖ|sL\ou;T_"fΞQ^^4_%9*r^%4X1"G0 y:Y]^9Oλ,7\M]W6~2\g<)3yޥɛk{|q\嫭FX,Uo;m2kWVe="[9}ri"q#-v'DZ"bGW]xE`jK?{*-sHsҟO]ؙ }UH\$3qM{~'ʓUSN .9 &ǹ;yu^cVɓϬOa7~1 ;{7m3wcow?x-"÷8xbĵEG\mDhm;|Jþ%=:7Y#' o76 ~zx?6oo@Gy%.>?8+>??MXu}Jue9V}Ӽ4tV~:#.I! Vd`g®r:ƿm)ރG$^̯-]J/.Yڗ:UzI_?O5oJ 4|i6~֕? >BqGu`WcTMӬӕdHI)mwj9$% ZMTxi##?P m>w9F@~>{IO?sRhiq@û_F*k{\~NMGs+r\D1Ajqe^1vSۢW&7V}8*>c-߆!4w!2S8!@ H-h;>{"yֺgC53=տ߽7vy *>d:T|CXU4I/b}e ퟑ>0yn!H틶 @ڎuR7p& $R}SoP^xNTpT_m.f/? | wl0N2?YsPC'֣g!Es]C]9jsvM_Ow9-3>r{,r]ÿƮGOL2e}}-e䮆=?@V O͸T#ч߈0G@,V {WrNlO" sW+])[ Z{tNtN.xS($1:!ϘzK߫Sy9-58 `׎LO'h}Bz⡽׹?YC~c n3o]ʔ"wOP9,IfR,\O}:/Y!ϙ ϛGÌoρ ʿ$~kWaR?;}XCocGsxʅ/*;]w qrRcnaef;U&qOSE[ M{zo IAVM;?>ɼ4Y'= {ךP>~Ctb94\}&vrkbsH/.qWcgHhLp O ̛רqY|5Ck&)+5nr4$z-;ǪU}5wN[|ܜ=K^f(?Y>7yK}ҞIM|5.Jlz_?勱зICoKm=Bg|~]~OڼؗMxuq8RdZ*Nv)~O$ߵod^սr~32OSy:A8Vbvr)oy{~dN,v (ΌyV|IAAInrڣ'F\n =)%7ѿmWW;"2r.E<2??q̿zz%uW$.n`#Wf ȹGzyϥ+태]8Oi>]i;ؑqq=3ޯ=b7Ww}mx7.h<}{MÇq^R<-V{7BXK|646xT%*KGC-j{e \5 ߻kl|%o8ore~ sOzM,:GZq}è{յCJ:Лg#sJU'N "?ҧߞCЧxOYęݚ&wB^17fTs77p}ۖeQpS {N/y!{QHL>'C޼{nwi=[ yy;9 t /+_3Zp_E<ӣqdUɫ'ygǰ< 2aԏJ>'uٻYw'C&%ȟ5ه:GƕS;}G:FGn%@.{&y܂x6j{N18Sw,|]pER"P;+uR3b>~ vA ,aNs2 q7Qȡ.3ΉVʺf?ЇU~<68`|glUjjv OpͺG /.7 L?=}Pڕ{T m#Zsč[KOSd:U cOOvӰ<[4yeGeߥ)̟.ʫw"|0CAu*XSG4*)ۓ7O\ܧ%WkLc_ s__),?-}w&Y솼ȾwS96z9xŗ%~@:-7Q8>iW流 _ lxfmp?ߚĹE: ;N~ޛ.{ymzyߜH6م|({׭+qйwX{J` .vz= q7;}F'p5s^?8+C]H%N]7G-|'xag)j},G]/ONe>_DMyatZ=b^(ɗe5,%,e27`TX^];@cđO`gR2qH癨txZ6_O3WHq/ޑWMħ Gc*njz]zyxֵ:ODS_Yus͙FޒzFwF(!Dkw|h{uː/}6wBòGnzrJƖ:{)W.=}'l)o:N>9k;|+nMݿ3Wλ#|eg(^" ~;|NʿcIc龜_D*KF|+|rrrQ!7󀙜 u_kq@pn˄m7ˀ CyIڜG+}!}& Rm:<Æ_!4BqgQE4 ϭ[g{?ވ]]5 l }#2ۋi ;I+ƧHx?=|_ 50'=8߲RelKX d;#uR_Z<Į""sB2ۤ_#9vdk)'AsnerTa<{DЪ>{Qo5-zN"sm27bM𝶃d7#^0`-s@rk b.~ו|z03IWj{nK\oH\ׯ)r~M?y|rS9B]k\BCҀ|CJQ Us_Nek ];^2݋t~w o4bmS]&%!#g!~.;I{/:'r$ড়%}v/Ml"MK8vvOmHJvMwO9}Yp=qX9Sg F-V~:h%ZNF/G9@jwk]r%n>0$LݻOYB?f_39CuOAVϢ~.uyP}mזJI)l:Y7~oà WG1|oͱ;\{&>+5k}><)Wr#*/N,“þqe^y~9w^#~"|?+o'?-6'q"7G߁Ȉ /铎s.*~ wNy?Q( o-~[]قdO씈G{ͻ|N57*ޙ<~ o̓o58cGc:v N~bx2GyxǙ y<(ais}[xA=·':x'4\7J߅-׾_7V?ԡn)svOsovoeΜx|e?ꬼN\;'ZWP_OK0DG/@Mp_Z?Wx~׏_?\܋yO]3,ϑi[+QqSΗHbzC U>OӼ»*xGmskm~N&oO$UI6'0`Oȷ3n'H.j'? 2h˿Y{@!{ZsOWM*9f}Дī9I>V)}'J\n/x+|Q3}oDJp;9)\#8O_Ao p 3W* #7WsE@ᮺ 7f_<ML)hǹ ߏ\)N9$AY/ely,4!i4oWmy󀄯oу,ӑ{miF^,t77"ERגSz?UXq,? vO~k]x6qJ)v=Nm{iPv^ lUi}3 +9oO]/^^ ^kULu~;/GFFEn23`L/J?z`5|3ܔCHK7__#a ⭾1$882;'#k7;VcOd>}<7r',|i<aD:Sߦ|Qe/1|:!-yuΙwasBK~ݗ5.}6[-T[v[ ݖ?ѸeMSH1W8%sb՘?0 I/˚˜۰2R#>mr߇C>fBOYei)ubcRfε+yx{d.}fk'ez_I-6cOcipb5'zJ痼dzv8xmD|υ{Lν=2oCuO۽~&EΊt ޞ~'>ScN,F#+\^e_չE~/'IǫĜmjOzDޤVL_gSg 9:Oi`cJ"kyZb yV5x%ާ0Rg3㒟7\EWRh>}֡_&6r>LuIl dߘe(/IsG㍯Kcԑ!K_ \BYŎ1 ^tr.?kA0X}KЃ+h~;ܕ>@0hnRG=9{; ~~L܍|SG3?+AN̼I'iul;O~6qz ~".4&VbO#4,ND^dO#s*u֕IOs+V͂iDJI~lNQ#1r#orh/DE<$"~"MsmMy>e3W0\8W쳝~/ 6k b۵[?ڇtO}+5zWD1V_; !=}2)`;wjQD SOv&3cg5*Fi3Ux,<6Ƚ}.#̿N^/(5c1F6GD:5>k%|/ese~s.Qو-e6kϙ'Qju9Wxg} ?k _?XOo‡9S''y/;<cNc m| }A6frOIiCd7T<zֶCՋm xdÁ*Rx#gNإsϝݝ9 ףM跒=zA>8MN?UZ9=F~KEV8SCZv3#H/8jw\Jz0|ڻt/#.h(ԭD[;G/"#ѣa?J+Dƾx#~@oEk}Ey{}$oъx/w?g yWz%/|lZ~[K ]axKe?oXk+Wߙї=W_;ʆ[59eK|K|/,1ΕV}|n$L0P3 ANk ꑷ! 
RW]Z%+ rڀ߯Z"ԫ v">xx{s~V{_g 8Ծ~?8] 9wKG,/k#{5') (#MGȧm7gsNV?8p!Q_p6l5݉ aL j^OX![m[Owx{C ڂ\A}Sa)`KYv~g : 5;Kokksw݌\{ Wcҷ'1yV)^i qOk[^щOPw~' p2?xxaoE}0l; ~ә08^16&q͐;~1'g!?EK Ғ;N0eB{o>Gm ZCۗ1Beos1:OkLoO^i~ ]CQz>zfdn̏< %uo;__X9e5@6mJ(TsjZ" R%n{X{shLyL^ hrV1˾S}~Wt-S j|fLNs SBd?c?9IέSO>/aOyi="yߢO~F}W) r~"qY_ws}:WWVjn%q^ (zK&{>ݿƳ#qԅ]'^UM@d?uqt'yw&8`}𔨑 O >yqxyQ?W7-&#ވҏ!8oSfoȃW^PC8r)~C8T<msY_Ƚa~Ps!κlcrnOsU1.-?|}YD_?y;d](?Nf$sq!|65[{`\gwS/iRz{x7&\<+߭~A~OaND5ZQ~m7EҚOa[J}KJ^8ܕXSdokΫ̻QyW2y݆GT!:U`*Uƽ@S(?y^~q[8,'ٟӈ~ۥk9ϰ_H_|< ?-\tN[N昊^[{+r&ÿ&-!^9\4Aوw 7]Id\'#S" +|ݒWf/9@tDn9481_7_}V6vүa[ٻ9⦈;SBFTs%<ÞȍΨi3SJnк oGWNE)^cݓq_n$zs7ȭ 'oQ\SiS:>|\n)~s|7&u*\5tƯ){&^.#urΥCvG??yֈ,`|9^ȝ?/CyDRz8/7G Q|٭Y-V/1WCxz?b<Yc̘3>3jW欚cWӢJ^B\q01?&x]>v߫o H#؎h ?_-vםo'G"_bt6/AF_󏭗 M,V?ycQVZGhI?w~E6g.oTmh֟zv9u>%R :44~?J&c#w=߼94[m'9~Ciō̫G~%y}-Cx%cŞ9ew[‹ 4~T>XGc_uI">|_99_C.iBӴ/%|ߋȁ]n9WhʳIq}ߚOuyB}og^/͸"#3zǬuLMcdnmUۍ_ԛtN#:&}z`b?B@Лp.35aq+N‚_ov7g![6cw;7NsUun_P?e=H_5'#؇=/?Q^܏\+yaAXoh3u<ӻseQӘw0Kh#G?Z^n | +ij]߷Eo6f!|ޣף vTy<:N26u?٧v\qT 77;,8[{{*%A s\fD ¯KN+nL2borw9mw,$$yC#<WS ic/s O<'֞b̛o.r='cs0\psEdއq?T]>(ߛp^k[cypZ.t$h_pCbO>;H]!P=_6Ǻ׵5&Z9{q8^үbT{#vZnZ#vdx%^ dI+y귇C K^ك;)L=q#^ڜ?Zډy.2˃g>{~.ݯW4}ަOG|>\ ?eu~D]Tn|rO M 24KF5cUvFq 7/8_f{{ F$3~Dz?D^o]yG2@qHr ?~O{E=#ozFĚ<:!泚oyn%V>'<k`x> BZD-y_IL%Խ_o]H,8saj}F5 (z9ڇS-z%u;;+D|]o~W'_O[g}m0كvBU\ȡ]V=3* AQ'Lw[/ '?zSa&7)qݒ~!7 u,qS'>WOx"a0vm:+jl=oW{tgk,7ٿȚ'X7煿>Aj4 pfvW]y/?w}uYݔW VMMFȫ9eq%8(17'|?JrH'KK 궾t%DY8MO0l!>QAϳ$x}<3z>OxE@S 5n" /\J=x"'NۺM(eRg&Cl0b @^ 7;)m ]Ly9fcGsrGox9?_ٯ|po^KcVq?̷@b<z''j0s$%n9( ֝ #\Emf?-se=|K\eO}f1kuagy[ɓ?Y;-ױ"NFEc$9(W--9?0ʿxx/|Rʁ.1r55a;ZGuN{ fVõPoWޔx{X.k^Q$b-c}{y?,Չ󟬥rmtf{*-2oX}݋s_L><`wj\lGT8{9ށoI?^W`r&|xs; ᧴_H)<򣸱o$>!u}m=>'#}8G{e/*"zo{:#vH.xbӧo.f\s^\kUtgMqeOUO7 gs^o>UzĿRvbA@$vJU@ndtjZ0 1z?,{u۔AplΞSU2.J`@tJ`iUzi\~,#>JChQ=ln[|f[jD_qQ2WH݊?_ߔWŸR\R= fW p_`g w)s4~@?I,}ʇPX+X͟"qDz`˅>\ߍj7涼!|}hePg^yk=Kq(yS *s1/+S^z=:@:A97Ɍ P7*uɽA%uǒ_H KuU\|?d#-,zxߴ@bc+>iMJ}k,j\K?rﯸROpQYoмU]+q 9Oܥ݈]"7Wk=qa,A%p|E~j<{g;Y^7? Pq͹,Tqo;%]G~+UZ%-G׊~S8f^>;ZmSS?֯GW F>UDOz+>]GɃu(?_lj͹$>Cb/FoYSf?}=ftGM&8DNpCCծ{ňQȇΡx~e`w ;_< /L)-4G#*/÷^>M%n {CJ~JWU .ff:B>S(#?x!y7Ÿ+d@|NZV{aȉ .cO*2ۤ@ίτj%?@R7:gwσ>\~O+s|"̡pd^ʳ<\{Ƌ>C )2 OfLJXyѸ]tP)"} wX.ILͧx?+׮"  "uZ?RWyƽA[;)FS\g'mgwd.WNHuv߫R} /wQ/R9+$^=./pOۺTyY!ݹC*7>(hCػf/vFq[ٔyw)~I>}^xy\ ~e_A__}HEmg;H\{о?I[O.nUXSÿ6^}ZgyXC2kLިg=n[这PxD=_Ha. N, E6].u]\^~3.7BcslYir(auCwG)x# f^D_,ȹKzSD'jI0^g|O;wȧ{?)I44m]1՝oT(?;S?_ŏxϳ;i྾Ձr]?A?:Eӻ]\09+v.!뜏u׺>NDRoL*R흍VBI@E{bՉGv֧n%Swᣙ9 *^;rw$u)u8>ڊsǞO ݚ b_9 NA[y9d[*HKu&1[2qC'>];hNjJP|y+2l瞅/4t|% Kدf?a |U|@]^_+8Z;x՛- ?^I:?#n&ݥ> w"͉+_Y[w7ᑙ7s >k'D-lK?Ȱ5I }1x߆yU9~˭D}VhuKYۙʷGe3e"5/Xuͷ'3G02o̜v9?+y@Α#;4)[zבs>6q;9Ỹ6jwaN?oO<=v>7ؕ:s+vN[eF:&}bC4r _ D̑~w(Cp!kLqw>; vi|T]*r~u y+UrS9K2spQѧxnXguU1X\\%} ~{ ܈M7;A{SYZ(/8vJs2}]F0'̳MJ4gecF^]wŵnPrJqo#S7Z~I ãv!|>xK kND7_gwb!Vb7{|s4~-WqʕjIn+³Xv%;K?z&lZ=(WdJo̽}[_$VLaw8>8@k J"_&^٩h"#|O@^Ǿ)}UVo=Sx'x-,vs}#nb&}N1 B\\c佒ė3v:'z _ɎM7wAmYҜˠ̬˾˂>D ޶a xR⪜oaf>qeϝp{/8O}6K\i^ .܈[w3p}λ:=}5}O6~ȿqߛOQ懸y27l78_^'O{Bnܐ\0zfM_ݓ Iƾ囍qWf,[joqm/KPɤP9U?yKwZš}%!N? 
bG^gay"^#,g<뽖5.?myý <~?Fߣwߗ>gsTz\IIp)Vq=p2\6:~(s!}:?k \RPq7ϝ> 8\.Q|_}}r;4ڜgwÿSD bɜeb*ĺv<&(NYV֧K17H}|S gN=YBhҌ8X s?M`ƀ"yWDЧ[E7M.5"ϟv xX;cd~{ۖʧ@ =֟]hi\+gHWȳw?ۿ?X"rK8yFKXEF 5{'Kcy i=wV~vąaafm0|ksr"y4O^ ΒXT|=qJ^D]n //ioߌj%ߵufoj[iGUU2?SU%s;-x@ӥFR 󷝭!x˲_uޗ#}czraUdNȃ&8 bыkډOyPMS:{/8sV {X_W<) Ņ7 b:;`e߿.|zOVD2}]-S8iv}X3xOsIi瘃1_YدɃ4i|.ܮ/vؚ;z7^,bFs8=vǜȼ7\; 2 @ΥWH4 Eo|5p&xs=iP)upwg-q,eֱ^u~7+^R9~Ǿ_w77O\'C757'4)~Gy%vy[Wɉ<s9X`gRk99g͈a^K>uktwanVt)ьcSw{)nyZ`h=ܲP;OXܠձ4}3mğθY RRuwQ9̯Xdaje5eXԽ^8nK|y(9GH9sd=cN9T.=·P~hV9ݶ#Y(h4|Qaug3[AԼpmY!vz 0ai^L"O0qAy h\vN2)f)9 ?Ln ?Ank9$1;,ykKl`nVkֈ[eUnW/nEŎ GN=?;960!?nn bO z "ג<;K'¯q:|Ng'4#~;r$xb}Ѷ!^l+*J~oK!ybc-`^xkM߰|w'n&,jҾ _ɻ )|Exuob%M;^e|:?JO>H>_ wJBX@O~W6>B?͞FU7YiJq9;'oᯨY|m6^&%_eW^:(No錾 D:P~O@\ Il'ᖒ`n$ăkk?ij~|m+*/Bmu.D?;;JN [Ozņp#9 =Kǯ45#Өz|xnC3_ ɺ7W;"~Ǫ3aN9Pn>O#wO|r'!_LN ]_{rT.'];kq3[OoM9g،ydO_w>~)mkrj-+ :?psWϺ;y9GWf/*\4,o|0 VZwQ9c'C$Ժ拝wNH^\7;[Ǯe_JǼ>9vDxt)4ȹsA?"]Fͳ<5~j TQ㚗gi{|^iF5/s~B.-y(?njZ-s*I|Hz]x|}_kht %EStIޮ;Yܺ}"e`<ۃ&̽9\Fi=3hqiIU^EZu"5& RϨq~/(?)T"[5?hމ_:#yg9uoI'r5'3{CV4\yy?Ow;-=pp9@99f@^t38Gp|?3 ]v[7u* Vb[Jߍ~LMCHh}*3?|]?:bCW{SPZ*9kղ E8os=+Ƽ~~dU畺~%8G9cGWh-хWU!n\q%c%9;'B+4~ iѻ!q}9V˅ۓ`gIZ,}+-RNffЗ]@^{ssvޅ/.EO*||{p^W _G/N@?1_7oy>;o<(&zeopGX7?i]^<<"mO- {@/o5GAE#64$e nQ!윂#MK7>IsHh'M'~_nѸGZwx:}\Qk}שuVTJ,|⯵.nk~2'R {!PY@o֬_@N9{søNЗY8t{äd˙t霾ݟ bԽ'γQ蛯.v[+f@~)Ǒ?~@l|\[$8 Nv:xğҼe cYs>QS;/ݗY .oK]۫:4Hߍ97w"~̂||wu~O_IܗijBp~μd+8G"^/q!({:,kԾr># +ȾHMqwO)|[p9!W悮 <~eCp}tm]З!`9q-|؄T1 |3|s~y0_ 􄛬 ~V$C?Zu˻ F<.׭ }1=]5 NFۊ3nyϙ uZl*WBa8۞?y5j^p/edu"y5K [b? ,ZUyH2'9yY(^Xи4n߀?؊87kXw!9Zr^kFCcROܿDi?t;eU v'٦;l?(כޅ:}y#[s/b'δy/Up]st~l`9ۅa (~):^[?87gvӁuWn8% UVɋ:^@MiOd.Xp^g}9l} FN7_ ;)gog#+*Oƺ7bԕK{.[·Ʋӊ[e~ c?,`%nhw1P>7:/7I1oOQ<ۺME5@NY^*)y7kLO`KJ o@ЏUx Jmi+$D_] ~ʅ7,Lzlۜ2C#јik6;[Ѱ%D?W_߻Kpjչn^Gs}u3KM e_Σ .eC*{.9xv`%ѰOzLKc;ߺ<._ WLc]7/}I/m %8`*x:nRt:L}7>:@AɎ#x zпywbO7svg ׫{=6>v.t>9=O-e5>^Ww怏R퀎ζZ!=:e.E@oV$y<}26C6u%tgN $M3]Bˁ>:0wө|ncpl>;qcZν؋W}eg^*~9s:4~OkF>p-)mxO/H>sb_}iW7"UG~KOpx u}Ƈdk_~_&y1u6.eN^M?ƞH5{bQ2R~;}:k֯yOB]GSO  血O| ]#`,IΔ?m(C5Yo{V< vmd?1D u:־:m<ۻs"|۴{Cn˷훉v\}V?%S+3;H?s il+_eqH/ֺj.enܗ^?i8= LvjCZ/~č~ :{~VN3R}MR#?aعo.n{^ Ar#N;& '$,#~c v_Ͼ('hcorۛzp;2@`5v嬳g/q(ک`k6=Ԁ@ cx݋﵈/>n'/ r?oF؇kG]=KDBu'סޗznOf*WiX"gX{RW>2 fgJ8S;$?Eq΍EvأwJǯYξ>Ⱦ_5vn}}~jmA?a_LɌ_}? :cďB}}:4b ߍaiW~"e\_ CYNL>";ßscg8 ~jJm䳭݃^qkqx)vGSY7b7Y^ͺ?3|r(o1[RsG`4?jT|)/ău%fʄqQ.ɕZjUG|Ih^KCu('Pݥ6zB GpB. 
zXr֫u1=$Omșw7Q{MEp*Σ~C3?`KIVd_>[b`WKOǙG 7'VaBo]+?%.e7 g.'sA1/_r11{؁ogѵ"UmlDJו#ϷUfvǶzx|Pq5ۋQQq\&3c_nl_$ '/Z ITf)ubRԨB|hrFoWyY/3zcf}3/ׇnoAެɺ5 >IC_W]gb }s$?s!>G qbͰ[>#t,ϕ(.vu)?A]?$5[~a_]Y);=3aN"o1[ 'g&oPid wڌ3mzÍd u(#|m?Ho#+pvRoB :Ys4ʳ^+ .vo8eH~ۋ$WmJbt3nIdg /U5~LkgfGYk".dO4O>|}-qoȗJ}ˉຒWR<=L4n#I/T<aIϮ;DՐw_ _^yyzz1DɷcGC@Oo-N俼8GϱiGDD7طב~X)N- "ȷ-3 mcO.'X|ym5?p108;Rg9t_9pe5z惶׏.<+fBxcq 9dc9 ]wFקإ gN|˅8!W|BG!k)[ϊ;*_ Yıq17gWyoƠN^UvE{azC^?$ >6ah ~cB9c@_g% 5rtqzf'ދ_,&t:>7Yn,;@\] kVM_)·8_C Jȕ>2X;d}ԂnwD%=~?C ~޿$W&|sg?ǧByjkYàϻ6r] cx XTIs7=0g]#^`ŏvVN$~>z1ziȁhT$rzYN/<+ifO"sW_^L_y~\ոy>گ"T%K{w=o@ĥ; (4d6's/>>^Uc1"?4jh@Gސo< >_-ùVl-b3{cqs-7~;}EF\o@o kwx|7xM> LO WUY =Kk<:]bO\<1iﰭxğ9}'?yjI}]+%~JZLQџ~8z@Q).ƛ\Cyl.W!_|9O* 3*1zLR56'N,sRдƉ,MI$ឭn|TD,7=ٯ w][4t)Z>n0m~nF@lAn'\ >X)C"'W"WުSռ8 y[Ԩ|X )ŀnBuTw<#ү/2>Ǿ=#(sC/Rx9)m`{AI>oZ;8z/<{w[aM:\|wߧ٫8v*6y4Zղ Λ g)w@j}ԃWGn}I>̵վ 3J^MjPkH_P9J}3%|eq⿸crGЏߌf|NNlйwGX_~\}g<}]\5a;Nh!ç߷]Rڌ"=vg*s?z/lSv7͑|;zᯈ5w`^|Q有:}w_RcϪo3g ;Ǥ^}^MGsjNѸBsW_fB]J4r[g7y>YZ)N{oৄpmO uKXֵEj[)N:{LYЗ#%?s%$<'So \\A\v fs]{FK~U[牶:7.Hz>7^2}eJv\̀8Qoc7|>hNK<p~__wWZgXD/wL\Xϑ ϓE7ewH _q˚\NS"+tNHI'64Cw9 g'9 _{I[JGꢕnš:_sa4B^o-\uȝ}UEw坍ܕT]\I7F"݇7|Qv̀Kۣ4"y!~G~Mes謎g'<_پ>ݪ6AҚ9?W^Ωފ}||px bS7Jq;M%&Cqt_qos2SZ켗sg\q6~Kӕ>ԎSshn|цj=@~l91z6RJ_^EN(ʾM>}xܴ̫r7c].S7oN1T${?/"M۝[:=89t {7X1(D%%1/J>2Smwb')6|I?^;K  >?#k,Fw禼 [ '4yПֻ'qIgsZTW_nޠ??I@/8e:5}i_hou,;#n OgG =e]\V_ qt^uZ/v=08:ς?0EޯcACs>/32?7~?q3Fp#FΜ?ޕ|7.`[n~}w񘼌+;j$q9//ȯM N9el))n/ 7ZWЍIS|n91Z}5ܽY')^TZkh >*惬ߛȅ}cob'/e__;/x+"S_ӾϤ-:pwė) ɷcC7}j?OľwZĿCeNٳ*뿾}B=No&%KeoW}kudKOq*}pyt<1U`OGqɑk>yF/&jlGg+Nx*#wb/SD~5~|j\ +<Γ eN=0y%[2Oޔ8b!䓒 ICԿ"Lu΍K Gԧl >TyZbǗ9D?CW| TWeG\Gu?2j_#0cBwXy)wL~;avLcT?9G~Ȋqg"k k^ۀwQ ÏেI Yы4CA>lsՑOnT2ݴumerk37]ܣgxm(姉󑫵[^{4+z 6yLBކM]95/kH<7?Y[pqn\JgGk'؁#45p|edO~+_Z. gٱq49Xkw#",!俕u{Q_CK?P_̈́U''zQg6^ج[ȱmVbtj=2O䆙uC;R  i}WYOrka}uz<#E?=?9M:Gp7zVJiowJ Dya_l %}S:v_ nJk1x4k0ⳋZOA=7k~/cH[jKq3ߺ"ĞU=9.ܿ{:e ҹf_ѹBYǿ5_X}p9R犧u~Vd4q9{η= uz"{'Kۙy2/,,%ϪuQ槿sGRU %tu==c 7/8o Nu%_%>8wyc"o'/:>t 9\ &wKj# w-=Q|@?^-nnE}EQSRWS]Ϳ"N4!pHK[ Tot; -Y=jDϗ$腷KqF/:uV/pʖ P|\φأ29vlɧ2m>ҷYW:j \"=8𒬋5"ϋ?N1ϯxQopxgesVɬ| yk | >S:prs{M3z_O{h}mur3ހ^K>NN<5"Y}U5j"Q}7 }$=z$g/_G_+aHYO_ԡ};댙8zћUW:Rh^o;|?/}д__}yCgav̋]/m(+8iwZ2'O Vۓ #G2&/[2ȕ/n7u\az1ocwvA?/Q{Yܼ=runri8IxKm t>Mrn 5o=qb.M|vi6zRn7bLądw.Kuu g"u ~ZѶ>?HG~s|yS]'?%x cBk|Ӌ%/1~zvdtq~ 4`"yTp~>F/9J: n&zY3c56:;˒沯O=ق2 92}b>}oݡr_$؇gK^j_#?{"?t`nk}to^2rPlnFUJ_cοM?q)9ϨME#/Z !Nqoq७1*ZsWc7g^V|ݭ}gEUpӥ pڇ0|i;;Ӳa7N%.I=!nO˧;!2(?u[)sШS<_%ԧA7պkJ+ %oVhߜsl< ֻ ՉSE;J^k1Dp.bŒu*6okn b#[3؝'z: $J݈3+k?y5cǼ&łƋ3$uKcŧ^ؾFQ¾p9iG#?6eNB8:+[pqw>vm8\m +!7uZ?}}pWЧ)R>"Ti2R/:B NqsB[VzWsu>Ɖ?ίйVX/9DYɶ?bI(됫LNȷš%W\Q\/rZt^z~ekn5 ?bu,a\޸qW ŏQ}q sԘ#oӛ|@*佚T{Zp#ZױKO˺}x {^|zJ ]˟?qIk_  }z*}c@~)_!܄S{BI!IH9p2P΅Uk/\'l׀d]X'G2ʝӍxāa ťaGNk?w:t{52ۜSn\Oa`Gc 5[eu4fGoFaqS 8;w18GoP9WsPb竌<= Z T(IGiDBm.U'eΖW ٰ?4θg3?f߈[ӰD}+?!X8d 3$| suN:G>C{ .0Q`n̳L$_.}b#PVw_Dc?%߫uޡxusnWNɰ=xQb++vQ);) ^%"ӡ݆baU( ɼ8L(oI.1> {W ]}a'T_.}ums;g0}~H~}o.12{B+V ٯZU;7-P `t:L~Q\yXEVe=8 CwOaUi>a^#3GMFw>s>}Bcq~AX5໬7ǛO.X9x݆;"+sֹƁ% M!.*s"ߊ^S`wI?H Ki^¿ #q~;kgWA).1V)([ л7A}z`A*/pvEt7O~%u{S^:&>~u oG~B/7w&${z-\] oo7rغӛcg?ߟcaFQͫ `3Q䈑a0Ix!RgjtJK5~'2yoݖxZ1> n)^xKdΟS`ws;4nu{TXpGGa9G?ב>ޤ>Dp3wPebD9{[z0oa5M|{Eta^>~Ơ g>/7Ƃ )7w$z_:rE ?[gx'd5:= w.w\#"yзGB-U4YQ3_xzlJ~]GFSpRb8c{T7J~ϊzR݉YiK<i.HBBjVͥG~kyqfӼsৼ_xmMSsnϚ7g24*msLGZpunK>5ChKMohIrfuq ѿVL2lI#*1_oڿ.p>Oܳq?]pw䁞֎N~{>YVbЇ2㗼@UK>[? 
438|>?.auhJvoJI*~ט\J]^fم7t(to qs ;g/`e^162AM./O?WL+e_/3ˏߙNy!?,sȜbs4F١󌵟pFǝQa:gu?{)}dgU|u8#s=}|Z:|\Ծߡj+ vNDW'A>=9es[ƻ}h49K}S]c}I_X`SK~dvWθM +@)_zΑ~cv/񣾝|G<$緜G!sFa?K^uCݡO//ye}k;oFF7 !KSZtڷBe>`}ϥ/Bh΋M=U{LyՀxps>=F3Wn&}G~gx_@y?"t?]h֑\A73.\<7e\ԙ'O;%bK%]eiy7WY&Ȑw2ޮ]MQ0_yl7s_;Fֻyon^8E܍>^K2"p_]^BڿV/̩9 'CD@:[AzzYBNȄl|r?:EJn(2zM_Q'?:_9ZUK?ۜKi}|)fR}诡-Ws"nB_}% f~7s]Aq뽖 A.cϏ|<εݒG3+яp ;:}=wӻ͢A}D`,i#&Rסo:)'8Գ9UKcO],HYs+2|>Z?y5řjkҀ>[r΃*"^-{#IH:S u22a/|`oei|oۜ/[*Һa%e?;^jJKG2qP"v4VCq:wZ>HV+E.`']Xrr'iݷh8!7t*RApW'vPn}V*71>9uζqiSA2?@f)n*:~~x{&vEok8{u߱[&^. \]a[K ܊')G1ҭnH2xEj|'lGW@S;{@OKhN֕2G6v\J$n {?{Sڣg9.VV:\2,Jk]gq gC7ď ?C\p}(I%CSWvPR%vQq1>qso3od*qOOo c}9Y;{fدk!GeΕ bˡ?j&/%u ʒñ<9OHKkUB3[s#9~L|ִ>( jiU- cwBܜYrYMwq_ D:OÏ~^_?߇eWl~sY:A97756?~cmy ̉w5F^[['wIN_oZj+{}]xD)9&=vJ,9J^=~\lF~G>t˘=x'Hй:7ߔs%=ŝw3NnyqU+ҿAq:34P_x@O_74Z.]2$%s}鳯G'oe酜`$~cmN?u vsw>ԤK뤞M< /Ϻ?~;ɥҸKOsIBl aI~P#?9p+̻f/k,پX1}3WKӹǜ;nQkf|v 9u 8GZ&:ƶ3ӻI:Oq[ޖ:GK}4Ŏݙ~Ow?2Z7:/Ez޲aÉ_3 &~߯5|uuv}X >e/#Icu.z;gЩf>E炾59.|ыx͉G|> 7m]\G^>on~Fui_H9ʾnяbWEO+b w2go$=z)c'*Nx/sUq;<s*Jr7n?Zf=~xwcL8|%/J'#Ds?> }I?~MMAiޟ׍u9OyKk/lf>G3Tn"@q,'I(r޻ ;#F1"2(Jxz~~U|{+oߙ~;EWֺ_^v/W*ŒwyD\>ۨs#uCE'U?PJH_!+eoN23| q,VvB$VN2_R'"}k#_2)}XQWQZk:'o@ b[s_Gg:K~HI%{en1zh*`H\]J4u~.v>^bo*E;b\ע09|/-sT.[B3#&J-.y\_tNҽw0Rv!2Hx_nάkDg_U]29[#s{*y_#/̃oDn܌nX\m?|gS)?oOs ._cm«s8y"~d&b֦ɷ;@'\8~=[8zԹV\t;Rrd_akMwf@G2C2?}-X=Ѝ-{'w-y;A<k5 )r^} 8H\'#B#c[d]4UQf'RԾ$w`}$؊znqy^f#^dgeN1x?5QG9#V|jk?[ O) Tٗcj$ɍ]! cj>R}'snwb]R:eXzzv]wOv꓊àS-;~ƍ)(|&]L%k?-2}#*gT.=lu$_iuGewݥyiaI.}ֹ)f"?h+]>=wh/I'9vs?vRoٮx?dCV=-Oz~.~P*7n2^BNwN6пջjЯA*+u_V`L Ů bQpW%7^'!GNw͆O՞|;L9ye8}GV^. Axٿ+>C+k_2[㕻D>c6-[97ZCO ϕ_ڿ}j/ 7˰'7?$" g7o~kIS5>uFh?pNojg:UgָW5:P9ѻ6oJ?NoFzBN#'?`)x3O̶^Qk>RTO4g{딺bK_=Ži16sdUC쥇1s~zWEό|ڹM?x5!z;a/oGOf/ ūsK.ҸԌ"s]^Vqߑ# Gizu?noǨ}\/9ՏqVcW#ү05SR`َ߳x| 'dހ4.y?o0Kt2nNNN3w-%|wwFܮ?^/;qS͇,~a$M 4ϣBo:827PZW<28q9 %nmM[7^ufGE\[JqO>Ύn{E~k7l΄nnGϹY⵴Wq5>j}y.u0fV{Ҧ#zVE$N(5?d]tɜ co[#I?ZI<)iQJ} .t.}Ї2-eOZ7D_i>\?A(.Cj_iQW;Q,D3D>,U.iʛ~L:xeJVgLmNܷ׽г1`v/2gG'3N1!7J''I%⟹/bK+Qu_~~t~oL'͘/к(4BxfFЙP<zȟf2ɫ{&D~j|ɳXVͺEOXuּe_6~;V+8!dc.?D^hcI?6$"SWcWo:Թb)}; }H%5#tCrɟM"o 4 }@B]o-%SX/"1%y/SC k?X͹4 :^޴k/7 6cMԕVI Εݲ ?8J_!bGH=ƩՅί8⡴F("}Q;.[ 5s~g i?Ss3;GaB}ov{g8 CJߕ4콛gU}VO[^XnwsYoГ$?uq ~(uͪ#}qA5k[Y*J]hh. W3gD>Kduw̹ Go*. }} ag O/sWygcGRB(#8igk^TqTVbߐ_8G.?R#v_>h(vEsyqH{f'r[<_)rCxJ:9OZ3Q:8'DI~@q~b1Ojj+z ďI^/Y{kԾ82t/B"# 8[ԁAAi|Wpawypu?s/Gy^vEy: (}zy :ZRGJ]I4g[CnhXl[zܷB~иel&iCғPhߎ\>R;^_݅qI8t",9[Bg/3'š~L6#OBť{[-yj/X[#%xڧFIaWIJϴ8}+nXe⏪:4ὺܔ9N'ոVvҧR㩚T<)}y5FK]9/tӯg=$i6ؾ$"7vMOQ=n^Oct,3Ic.x$r@W f c@tǽ.~ֳ EAuQ\24ߪy#o4}(?\HF~sWpڗ%FU32~7ez qoޫs=ci,K cr-9ΩG&nUߨb-=_!y|3#vz).L+wD^ A^W)%~8b{TvxѣY'H?sQ!yc(tqoB8_?EG(%f.k0|(SUOSzs4_:ߋTWԾ[5qP_JSC8pߗWI#\GB7##7'&͏J7|zkmK7vya5OUѢy~kjJ{>why*e7@G^~3#Ǫ̜)HjտR|&qNI 9KYVE"?RǢB1v7my_>`ޚKmRDnei)8_O [da~ڹwçL絻NOzrчZH ( W YW^j~\:i9[[>T_jݧvHzMk#ghCz(613zfɑ oh!xbc7SB/_;HvϾej2_Ukb^|{ۈ]vUa)O(O赮k b&:&~ >?0xWwG@W?ux#/1tէ2F%_kL9)~ba99Zy O_FǎP|+?jX?JJnkgGG>GX'5sL'vR QPWeWr̀>eml16Gp_|+RDj+kA$oi/ >/pS=D<>1sa{/ocO$8FOfYv׿_}׿_}__&>G .ӭOGv[Ѯݲ{9wF T!9*_2HωO [TN]m?*8ġT }gA,4gA5"{{}\g Aݬ}.dy=[pƦψcM@|"VSL?*`F-g++YDݨ?:#}(>#_$'yDڦ ߕ:q8!_W9y4WO^-;~aIdjmA~wS,N> ?H}􅏳8X>ur)ӛ< ;}mR_ )YY?n74p+r_h!8Xɟ'g4;!#{w[u[e>~yᒯH9Wc%<|Ber[-]r\p$ƁO0ۑa Pkے]@SRw'+1Ye>`1sp!Ϲq9`7zC)WB^T}:;_ז|X R/ [?{r(x . #@_'u0ojջ~A+B^jN \#| {kL~ \lX Rlg}wO2mF2_O?#|Q}d~g7KUun:?esK M+( >2ObmuN׳.+w՗C7[^fvq{)/K߈NY"NЗKa %E޷φޫ@F߯7= ߾@|n2s/ɋA>ܢMpM%'@cPߌGݧsy~pM";t#7<&Nn<+ľy >Rc>@&fƜfnwa? 
<jFanߠ'NzLG4|G=m?HOYg YmkI\~^3Ev{ 1}'z3GR1MOԮOQom>:g y[P[/H~h[t#iX?+Cg4]w9r/Oi襓>:nlEʧwK_ 4͓,_@__Ϛl#lκ@/oJq=/z:}e/Kp~:oܑnB3p3 baO'_{kos`p2*Gq7$dž6CWM'H(nʝrJmׯqy =rlVxӏ|ApXҿ`F*bc/}@v{]Z~]/&^M<ῢnKdž$:4i%E7n/7E+ont菾ca)ϖi3}d#<[ܘ:5hs^m=e$v ,ꍍw 9̕gv8mF=tgG~#C5^Qo(|ϳ[?:7݃>ORϜwԟ0ll\I31z9Fնon ;;eW{5i7?\sw8y4g?I-mnM&C5I߳L0Es\J%,uə$EbEHLE+%BIŜH4Xk™4嚘(d! K$|>ggks={OQZ I0#{>}"wF U7.T::- qZ#}`Γ3޹z>D6*B|[p-RǓ>{mČ~9o: ?-|DyW<uف =U>B.g94yܝ^~g_'1{Iձϳ沟 s͒^T`EOrĩ*wA>P{\&+_s؆ܼx=bwMaaԍ(>.ώ?+zn qrOԇ*Qʜ}~9a2793uPU[ ѳs.c׈UgSv{=J_ߝ|5?Sg"{<5g>۸P;=>)c;gWS%|"ÞcX;Oq]Fm~YTppűzk oý23v.탟Q ;ew^6orC_Y?KgFrc[RXowlWv[=՟ []^T^K(o%?~:q9&qr)y'zwOEл;b2Keu 蠦Io>ns(.>lN^qIsTGrss,z\57+]/>Xi1`UyD.ڍפY_i~[S3 SO?S?\w2޿ Un탨v6^%x^g"w=_J_q/s.$>{-.;vJsVꇾ""ܸMkgcn`!zcMp[磷~owrOҝ>ת`O3BJ%sZ7>a~pdwRy෠ﺝ}<_RSq0i -\ݣq"}* yIl\_IAS?MH͐ ˪BucSاWKqÄK*e|BqGs-n$cQU >s8UV{+owڻܿS+?-vJts+%u_|cJ)gO|;B0ۣrhr[''G4  L_pg߀}Eqx} 4/ Pq"O[#6>y*MһRg{Y_p+4^}븐Ő ZVFɜ6[&"JWy<|*Ƀ _N1}Ϭ}?gHC])i/a?_O ѼŽK𒗎p./nUV9xt972P}+{? _1KIGsWc+?׮B>wN]O]ܢp`q_rAw>m٧.o^/h73K]pd ;]f̍ݟ.رsk~KD+|'؇O3oWX8g^E\`SOd$,lsK<~|2O^<_k+I=tSZ[e;_\;^\?9NzDy?'QUwc儳XJ<:5 yxqϣ]?|ř51]'8+zY ;3U>Hqs WO)qoYGp3}<B'YW/E^_}͊?0Ə%;=S*wV8Q?zT?*Ƀt7BoyKׯ~6G !8Ee1tНuHpCӸ[Wxq cOh??78%ڏY͹ aȃ3&CobkwZHJoy_=.9U{Чz q1sv̏`ҕ|0gpʄm菼ĹEt.|̘_ЇT3a]/`A'$?'\:^`w%/D]@B~U {u̬ y99SKwS:^%̓|{XKy-~J&skq:W[3k^سcxGk$4rS|;̾K%!C_;T?ZYʺMuS9? x)WxyS7_]i f?>oD陕/*?f1 ;SO >~tO%~[9)2OZZg=N_R*@A#u Vts.S įu(ԩ5;+h9lG?0S;L}Z̜ %3}8w vDbWCajxCVꎝ`^=!s ?TVTThU&}{oTNT)y~'{3d"+G?æv։)=)~#wz&gwſM3"Gv$J?o)_xa&rFr' T;A_^gﱆg9߻+CfM!AD )efZC%kEu |֐#ۚA>o)>y=5yo(&q[fsC??9$qKvh^v{<ŭy2]oyQ~Ҩ5/I"O\ʜp{5(J(O\p A}I=Q|9IkLkcTО yХї~/?ƭ5oiqK&enoNw+|meͫ;zvz_?mYg}m{_ Ft yΉ/{db6u$&7A$6cǦl%^-x5i>[rX>u\wZtUJԸgt"[$4; h2W5}UZ7'@ w>P|RwQPm*.[1#"鸏{WC*w' i+8X:(]YfǟS?KZ2Vj>zQk빪ȕ[8;KvV?=I_4ْ8Ki7л30 :9k{;t]|7yy:. uGw]gIƼ?qy%GmÙEm-kR?ci[Jh2}IهpZ[ #X8wd.֛'0֏>$~^|uDuRuje. {Q Sz: ϫ܏""{j׸~9ХۛORjK|Qk^^]|ോcSq'4ݹ\;dy9CgvSб7~r}z_C~5;zkg>G=A)ʒ{ U''+\ix/&gB56~ݾޟ}zCbǪ񨠡_(S%XG(N1>Π~֍K]кhO]Vc~gpn?wCp"?c|87|u74|*i^QwߊxM$㑺"yby{";;h䩜vo/:p ς.ƹ"OБzCoE~;* 8/N2w p2׹A_U/zCkXf=wt3^x v-ouWBoy.'<.lbCO`]q3t3$-rL~kWjGSN I ~S6f}˦'ئ;t'q<8RU|&y J'>(aʟ;c雯]^4"|+.sp^0%v>7ᙾЧkzN na-KyC#y )+>=-EVM#gNnU~Լ:oD~6]skc{cgm+Ri7쏆`w}y~~5> ^qX{ϭ~6mEZϰ KonG& } uwbO NtiH5y]- yϘѣJJF{+v:~]^%ɞ3Ǔ*ħ^~- SOC>4N{gKSr|n\#&RvL~o㷫?Qns<KքT4o}8s=vDС ~*)y!v<̨dz'H7 a/wV%?y)ۃͧc/}^("A1fwR`;Ut,hk{լ_vIz}{ ͠SffXff1eHql,33Cb)q3m={PwfwFzuROC7?:%K }t8qn#<9n}Uh^6>xi]ǟkv/ _K|^:Wۼ}? =7ar8#OhGuY7l7S9y@? 
GKD1}gw*>*ϼZ+j]aJ63YO5 jcG$ON#Xo͙~:4#^:Hb靳v@?O&v]|kczt5sF>&{;p!tvҎUC_z~o{kcd*KED n|/(.ݻE{EnNggY!/eG, }A"3{r){Fv(IHRꚝk&^۔K+V؃&s񯍈X Wc_ލWA>6/N*=zNY!}Ebi]6yk=[-֜Cf^d+I ǀgHߏgP?%>~ cJmZyJ}g:Zss z9n\j:~ѣ`}a,qM8Wy7en/.~b-c_O{7p1-g%Nۅ+Eʆr:<{<+'%{'>72wJzeK<a b}R)O?;ދpJEUySnWm"=8q 7 .]8潴Dq'B׾j+P'ϓBNJWruJݣy +\/S R1E{J~،Mpq pzb?+|6:nr7zq9׼86td>l ~k>C ?9];W{W޹GwI5Dm}!=wv#r0kmб/6'χO;NFmzeo][v_x~_'%yvބ=1vz[=#g/zOvOlxk#}3 WfoD>뙗` Zg^`0=7suѿֽ(] Zc/%N86wz^MW1Z^g=} olO;g[ET Ĺ!?{3&G $&8X}s}s{sj7٣xӾ4x'~6ލ^ფ +.~ཛcW)F_Nz,FB_a+Qjz^֟49̆+O"/&|tcHdTnGNfO F=:{X>8]mO_ؙFCOWsWG\$3J%BvDDjŗ&3`3_~$T!둸}0}z|uZߢ_1d)1UCo|XO}ۦwƻ+/MkI#r04,z)kܷ/л+UW`]+3ju9-z`_ )|/L)qyYW̻#&sǎ甸-& YGO?q_cFNv: _[cu%?h =[q9%/"u">z=wg___%}}^[v=~ܙ8k/uorݱǣ}>k X 鱋k~Sޘ#y 3G O맵nZ[&rUqDjGH~pe} "}hYcZGa-B ^koy5-J^a-Z{Qֲ1qťۛqͧ?ݬo5os7 o5ZG~E-}nߡ<}0toHzǎ)3k6w$>b3x$MQQ| ­a} !qn<ϛ/S6#[>x{%_Fq<{r;{O)2vʅl۠W۸RIx_>gFod/Dc_1&}&}߇ǘ:Ы{h}˾yw?h~ٶ= uoZ_}o楴]a)t.8MWeCOѯr~oR֓KC"׻ A1^^p 7wsv'J]9C?syu_Ek"3㼆O9;>t9f-\qoF|/s{O苩_||p|: e ?ovƀ4ǮZP]@^$i,2s& i-BwX>[?fls7wG*}uwH_q<D|_>jG1"G"nx0ߊZIB"雡s/5/}H>K}H|u%_'51?W.<VOG-`JF|6x?9DIr7.e><+t8e/Cv f-T%6.qww!Ӷ]/(y.=IOHP,\?[7.\N| },?0-I2k|NjٙJ݂zI܍WٰF>~Npھ}U=.A$wq6ͼw3 $\l'xSay;֮@}3Oy|ԥi?w8I_XQh݃P;\A!XA_8 ;LO֝k:>f#' XNOڤ%N?*r\pB胜GV&.z::\{""v4/_9_}Bn&iV{> ltn["rpiuez V YN{}͞\ΪLKyB,8=3S#o Ygj|5פ$/{Q6qk[g/NCo_-n̹J<ʨng܍Pszuo nH}#%N⑍S79*]?ȭWzhkgߐ.$n'6!N}r;A>68*䋍VDQ.G`?+οt철&>rįr/G۝!ͺTx<7Z|?¯ ?9a+s?SM7z?Iwmi>zIڷT2#Я/z\@&ODy^z,wf^,?ԱhH-š=#7r灃ؿ'}q\ηY \3XƧ?|]={@WO!-cG]VsglM{S>aԁko"~Xr{t쬛gxVz}Uz~lP;F.o~S99rim{`O|@Ei=kݙ9(: %n-&C+1D9TwYOƢx+!%.sjW"g'4xW_ޛk_+j9>>~گDƇt3>ښCn}%>dބ^LGVK~AŽ(Q=}ȗHjy-2puJ=|=~<.c[L_iU3zww_^ %/o~m߂/ZW$d_\nu???_#ڏz$vh>>_ʱH^oLYX3p?чƱ@1z"sf&(o{gq|sj,;_> fQ݂.zi{~{WP5mT*&>;iW>"j/;&oyit"}7/y*n5Xl Cq4R&<^HN"82}Q|ɒW3U_QS+&4_ْ0_Q/CY?xR}Ҋ<֩ɯl#.ƾ?8mOZNtf|R3+5?%O3.i˷Q$sc<7.QrΫ~+/7}^|7}7괤ߎt}nkӸPxnX!yz5-gIE*gޕ'InE_џ1Ȣ_[)ԧj~;'_/ycߤ8W#srcZ9!U{oֺ]gOJ@f[8D|_[#PG?~}82xk|%~[<~7S33~8 (_K>^>~~v>x"Iq{5dzxV1d|Τ!%u௾1{ߗ?}{(v~͛p:7jԌH}/27C:?7q'tu٢oSUk'G G.'SgwGp/F O? 6 s;zX%";Cs vI~LóAwNç?4&>mp{N"fA" q2NjCGk>jyp@]TB}#SG'~UY=VҬ ċ8?{Eهu,ވ]-}5;T~,W#З|IOk_}9xOk]as<-*3tDoI:=sy/ #Y/vؑ7.~0ފ])'$-֢{ǕMre+z!x=,u]*qȫhyu~۟;3h +8q\.!7O4b.&lcUs^¿:L˨|qog 5ifK򴥲qkK 2x@̜v娍U.<-A[9K}zQDVRkWWu| ^)w{kM{+u:S眸?s.w(~AR7azA7Lݻ5GM\!f' L_fsNb`JW͗.EUmh A^>uͳz^׫}{&`|2|b\ۊ딉{AW7bgl9~殕nMr5pKA1lBÑlz}~ϐRȍ͈?[ vٴeOV;{~XwS`.9ݽ_JF1_yv3 ?7kKi:@Y՗ҟ7vJxeOBI6rJ5`/%~a]Oޒ}C?tr?F)#=jgr[qN{3Zwȑ?r>q_|GkӶ6 iU~x:uҧ?gN k sO~\bUG>Vvxlݼ }kL `ۿ@w|h=rt!Зnm¼,El8͹XgsfR߲n!s1m{>w<߹+Aa ~ Į\Q 9 lgXԱ<)?c %u:OYv/Ozߡ_ߙ^_ Rѯ[<{΋QtG]t]TM*Vq;i0p/.og 8{q;5O;Riuo?K\3a=cjOyKֵ~\c ė{ =vk3}v& sʵϡe- 9Xi"tr R_㳑mEZ՗SO ^_΍Y=/$]3}uwԍ=Uʒx>F#AGr(yN;s&U76os o7a4?+ʯ^ZzPW:p⟈ݥu`ڷ_.rrIv_|7+*|ݎbbu3U7a#A\Tc.x?ܻEُ "wFm? 
s@ݍ&vx%:k9_ֺ:G^ۗ8, ؞sDmwh>N_1+Gf#  wOgϐL/ ̫L x#86 >Giȩ9GGwCnHPJjAw:ߓ:`7O;~??8GQlxꗰ\IxUtx)N[_Z!u<~KCsVGEz&t~XzQ*D9"~Rw6O9{pR KsuOݧ9YGYr͗N}꓍SɳX|onB%{p'Kfb_"x #X1rSw;Gා<8gN:{gP5C׸SqF\j SnK4` ?||ڦǏX#񇉳1OH5] iju]&|=c<0+f[d,_e+}i6x[׾NQ -$`:S?<Z1%v fzȯ,~5A}kǬ9'|/M"1-k_JRjƢ<1t_zAbϤYxH )8CyҾ;y|h?~,>: VShЧ.B?sp$b ~} ]RlLA8)wrvd/wcAIީN+g)9Ϣ,stAq]gk^*$n+x?<00̻P|W\<ܭ='ȷ\|e Y읂'دUyE<~EX/K!&v9 y`]gۧk#!?rPGS!6_ې[8𷦜39/SP!%ryľ  fIJ /:7Z\ΐ zLO9qߢ* zv?WcOu{J_aN 3x|e=7u^})z"u@/քw"sO0|}=^:Ʈ9 " 1 qf/:WCMRcߊ:/UqfM[_|g7(XL?ڟNAi`^McRY}5^Hi'oTލ;'b:3 G]:}?QC["cD>Z" I}'g]4G?ԒE#CsS2#OĂζBպ`-G--ʊ:>9&$^wwc> UϥxP>J1_f=?\e ^ZA݀œl|H^z͢C+si~OZt~~!`_дCVʴż-I_CsS~; |@_ZuNB{Xo:M'>W~X|Δȏˡmrn~(KWΩjMi<:7J(F9^uҐX<8v{{ؿ9O*~GdBNڣ7#]_mh{ m(X3z?b}!3з9Dn̟S#W"!uV{w+yD^ym[*}P/z]Zߓ/sjd">Kw<'oc[=|z;][; Gi.[kGKJ|G  pF2W9DܘEZs2Kݛې?GHZd\+eB1cO-FL}>s"##ک7XIevJ(zҟ1?q|ngÈK&ff[o|=!D|EoUm?3=e{zy3ZGza?xS4salC:ޓE/z6uEZ7l>HWe a剋_Ы1}dAk`/oNPVXNPqK_k?t32CN*^9H_ōJOA,p2J[ ̷>Rgv*4%tQ9+r*nQp17 I'vg]왳̂dFOԸ rWHrkUıRCI\޿q*B|0 ??_8mU'A'G9$c>V~y_;/~cbͻ=SyάQZ >FA:OLxċJ |v)>f$_更 a=R/r]q89ފ3CoJ}@+SD7{O\gy#(>F-<zaEߺw 9G}N#]Dzp?r fS쒘iEDZ_[If:Լ閏J}DXBᓌQ1l:6tB>,].+7/xc| :]lt}Y&9q>qUUN SĹ_>qAwC|!$fvJpW_9[!`+[vpY'@AgáO>^~2ZIHgk5=vczC|][FY/K_;{ ӢD7&<zy?Υ<|ЌE {S'+u끺 :ٝvO/]I{\Z܀ݪc [_.s1Ab|.p6 ܗ:bNV}&zb>2L!ķ8·7 Xv_ˁ~0îKnsޕJĄŞѹNPys 8WVH`{H]/Oa_~5|}#ʾ##?%<{W7ZX{Щ1`FcM]E/k ] 7w3L٘e}#%n+f;rc?>>UOei\H_Ѕ J]I4пy4$9wӉ[Uސ9!c3 '2^6|"YuZ:wTnnt:g9p /DaÉoY d p|5#LہC='x^WW9c!7ݏ'8I .@o/JgGͦͩ#=:gU^pPX_9'b3WЏ\|ډW&v{BTȾl*us߈ldvhuNJg_Q{@*7C/zB>I 9yw05Voi?vNw<G|Ȋx!|t1-l#><_p)*y7}җO_gΤG&r^E6wּ2 s/d~qҭCo跟q;y^vg@{FrBuf#:l*v̍qfTzWgSGsIFYoC5 ?#XIoӸq`{ჃQZZxɸ+9vyR/PuޛU~|dX-S5>\ (y: zz= [f[__aWg_{>Aۅ΁ ?JdDP3"Ǿ 9q/;c4g9T8=*E^%)='Cs[8=S'zCNi}9Ϟ(sQyM9i-cKXz ws⁣y:[Z^S秮M/Rz}:{3`rM;My Si^ hνM㠊3sK+8 @Ū˹=_u@;'Srq?Boﺀt8n \;$+s& x~[+2&%uqw T93"/&(cԆ~/{ |q';}Z?nKND{B8ĩ]׃HH!V3i?} ߧوze牉XƟ\Y?8*~%s6 $}t3?y*x/>IܨQM!倯WI od~ŋeμj ,}/%Ǒ#wyVׄ/^\Hp'2oEG&n7uU^Sw&i?NO>xG> )z.2ΔCF{OlQ\`PwU|Xs2W.%03i->9Fd'eG0$ݻqG@^!J/Vį>3 {59;̹6^o|.S;m5go >jʟ}0OѨ[ڷ "u)Ϝ(%Yg|T#;w_!.yTAa D]'GKc{iHL mgߤ{9Quw1(IVM.XvℂSg|s^oImz6丹VzA W};Qjrg?Ϙ}wH q9jl}@/(N$Gљ62qQEf"tA/IjXb~ #A\G}-w>K._KBcRɰgs ۙsU뻆S(~zH}Yg6Lr2>f#鴯Mu[ag*ɻ 4  sErJ? 9V*ϥ>ƇEȣ=, y]sYC,(nv{Sɛby_M6$ç_q//?pYox9+s쨩o_, I/(NZ ^R't(\IЗxFSKpk[UTI0sۇ.:뼘s $sYrE0wdݸD!}{w":#r?k-u #{Џ zMsooaw;uſd[,y=cF__yoTط tyy k&`."R+kc;CPrR5_!^?rIQ$G?X7~*!snRnӗgI!-#6_G!lQ?)_byҞ/~,~ǠN@gyNbtiKssoz뜫j9k[{jbg}{T;ض"4^jޡփ{`QW1Œ|r~s6|ȣM|&r]W|b/ɜAqGWR[+ʓ{?>ө?[czpzR;vl#7rC4/=DLs^} |'un=/=ڎg1rMp Y +9_ |]Rn(A2fEъJ^M#e鯮Q!2?,9zřbp^iJ^1UD/+ I ~ٌlsOn y=,|8o\ hi';Aki]nQ[6ii~~N<}RX_Oڕ{W\Wq#פx66m'U<27ח:wWnЁSQ=2w{ uoךW"7DhV믴?}'C;?t\IvF|>.J;;Pv#y.~ Pd̪HPJDOjm|,XC/7w !s 9?gJ4nS\%\wP=>yDi^C㏊tNF>Jڞ_~Qh5vK2*Pos|z"Wܹs|gCKH\@Y@H@~HWWlN\IOR9nX[⇻5.ӧt~Jfo) v;gNe旫BDݿ*])LC5y5,q]AXT3)^>K'oXӾ5ƤЙK+Rk]&HF!~e?u4 Gg 8đ}}>k}$_7UpsH:Qܲ{݉:ɻ^?7$h9|W=ju=#JX_<sO/8GK Oxy*2:q6tςnDn(^T Nݫx 3?1>>$8@,u?NU>I`m3߻r;2Y$]){yRUbtyЙuO[9 CV}Nsz dn%a]}&˼CӚT}k^zk~:[+k7B3 Es~.+~9i&P(yD+\Ըr$h$P/,r:=*>:ٵ9 3ѻϑ/[dR(9ڏ?O ď7'nZ~-jQH='<@ߋϭgYo ;ς? 
?DiX}6y(>ξț'2܉zyV %j-Nk 6Ns B>y3Zu5J@ImE @Ϥq4k?xЩ4?[b4$k49%?a~^6r^w\S;WF ,הN0S:&'( qT&5[7/%^ﱞ @{U[~ơнwCjf;[r#S%\M<7~]!xK)?JߪWa Ԟ|;+]f;չP?JiZዬmigYZ1:1bq>ne4L=!~RK/}E5>Z/]$牼yhsl} +81FQ$*Wo]ч|~[y"+/%w>&Џ삃<?HSi<,0D9?%:,߲\ 6s3/!>4/'k2Rq:j^,Xhx\"n}-*CO϶Sڒ >}oF}I:@}%(Bk'LFkR0 7 ҂8UgsOУmVSχz׫}ss;xqr{`=R;#{ds~_o}sZx|#A׽ߗݣ [[KQ'%w`NdcG<^{^{[^q$kl k2?e\^|M=w^l)Ո>]{mLFwыM?oKT}J8H5:>Mt;ϥ?Bc+{c ܬKg}3 @zg2SR~3[$O"w|#OVԓ&(;81VPp):]@SR _0O-[/M2G},]ë~ C^)ONs:X/Rw߇+;:l~kM>6z$2 `BVwJC"q 9O2g`Mp;+/C쌥Nєsϧnopݽ7Oli2Kg3+%$OT!$U:{xe6q1#Sy}+#\&}`#7y04~Z؈K/}˻P xZHM=}v/uf"܇@:NfU"T|p>\ \|{+ٚ'o䉍rӝnIY3Ӝl㄁n*,S͟95۟- )Q2sӧY`xw'/Nzs͒^WoWũ_X@o,+OS.>QynII??NpϞoh 7@coŘkD)F=Dۉk9=;3gwG'SXQu5}(sKPu}DF =-9|q~VlG P_D7l~̕!囙+xk8Y3kﱎ?7- {+AqNrꀫfP5 [wyzrθ >~؋ugo|"GuU;H۲d9m?lpSNtҰzq)o_ ͤ _pJ6wmG_>)'P+syF=:Ϲ WOHح{ ]%In+ؕ G\Wj#`HNXa8OяNF+ޖW)6A y#x_C7ޓGm tSȜ/)͠nstla~ٿGR?9l0}6W_wB7OUsC:'u?#Wn}C]:zsSwSԡxYBFȕ1f?O~7п!SŜ4nHB;o'j>)'hy¾:}~_ $|V:=j.㜋JԏǑ:F =^9pBg:Ōs#[Bɣ"_Qdz|zi"P'~5^m#v՛6R<B}yN׉ܶKbKNybΘ\ǵ>^xy|VLc]MNQM7Qy5IM-K|ӎ" ?!JF:Uf#NSxw-Ue ~C:@;#?Wc=?Qd!FXհe?} ?.roֿ` \{נ13L8kz/pn#CS⛦Caa?Rʟ!tۧ%~i$>k6A]Vb/U/s70`O\W[=;s$ 7r JNKBt'sb9Ԕ>6@Ga7иݵV2?pڋlU_Ĺ&x_ ymڡgX -e32p-~$uV- yUsCj{=ыoҗĕ}{I"sFǛ8$N =5f>@0}M7^kōfϹy/~Հ~(|ܯ=S䓳q0ϭq9U9E;kKdI{?,/ 9e~3|VicQI:2Qmߚ9GΜ3Iup?z&"_r7fw}MoW~Yڄ9z˔_Gڎx:CN3P߸?,Rfn.7GITDܬeV!QRYω\Uʼ7hvH׸3+Ղv_u ߴ}"7KWR&q }_ta]8X_u~}ũ ky8#we羙'w+<7}! XE7w|\=.#pc_;Ϸ/SnI+ߧGտOK/ߙx-J+@+}NFϑ狱z+p5T\ҟ0忢3/5݆Sq"7H }Kr)5OMnϫg2!?}!HL CϚW`)JM@nKRnʼY|8LMtrN{To4o%ku6b2Gז:Fq%w>|d^sBұ{:?ԙ>BӱDyJݪsue?Ŀp_ƄA?d.ibgl-:kB{e}UH>dj/*PN*]K~+%o} &wR`y&`|KL ɗX'coQo \%~oOr'1rg>FJJa 1_i_>Ó8S46)s'qUNm3/oKJ_AW>~Q̿8k4bJHnu_5Qe{#| 'zV&Z}ȱ,8*ڎM?V`fе}-k9vq߭lzCZƕrGr52X}#l>GӤO v:x?[>ۺ }{Ɯ_A'r,oR,]\PFDrU\DOK0D H"He V ׵DF9ry}ߝgf9s<ϙ &J5sG`hMʣqwƂOjN_~Ӿ RϞ#[ٷByZ |3o{ի'yE?9^1QĮO_h_/Z /8_f|S֋j's%AgV qhK&X_$2;H<*y vHxʋɷy:Նq)ϳr%a,z*|++wsljʇ_?-kNdN%<"ko?͂ X,xKU'9}oǠч #s6/}1.|2oXST:0nHL7'.NX1y`tc'Y}bCAt!Dp2z<81qk:Eqf|̱tY߹7}E?Oikj9,kyG^yZ-R?,['wQ]c N}?I}Ҽ]z$vZya^uydEQ^~4.QFy n.9jH3:ϢAC3?gO뉒WwϦm\(>bߺ*S_};r.q{SR^kOy!nϻ'o] vے㵲?c3^ه˞CLԩ6IUѣE?ڹ1/7D~,eu%CoVU-C}z;~Ko'{^unk6̫w)_;8Z{iiOߓ(;q'4N?"s]~^Qh#vf)S!.z1D//M;{N ?'\6%Iv}>1 \tE橭͙=П^ßI>*/+>g.hFEW8S~opy>#xtW.1'(XO^GxwN[OG똌/4f]W ?+y+qmfd'<ߦ7wS.qn=' M{Ŷq]a<.aw|͸ә>woqSp.7}%ߥyCo38(ˇ:{sd- _:k!_ ̃xiAd|ʏ jowNBCi\kyF>{C,~9C0# E^,N)VߥU&7I TW6~*I^vXYFVCn5ϩ4~|Y'U]Pzjl1~Ϸcݯ֡-< }fWyVĝ_of~gx!?Jc_ W3C_o<ݒݏOV%uDgs8=5zf{ysOgs_{#v3˭7rinօ|l~ow>Hc(1{V#ћ 3s]}-ے呼!~UʳNfƎ0<+|_noٌ<˺>)=sW?z0 jXA]K~,H3.G׍(?TyJYۇ>+DaGq𒝄ܷ9>ޱsp" yӽ[7'Aj;?n$yj-0sqhikȯ|+WJxsr~3i?T3ᐑcʯF#3nzsu|Sk΋f~|Ne}n|%S2 P BC'o!n4o#ׂt=u-' ?Pq5ȪrF޽o`sM5G?haLLdž-<2{7vUf ǁK6ڕpb>{ b#-l_\=jsb9Z:{_[k+23n~%nhS_ zzC/k+v?k0񲕭;6kvK#?1 > Vv([^8MkAש_I9Bo7S*uǠiZnTi:H1\Vf8Am3I8L([7*(RaZ'ޜJ~aO#gPvœzmU8EֿZp=Z N#B }*_:O OhƝ {B;s/닝*АEGkct3rmm:\CƧ7md\{gp?0~~?V F 8(yp̴漠&NvYg7wC?Nǂ8G=6;,3N<~o,qhfU=<W+>3lW'og$X̏_xBi3V*ه}jKfWߔ|B?>Oݖ[$qJN@>$89\oGe=eky#O#Az/@5^MuY9p+KWksΓk',.Gyp%Oh]:We]SWn!lǛ6yT'&~!z5Ӽy9,cz3F/|0+s;hW MGq\/1\-\5/}Ĝ$*qLu1ϳ72|utb[GawL׻h\rlI +K jN~t7E.^OJ>K՛֊!㕯 ]oӑd@I1?z\oj\%Ki˻˼ooij$#^vvUtZW|zQpV y ?Į͏xNN+qs9y8j#W;#~kjU⑿'n{;Y#9n-6]Lyy Z#?Kwy#o?y,TL?Su~^UoKNy^5;Lw>K|wɼ1}I˱[6~gWJ6t6k<K uG{SoEƀ_-wƜ$>0zrr=&&~`mvr%J~X2Wx v[VmdkA<4i=Ӏӿ~CAvN׍}2v{~=*_\ }ޖjwtȂϽu$3׌4ǰRKf}|_-?ԣ<~tq؛{Пۧەy;9pMZs!AިkdM:{ a@=|طwΆLqg*ÕwLǃ}2<ʿOTA=c_[GagN\?lC[oܣ,Ǐ5oM/>S>\cN?M#OW|} <>rs><훤ɮxx+eџw) ܮMxONa7i .o'#%'.D@F.:s଼7Ss_'n=y<6#57ĝGWg[:Yp*&+amh| %!3Co.ؾωxyGq}Ͳmq+s_-F}xBAp7;{C/kn,/~ͿgʯƠ7|;y[|?(TpxqZ 3ߜACڴ۷ /h3vs6N(;_.y@,OةmKiȣoUX|K]XBЏnuhn؇_3.,j -ȓvj{Է֙2 S q?ݡ||o}އ)gC।n>.rF_?#t#<' 9e~}HA7W)Џs%n>q.$FZ -shGg}̋ OI,u3/q÷Nf034wC˻rS؇KJsUDOw~^d N8\jՔ|sVaGÏɳ=p]GOg]k./3,IͳʐW5+{67|H_N}ü mMZ v8 
hq+1r+<ؽ?"9~Iԯ.}Ћ1>J^9Ϯ~xaj2*Eqwp5R:Y'onǿo+WKbu4p>?@x1^׾f܏J_l>ڲܭϭ5y@إ˫b1.׳!Gag_' zy/M ]^-~hK洩O'CG(\x_S7OfP'CT\p胰'箜K~$Sg"F.K18Vf-tI_o/jO^[NC|i,X rٱ/籶5DW_&_ 5Яz7.e&W.בr"~lߊ}8s| s`ם 58H5țk2kiίi~'',|Su 5a.Ͳexv}5NQh~fq<;(/Ci_:_nU\\4~&yw˓ñoDsƟ+ לˮGW9Fo>= #agL턽y;"v:μFSé ΪķC;8k1u b0_Ikdd.Ciߗ+DGXyz`LG5ϓ= ڇkQsαuHD^Sg J \gL[qxq#ov?vԿȟ]b~5xjnh|jy-4M] O'6֗g/cimO.]#_I^bWJfr_ݿna 01kS> ;\!)_(G o C: /KTKմ|﫶Au.uXw6 0zuG~zwcBĹ˾U{Ž/2-ޗ?q" b~F^tK9Mu[qC˳YU{}:ʓd]_?Kݕ~Ryk[Ug#n p9#P]3'/ @5Ƭ=]ʳ'<~ޝ&ػk_K\W/?o>:?Mg d|}1Ov)EUo75uk/v&^nw^0̓qdA>߫HtZ {tOovNQ~UR^{>bXx\6U'7 nw9z'jw}DG(o{ҷBe/ }y#S/({LdBKdc>~ܖ=yZ-91f~] S!usysS!$) {12썌syRo2^0- <f{k=_yQwqxhh߯DG_A75pZ ]u sRtXOFy)B"(<~@a'3gI푗:ݻigr J # *%\ߣLb)Ᏸy,u͊y1m<_ |c5N\2gb-sі -Q(o]9YbrC>!{_u \@P[{ܷ;og~nZoI XE<+lxN/|:|4n D +But.+7.ue ֜Cd.lpSS#c*J=0v>D'xׂH?sy,=y}?/%&˟|)AO\ޑ2; -n]c⓫E8{+:n%df?.Oi>}w.my ?S>"po] #2Oft@~?#s߲]>W昝Yӹ~-[w}~|q3}\7ɳU#P.[KC˿4ھDePeIo+)b// Jsֻ_+"m׺2xe]pO @Jj[x m|13kMJhow8bDεՙUsj?_asi-!k;X)zpdgzr|A(>?(6;[?N~N{(հ!y7naiv}pϽx { =*8n~RySO4\4,'W"#ƁULaw.sΏRM$/j;"uoeTvA^)_p?ζϭ}u=/v'39"8'gP9zS[qOL\]>F:*qЛȟG35pBﮇ~zdQeV:HAfEKN1:7r>]s\TZ; Uh/UD_u4 N|Qχx^q} 27ܘ>v>^m+)egy'_ӿ{ ?K''K'y_Zz;ogy9_渜q+苓zyc9:Kz:'9$] yqzWv >ӹ.O㧯g_h]zAXx)߃{fviwܿnP慕^=:xjGuqmI_s1}y. }>:m&-<َ>Lz<~^]Dװu.<dQO굓uktr2qG)96}`gE>jߩ_ ˽Oྎ%ΕNb|D\Qy0#;6iסU:0y3S󾜻IJ=;9>rUǎ8~= ?]  M“80FCTp7?6u9x_•Op"39@\3$>@'__&g4kfz 8u*7v^&|cҌFE$?1)b[{37n4\QaoWktҹ {{(;S5Cvc_xZtnٽ\͸28Lh=5&ZRp e3{_ӮQH_yݫiKVltUy*~Іfgȫ}T -yMLpC 2 ?k_]w&V+Gךt%"| !|_4.߷".yw7r'07{gw>F+L/h0}7ɘ0:t"z:ֺɰ׎‡E7:}AD? dJgB?+8AFudE>K>Ч}7YcȞd#n'+/2Gbh[G^DN! v6 tewwOA>^s/4J^ iXʫ㥮`fG^dW|~6y{>~Y[Nip1nyyEЇѓϪGK!W~-Cs_jmqDNsO ':(!~Cv7ѯ͈kJRҺ ;rnp[Y$)Uo yOϜg u|wCܜ;{K>m'>6p!mәvuyS (9x%G"JTCXܼY_Zl.]6 s $$= /]s#o wFqjjOEѐ 9"߳Dby(%7B>EvF(] \_#:Vu7~oyՉ&ѳя YгFk^NbMз!yXQȷy+ƣWeJ?dq FcFt Pn_{1ǢAYe_ ^ž7/ Ϛh*Z$xl9eoL_e3koO|lDWi^$60{ q?T0p1#IRge޶CUn;ʜ}t[kg`_8ᅳW7ˆ8]@߽U]2/ _O𖚆uJ8v|s˾&}X5pN2WbwȳJތZ&|Z+#+㈷!P9NQY!{r{^U6{#t G<ָjȽB)-qr`Z#^Gw:ߦ%y _\i,tomȽRowGI>jD|!u{zf?P?&)#Xp >?"8(O_pLx5P+>:P(is~=O$B CeB|K7|OT~f~_K_]Y\W"Hx?۹ŗ1sOurh8 ^:)3'_ypqԉw5Ϙx8)Qto?fo_(vKm"upZ?5Hz_gD܉(o#/cOeΏ9?u7si$aON*qq6җ^~_Ӈ׳ :t"?+9kMn݄;C}fȾ ͷh߀cfF/\.{!f.>ȿ6vUGnXra>gfJ]Z?Z?ﺝFe`~r{[7˞`Or?T礃e۽&g ({o8&b#Aop̏ }A{]caYՎb%d92&þHc,; QOp&v~31<nY_ ew_R#CwsIȯ%ʇd9+t5i49w'm9rlM@G]U>N|?R-ef(_VS nIC;@NU>"bNS焓muʌA!5ޫ2G_%ٻ섻 |y29h{mMDwʏhLx|^Ők9|9kb߄O׉pdɼڼ{N!?U}=8)o[kX&?q+sdOuA^·h?7/Qx#_6!N>3Y"W"!3 =^I葱}#zs2Q:<ιy/k=vo,-}f윣K+cj@0Mg\wHށd^r :W.r}}£rf|V},աOyWO46s| яd_i|n<˂5bU_=:~Kbǧw.wD^o?Hރ*wծ%uyM-7y\ҒtWa?u\uHyY;#|ůGkwt}]#&I+\8I8:oi?bK~_3ZCk-qzaWvuxUns8/< ;q'y &=?e0uXU&Ͷ~(c6.J~O]w^ԑFcW'Qm0brj;qE c{ِS>dPU.Cfȷi꜌eXR}zĻ6n_K^>'|vj77kf}<y^9KٟdtیkПvыbScK_\ 9ODiFQiX9`nMܗ#䔎}Vg kCݓ)|{=y|6o<[§w;18q>)uɻ +n}5`e5'4;uu9pmSy| 1bic}s lJr4p';=F~FfoN}a\8/twċ'Q__z:LuBI٧j_TmY9![YUaWݰ32lf[>u}NHwKcrw;6',u9k鵕˲9 (OZke77cRg8P^nQйsrꪂZwi |Ϋo$ߡ<嚿 .>wP"OxR ҃cʱd§{r/,qԉOad^p]WȡC^ c>_JOa;NZ&_irN]^R[IGDYx9c'8{ďe]z\7>||<_[Y&C<7Vnycach#C]U]ÄO[?IO߿7?,*_Qg{)1R@K=TlJ8]=u#//Ȩ /uF~jT,\4oUh=1&)_QO`1ւK:u0:,5Y\> >uLsM_bn{z-[yw+Θ+C6Q煶+w ['=x~{ȷ̣iڜ9 l|=^>kH}`/ W;_FHC\2~>*ƨeO Q;`f=4=5xGqYd6z9٥Ys;=? Oy-~MS^U?*-}WGXiRڏ==Dz>vV3"͸ɏ/;hL=ネK߻5zF)=e9 .oR'2wsS9 Kك숃}&񱑵)^Z"^.}o͌[R@ߛ9w7Z^AkQl־ t-V(0ȿa}ӒW@w$,u umf b[ο@R }cq*~\G^A'MIyZO~5[yj畏T o"<" Ge_1*(?&9vQ퐿)Yߐ'6`HSARL>M]Ƿ^k'>"^ pKN=yOgi~2u#Nk|MuDU-=}"uUNˏ?'\~_K97g{-on\%ujIߎ9zxHYM iьuo4{$|!J y3(< qi +3Eh(1uGn:݃Cǡo\S@\eKi#o{L/q@L+k9?]Z(^BeY/>)wvmל_V>LYnVx9+vC5=7몁ӛ~nEhrUY8k6DsLԓ! u.j}O{3 ?_a ?$/ /XP;/x3XOqo|  }q̻jf? 
[binary archive data omitted — continuation of a preceding binary data file entry]
dipy-0.13.0/dipy/data/files/test_ui_button_panel.pkl [binary pickle data omitted]
dipy-0.13.0/dipy/data/files/test_ui_disk_slider_2d.log.gz [gzipped binary log data omitted]
dipy-0.13.0/dipy/data/files/test_ui_file_select_menu_2d.pkl [binary pickle data omitted]
dipy-0.13.0/dipy/data/files/test_ui_line_slider_2d.log.gz [gzipped binary log data omitted]
dipy-0.13.0/dipy/data/files/test_ui_line_slider_2d.pkl [binary pickle data omitted]
dipy-0.13.0/dipy/data/files/test_ui_textbox.log.gz [gzipped binary log data omitted]
dipy-0.13.0/dipy/data/files/test_ui_textbox.pkl [binary pickle data omitted]
dipy-0.13.0/dipy/data/files/tracks300.trk [binary TrackVis (.trk) tractography data omitted]
BWBcBB,BBBBBB;B| BBcBwsB#jףBa\B<~BCBMBBnѣB,gBYBsڣBBBvݣB BBBB MBvBIBlʈBGBB4BBkBqBBLBBBfZBY|BB9BYBBBB%BMButB+B6ԒB]BBwBMBB ]BB fBBBB֞BVB-BgABBGB1ʳBB#Bx7BFjBBB+BPKB BDBBr~BBBcBCBZBXB -B@ABKBBFB2BUBcBůBxBBvBBBBYBiBtB|BʲBB,BѱBwBoBΰBׯBBQկB4BXBB¯B;BB֯Bi.B/BYүBؓB B$BB B䘯BBBBBoBoBTBBB)BBveBA\B;BeBsB#VBB=BnnBJBBBiB2͖BuB.B-BuB,B쥙BBnB4B*BBӜB*B=:BZB'Bh7B4BsB@BޡBB4BoBBVB BpڮBSxB,^BYͮB6~B\çBǮB|B#BҮBmBxB8BFBrBB4B/ܬBBBpBG!B@BޮB2BBQɯBQB)UBӞB[nBB(TBEB.BB܈BBhBݍBBQB珯BAB_QBޟB BBeBBBB5BB^"BB#BfBBBB 'BtBEBBȧBq`BBGBG=BN|BՀBBBCBBrB܃BB`B㌅BBB@BBBkB"B BBV:BoB"HBhB*BBBBBBB_YBBBBLPB#~BBBBS>BٵBjBsBxBwB1BժBBJBBQMB.B)BBѝB'BRB?BCBB2BozB(BŢBBGBF6BBAFB BB0BߦBA@BSB'BWBjB+`BXgBB;}BNlB7B}BvB BpBیBXBSB禬B1B%BBSBBoBBBbB0B5@BEuBBqڰBXB B iBKBZBBMBB*UBwBeBB٘BjBBjBBOBN*BByBܴB~BBTBB+/Bd#BRB֍BIBtBvB&.B\B&BƲB.QBwBBBB-|B xBB bBBc8BEBBxB>BBB}EBB%BtPBA/BEBdBBBHB@qB5B!BB+@BsBY6BtǣBűBhhBDBDBB BB!BB괱BYBJTB4˱BB{BzϱBHBzBBDB^̬BBGABǭBbBBkB"B_BB'=BBgBaBbPBBBBVBcB~ BBrIJBxZBU=BBBc[B6BBk\BBNB!2BH0B0BBaBB~BѓBOB BDBaB˅BcZBѐB{Bh%BJ.B/BBB!BܖBcB.BY(BB΍BӼBDBueBaB֮BVB B)EBtBuBB!BұBmBB/αB0B{>BB`BTBBB5B9BfBϛBKBoBoxB7BB+BkB"B6ؠBޱB!B]{B髱BBBByBqBB8EB/*Br&B%BABΕBs%B;BBfABB:BiBBapB~BݭB뚭B-|B ^B걮BBB˯BBBBڰBBNBɱBбBBBB BBEBBtiBp B6LBB~вBBYMBuBB׿BBBhBBKYBjB{BIB]ByBZBbB,BgB>BӹB<ڽB1BAwB:B0B^BNB93BVBB9B:BaB7@BFBBzABB<BsKB~BcBEZB=3BBgBέBgEB~BcBBBCB31BϭB B8BBB<$B_rB\B~B9ܫBZBXB?=BKB-(B|BBԳBɑB0BPBBފBRBBBzBڹB8ŅBNBBvBBxB#BB#BˊB_B8BtB Bv5BS BB.B@ʏBZBBiB B1BTBޱB`BaB8ͱBLB$.BựBB˗BBBHZBZB7fBBɚBBhB#BB ABhB*BB8BXBB])BB1TB_B%BNB${B+B/BBBA BB%BBBXBBBBBB{B "B'MB,B<1BBp@BVBB|B'BŨB{BBWB9˰BSֱBBBBEB @BBǤBB-BBBPB|IBOBWB BkBIJBBkBBJ:BTB?nBB"B@׳B9B=ײB3MBSBB*,BB3BBBBB `BG BBBBBUBdBB/BBB܌BYB_JB^B4ӊBVBB~kB B=B B BOB B BB'BбBGB$BBB&XBGBxBJB1B>BB#BrBuԮB}1B@B BHB hBcB`BBB|BtB#˲BRBBpBͲBCBB#BBKBBBwB9VBw6BBB^BqB6BڞBlBB?ByܛBB#DBR5B*BfBBE{BB6B:B>LBȡByBtBMBqBBYB:BŌB BTBpBzSBqBQBB֔B,BXݩB BBxBx̬BB9'B<۬BpGB"'B=BxBBBT_BB3BBB6BEB9VBnBBBDB BmBSʬBTmBHBܨBkB{kB"B%B\۲BBzB;BxBZB tBKBeB BvHBdBwBBB^BxB`Bv7BBB= BBFBBBBBBDXBS5BdB;BB"BB~oB.wByBBˆBbB~B(B5BZBζBB`B;FB@+BB܎BϯB^BmB9B&B!BkBCB?gBBBДBBJBHRBBRB BgӯB[BXBBjBBBBfÜBSBBMuB%BB4'B}BBҡB_BaBwB@BB" BA.BB|BB B%B BBLBBB BBB BBBKBBإBoBy!BNB~B)BBB\4BBxBzLB BNBjB}BBBBɗBBCBUBB9BKBBB-BOBHJB:PBwBBBPկBB+BB4BʵB:5BlB̵BBTB)B|ȰB(BJBBB?BF]BiB۴BB6>BzB,BBBBӦBYBӴBIBB2BBxBʳBBEB+B̮BmʔB]BlBwB-UB +Bz&By`B~BՙB_BvBzBgyBNB BTBBĞBB"B_BԲBBBBBBBBmBH[BBBBBByzBݱB]-BVBBSB+7BBB BB!BABB;)B5gBBBH BBBsBKBBBBrB BҳBV,BvB^BHBBZδBpB.B{EB3B BĵBDBB{9B (BBBkYB&BB.BBB BȼBBB;[BթB_BnBOjB:B|B!BBBbB鿌BFB.BpBj BB$BBaBfБBGPB!BKsBZBdBvBǧBسBBB+Bf`BŨB,BBs BBlBa$BRBBP;BBBBGB"eB;B}BB0BBB\B0B7BBbTBѭB{BhMBVBɩB(BD%BͩBBZBBTB BNBS[BվBB5mBnoBIBBB,B4B'B2BBS,B*EB=3B}BgBEBDϕB_BeXB BűB?BBY֮BBBBBȥBJBYBtB«BpB)BRެBrBBBBsBBBB?BðB!BBB2,BٯBBBGB BBDzB9BvIB/B2BB*~BWBsBBjB:B˳BʱBBϳBBBB7nB1BBѲB1B-EBy@BwBB 8BxqB"BBBZBhB DBSB͸BYB͇B3BtB*HB_BGBBB BB"B B(BEB'B.!B|B BYBBtBBԔB BaBIB&BW+BBBŕB$زBMBFWBnزBBZBBBlBB-+BZB/BSBB-@BTB vBSB7B(BtmB6B٢B4aBBoBOB2B/;BVԲBBWB`B[BbBIJBBBRԲBBpBҲBzBjBbɲB\eB"B6DzBEBB̲BBB"QB^B6BWhB>+BʕBB]DBBBFBsBB&BGB BBwuBTBJB B"BBbȞBBeB){BXBnBB2B7BKBBSBBB+[BZB'BtXBBBxDBP4BgB BccB&B+BuB!B{BdB%BxBޞB.BƮBBb@B=B}BQYBB^IBqB*BBIBB週BJB[BB쐯BEBVtBBBֲB_BB%BcӯB>B<_BB@B|BBHBvB B笋BfBDB9XBժBD&B.Be˪BBBãBRB5B{BB]BbPBB%(Bo3BB%B,BڏB,B/&BFBɸB%BBUB 1B1ABB3B;B垞BKB4BNBzB=BB BwByB(BBB^BBBBQGB BB7FB #B1B0B5GBBcBdBP-BPįBbBUBqB%>BBBBiB6B{ܫBB#BNB0BҧB}BB!BGB6BBB6BͳB MB)B]޳BABBֳBB.BpijB-BB椳BĩBB.{B"]B2=BbHBݨBB[Bf8BNBQBzB`B&BثBR8BBǥB ۻB*BH֤BBqBڣBHB B٢BB:B~ӡB~BB B셵BB~B\B[BP'B3RBtrB˜B~ZBƮBlBcsBF6B B B8BB+BBTB:B'LBB~BBBڔBVB̰BBWJBB!'BgB5B1BBiBBBHBBBBB|lBqBYBB*B ߦBVBJBKoB=BI BBB7BeeBKBB2BoTBB]بBs!B:BΨB^B$BwԨBWBCBBBBBKeBBLBBEBoBBuBBBݎBBnBBBBPB[[BrB:̣BᵪB[RBBBtBg5B 
BBQJB"B|B:_B&B0BM\B$,BBVBP8BFBGBDB{B.BBBZB B-BB~ݭBB/WBUB3ϪB״BB ̪BB\'BêBBB'BBjBfBByBɪB`BE͈BzBBsB*$BBJBHLBB΍B0^B"B3BJB>BBBJ&BMBBBaB0 B4BLΪBBB⾪BBBϪB~B)BB B4FB0B:B BlB6BBB-B@BBǚBJ٠BCBBqB!B)BLBnaBp+B@BٗBBB9BtBקBƬBB|BͬBBeBuBӬBB BBެBLB$BB!BaBbBإBBBS BxB5BHB3B?BBbدB߬BIdB oB6ҬBB.BϬBc B$mBǬB;sBB)BBB~B B{ԱBm5B_BOBHիByBB[lBB2BB|BְBBBjBIB\BRB1DB޾BRBNB2lB@B,BB={B ܦBBڭB<N5B$BBY#B BBBB3CB B[BBBwBB ˱BjB4BwB2B͟B3BԆBWoBBBBBWذBB\BܱBB=BBL B沧BEB*B=(BxB.BBrBj,B:BlB"BEBjBB'BqBBTBuBBܰBY}BK"BMB銰BB4ͲB,B*BόBٵB B"B@˰BBBB㱽B۶B풰B,BaBǹBB2BBbBiZBBBp}BmB( BB BmwBB BBBf B6QBЯBBBBB+BBBBB]BBBsB2$ٷBBJB^BMBʟBOBBI>B%BujBچBBxBUBBvBݰBm BbBBB%B ]B{BBǤBܰB_BBB#BBByrBBBBBڮBB&"BBBxBB BBABeBBB6ūBB߳B BmB|B?BŮB.BXB ήBBbBۮBqB`BBBEBKBvBBBgBţBt!B@B-BBBABBqBB#BJBqB>2BDBtBlCBBxBYB7BBkBBZBBB~B> B&BBJBgzBBBBo^B˰B/BB,BBHBMB\QB_B9BB{BB^B BBaVBFBRB0BB B(BB{BMBIGB{BBuB8BBB4EB,B0KBB>BoB_BHBBBIBBBBBvB5BVB$B B/wB@B(BcBtLB BhB/B۩BtBBBf{BޒB BBAB BrB BRBװBBBBmmBuBBB0B[8BGBBiLBB4B\BCB{B'zBHB%ϲBڣBBBܱBBd B&B87BB~BlBEвBBB5BwSB=MB`/B^B!BՔB kBBPFB~B|B+BB B BBBBޯBLBBBHBVBBBqBB~B9B0߯B(B?BүBB5B쾯B-BB߬B98BBB/BjBBB6BƯBBCB˯BMBBrدB)BȬBBB(B|B BBBƌBWB~1BBB PB`BB3bB%BBiBB;BuB-pBBB[B?BBȴB~BpBBhB BaBOBBB-BYQBBBԎBnPBwBYBBH*B!ؑBǯBB OB4ͯBBa̔BѯBB_BBBB)B~BoBB%B2BByBݜB߯B^}BBrB>B/B4BBpҡBBi7B/fB BPBؤBB6YB?BB_BBB fBG BٮBjBnBԮBCfB̫BۮBCBTBuBAB>B{B~BRBBXBXB BBTB_"B|B3BP=BBB8UBZBB`BwBBp`BBІBkeBanBBqBBMB;B*B˗BBbBŵBBHBڵBBBB/BMBյB'xBB/BBB%jBBQBFBUBBBBpBEB;B BBЬBB℔BBBB~dBOB%B[ZBB71BlUBpBޚB{]B:BBbBNB;BBBB_BpB{B,]BЪBB$_BZ3BBSeBʘBB]BKBBu0BvBBNBB3B0B2BBȵBBV7BB#BOB5BHB2ByBBBݸB BTBKB~BIȰBSBBPB9BW0BIBƒBIBoB%ABoBBUBBLBYB#+B3B'[BɘB`BBhBBxBB>B BBBBBqTB"ѲBBBƋBAjBBWJB;B-BBB٥BȱBmB BۦBRBBBBiBաB(BɫB由B6B'ȭBBBҭB BuBحBFBn̫BBpBB5BABLBaBiBvB BXBaB5B'BB.UB\BkBYBzB֮BfBB8B:BB4BB^BB%xBBBkFBB2BNB#BLB BBBFBBBBʵBbfBBBlB B:{BpBBBB6SBBB[BMBEBB:B`BBpBMB}B3BBS"BóBB.әBճBpB;BMʳBB5B嬳BVBB|BBB+;BB.B BmBأB?BB|BWrB[BBaiB\BeBBBB°BB0BB*ɲB BDB%ղBBƛBqȲBBBBBBB>BeBMnB"B^DBB $BCBEBFBdEBBiBB0BׄBBSB&BbBDBwȮBH"BʜBBBRBB/5BBZBBB^,BBLB&BBUݤB0B;ByRB6B+@BB:B[;B{B=Bu3BoBBTBBjűB=gBBR/B|B@B~B㞱BB#ḆBUBܲB B.B߲B]B[BB-BBbB(B/YB'/BғBU@BBMBLBB–Br_B͡BFLBuB-BBWBgBƉBBB.B|BkBlٞB3B.BJB,BB)BB8xBˣB֯BBZdBk¯BGBB?B 8BZBB*EBĩB,BKB *BxB+EBBBlB#!BʭBiBBBoB|BBwBBVBBnBرB-B /BBBBkBʯB B-B#گB1jBBBBZBBB-pBBtBʵBwBB)B00BAB:BpJB_B=IBDuB B'MBBBCBhBnNB]%BTKBBBBPBBxB]Bi.BVBÿBδBDzB$BoqB'B9B4BBdB~ BiչBB!BBB6B7$BWBt"BĵBnB!BִB|BhB2cB7oBh2B׳BdhBϑB\2B~nBEbBByBqB^KBB\B84B,B 5BB?B.BvB.BN2Bq{BBYB(jBHB3BBfBBBBaB֢B5BBBBDBSjBBBGB%BB=B&'BXBKBBk4B\BBYBaBu\BYB'B2BB⽱B(BBBB2 B/B BΩB/BDB#B B)fBOBMB[BBBۤB4BJ2BӲB_BTBBB|YB_B$BwDBB{BB-B~B_BZڭBٛB]BBB BKGBÕBBXBBqB]1BWB`#BB7BЍB_ȭBXB>pBBBIBûBBuBBBؓB٭BB^3BB\BABBBp5B3BU)BřBSBxB2OBYnBBٜBcBB}BH:BΗB>9B$B(BޡBm"B|BnB/$BkBB$BB0DB$BzBdB`)BBwB>BpBHBcBDBBD}BB BBBBBbBB6BXBfBB{خBPBaBB^B/B/BBBޱB)BBlB&BB@BB"BZBVBBBʳB%B B )B?B\B=nB4iBBBBBBB-JBB[&BBBfBBB5ABVBB0B"BvBIČBBT BֽB۴B.Bt׎B#-ByBWBͳBBB}Bk%B˞B)5BxB :B6Bs=BM͖BB+B7ZB BBBB-B܂B6BdBW2ByHBjBBXBOBwBusB BHBnvBB}BBByB BB>BWB%DzBRBB1ƲBNVBKBڲBHBaBݲBhBBԲBBB˲B1B$0B@βBB\BaӲBBWBz)B BܒBȮB0Bz9B]B:BB9B:@BB/BUBUiBB-kB֙BֲBBcB7B\kBJBIBBB̲B]BZBBDBBHBB/BBxBvCB1B 6BYBBBczBB+BdBBB>B.BtBRBUB:B BBBpBBBBܕB?JBBzBBBDBpBijBıB)B[BܱB0B-ʴB`BB ?B*BHBB?]BiBG B2BhHBBgB[BB0iBBITB^BZîBB7ɉBBtBzBdBB5(BO2BBkՎBBBpBBJBB B@BF\B@4BPBB YBGB-BN}BBBJB!B%BBmBMB BB@B BBǝBBRBjBŮB6eBB~BV&BE¢BfB B@\BdBB`ڥB dBCB@BcBBRB`BIB=B]BB`BfBBBvBiwB6BpB2BB搮BBBBV}BB#BBeBۮBB(ҲB}BBztBBP[BoBBB|B_BBB2BgB LB B]B BsBe B۵BBVBpB?BҢBByB{BBįBCBB?B BKB8ByB"^B0ݲB,BBdBB}B]uB'BɇBIJBLBO B~B^BhLBBBLBcB}nB4BBBB1BA_B=B B&BخBeB$BBîBBB.BUBvBBy5BBBBNB9BBךB9TB-TBRB.B(BDBm̮BBBHB%B JBwBLBBuB&B `BsB"BƥBrqB`B)BonBBBlkBͿBFBfBBGBpBB1BBsBjͭB^BB.B4BBBBBByB>BEîB BBaB4BBLBB`BeBYBBB_BwjBBVB$شB$BeB:BU 
BBBPB4B˵Bp.BTBBSBơBBBBnBگBCBBB4B\BBQBBOBBIB#B.$B/BҁBBBsB^$BfhBm$BN;B gB׆B_ByBBBoBd;B\BBJB跬BBBBBPBB,BBB B]BɬBF BJBŬByBrЕBYB*B-BBBmBݬB"BBYBGBoB/BB3;B&BXB0ܞBUDBB}BnBBB Bz~BBKˣBBK+BRB~B6B廦BB 0BBtB $BfpBQBBBڭBB?BBǼBBsB?BBBBBB0BсBBB.|BcBBB\zBBNCBxBBZB}B.rBٮBBBBpB2BOBBOB`BBBmB@B_BB8BaBiɱBB [B?BBJB_B8+B'*B2BBBBABXB|BEB7BxB8B۴BrB!B܅BBBJBĴB B'B#{B駾BB{HBXBB]Bw"BBFұBCB[BKB,BZB7ϰByBKBBBZBVvBgZB{BXBӿBB~B LB̴BϰBBGBT߰Bn7Bm BBB#BBhB1BeٰB|BABzӰBBSBӰBTB_cBB=BqBrBB=uB BbBbB B?B-B7+B0άBf_BBB4BೄBBm.BeB.MB3BBwB1@BZȉBVwB>]B5{BTBB+B0BB̎B*BBbB1B*qB5B0BbBnKB;BgBͨBQBSBBjBBBBBT-BrBgBBBB^LBʭBBНBͭBsBEjB`BqBEBB)BɢBeB BvhBBJBBuBQBIB­B]IBBέB5;BBԭB.BRBBBlBB=BܭB BB) B BcBd'BEB B41B84BNB[(BUB{(BBrBBB'BrBqQBB*B*B۷B!BeBBI8B8BJBˬBBdBBBaGBBYBrCB#BaB.LB/BB{TBuLBI/B]B_QBɗBeBHBgBGiB$7BBuB!BB}BBMBgBBBQBϬBʏB%B|B>BHBBBڥB2B93B[TBBBuB蔛B&ABzBBB9DBxB(BBB3uBB&BB?B= BBs\Bo|BB'YBץB=FB]CB%BaB&BoByBB*BΈBBŪBʍBJKBūB昬B6B BBkBB[ʬBB2jBԬB=dBm+BʬBBٯBhB=B zBBBCBdzBZB1BmB{dB-BMqBBՂBbBi B}B>BXB̲B7BiB B B'B@BTBTBhBBɯB)BBBݱB B=wBBhBBsBIBxBEBB BBB BB*BXBB>BBEBNBַBTB LBWBpB)BpBցBpB5SB5BBO;B˫BWB/BͫBB0B B陛B=B-RB5BGBxB̘BUB公B^`BWfBUBBuBx}B匔BBgB]#BxBJBmɑB B,B؍BbB BtB(BB|`BxBBABԤBwB-NBBBwBD^BBBUBRB!BiNBYBBOGBsB'BOB#hBB[BPB­B%BɎBBZB{fBB#B&B"ʭBB>_B*ڭBMBBBB7BBTBBB BMBE.BBB6ĘBӰBBm>B栰BBB*BBeBBeB`BǰBB8BBBBBBVB(BٰBBB`BBBBBGBcBFBBRSBBdBBBGB=BİB[BBZBܵBݭBBBBd=BQBB4BrbBBBB4BCBؘB]B1B,BBeBEɱBh BBBSB_-B-BVXB?BBװBB:SBBDBB BB BBBnBfBʌBBpBѓBsBNͳB BAnBBͮBBLDBB B;UBR(B{WBNB!QBB7,B=qpBBuBB;QB3vBԿB2BKxBY$BBzBpBAB|BDnBBBB,BXBBBBn BABBjeBoBXBHBBBsB\B{BCB*5BwVBӾB2BNsB}B1BBBZډB0BBkNJBȴBnBܜB7dB*BBBB.BOBFBB:BJBsB˯BYBiBfBBwB0BOBQB>BQBEBYB7WB BPBEB.xBBBBӯB.BˇBB*@BZ%BBBПBүBtB%BBmB0BBoB>ΤBIpBBMB7[B=,B-BIBZ7B#BmB4BTBBBB7%BgBs>BΓBVBmBBBB&BBBoB B0BbGB_BqBBBBPB>OBzB(B\B+BRBBB}BBBBBf}B]BB6˧BǬBWBBVBB]BBvBB.B$)B马BBHBBg3B[ZBBRBBXtBioBZ]B9BuBBBbB/BHvBQFBBB#BB}B+BMBBBKBaB BB^B BJBYBUBuB3 B5BJB>BRBp0BBYBB8ѳB/B BBB3B5{B+B_B2BeB# B?˲BөB^BQB Bn%BԱBj0BBWB>BbqBϰB0]BBB2*BBj BP‚B LBB:^B@B] B5B^B=BWBBcBBxByB:B/B&B7BLBBvBB4BݥBAB`B?B&BBB ÒB2ʭBw;BbBq٭BD2BɕBBRPBBQB2JBvB+4BBBVBzB倛B9tBwBBhBBB |B-jBQBIBYWBBu/B;BBX,Bv=B5Bz.BJBB 0B KBB@0BFB_B1Bu?BBBB(B B<[BBZFBpBкB#mB(BUgBNBBBځBBښBpBӮBBM>BBŒBKB BsB{Bl BLBBBBcB=B B`ʴB B]QBa$BB6BAdB 7BB[BcBFlBͮBBhBmcBvBzBBBB:*BÙBB1BvBBBH)BB8BמBbBqB&|B6&B/"Bs$BbBcḄBBYBiBgBBQBeCBB*B)8B5BBEB B1BBYBBBmnBB߭B}B߹B$BY{BsB[BNpB=BZyBPsB(BoBBOB4BBڷB1гBBBT_BBX~B0BBB_B,B12BTBcMBBBvB(Bc BBB(B{BnBB?B BBqBB(BaBOnBBdBB/BB5BߴBSlBB$ByB%B\B?B|BBmB BeBJBB8B$'B(JBBBBB BBBTBL{BRBBqBU,BBB.BBB?RB*Bh޵BxBB B:B:B1BoB{B"UBB9B.nBȱB*UBzBٱBQBfBsB6BaBB$B>B{BUB6B+`~BxBB4jBB-BqB7B;BjBkBB@PBؽBBBBbB6eBsBB3BYBtBBkB_yBBBB!BBBB&B؍Bo B^Bi(B>BBʓBBBsB9BmBBB_B BBB)BBrB痛BBB}BMBBSBB!}BɟBj BUB6rB,,BB$B6BBפBjŲBBTBjB`Ba"BLvBBBBSnBBcBWlBZJB,B1BB㜲BB ;BڡBB-BiBBBB>B.B+"BB0}B+PBˑB;1B¶BmB`B|BB[B BjB¢B׶B@BSB⏶BB$B8>B[B'B>BtB)BBB:ҌB6B#BzBδBB!B:EB6BUBgBBMBB-=BBBۙBBfBB!BfBBB[BBBűB-BvBB|BBıBmBTBYֱBgB7BBTBBvձBBтB#BBB?zBB}BgBe)B(B}BB1BBBfB|B9BvBԺBDBBB"BBLBBB9B#B 3BFZB1B B4}BBߴBIBBb9B²BVBBBBPB#BBJϳBlBEBBBBwB06B@BǀB^B!B6PBmBBBq=BBQBYB6BNB,oB"BBeBYB>B~-B)B'5BүBBލBܡBLIBBBB2B&B*NB=ʒBB!BGB.BN BȩBBBBDB4BB BBBD+Bc5BNBBjBBBաBB%B HBBR4BBZBvB=pBB=BܦBBaBs@B(BBɠB}BBOB}BvBUBsBMBLBBJBpBnBBYկBȧBbBܰB渮BBձB,̮BXBoBsݮB;BaB>B%XBBB9BxBٮBBvB=֮BGiBSBܮBBBBLBBB[BBBͧB7B\:BB'CBe{BDB/8BBB B;BBµB3)B>FBYB.BBBBBqBԫB BBB|B}BBBeBBBAB;BIBƉB,ZB $BPwBhB6Bz*BRrBzABގB|BpB2BBB8BwBjBϓB^ByMB^CBXBEBBoB%BnBB BBB|BԜBBBvBBBdBBB̡B6B]BPlBzNBHB_B`B9B._BEiB̟B{BuBaBN BB,uBIVBBNBBʭBL BB٭B#BƭB[BULBcB2BBtBU&B\BcwBBG5BñB/BBCBpB0BGBVBbMBD6BBBB BBBv BHB3B:BYBSBTBBUBu_B[,BBBPBzBcB! 
Bo}B'BBlBB2BBBB@B1xB[֪B~BB'BcBrB?rBAB BUBBZBޫBJBBBBBS%BBc8B\IBJBzBTpBkBBWB7BBBB BϬBB` BB$BB2B7B|ƱB8]BlBtxBTB4~B= B+B9BQ~BBG7BB̬BB>[BxBBôBBBB4%BȠBJB-BBB5BNB2BݵBB~BB_GBBIBB#B޵BoBzBеB"BBKB#[B(B9~BVB&BָBkBQB_ByBB=BBFBYBBBLB.!BBLjB@BUBN2B0BBjBBBBN>B͡BEyBBBJۏBBuB=BQBB nBN1B|tBAIB_pBBBŪBͪB]B*B:BBBB B@\B5Br]B1BeB8BBBB{BuӫBF BMBBBBBNB BέBg^BBZBBBIB%BBBEBJBvBOBB(BXqBe?B_BnBBӴBYBBi(B,B=BYBBBqBxBGBV|BBeGBuBJǣB1B$BxBuB fBGB|BقB7BRB{Be,B@$B-BLBUB ݇BeBgBrBB]BB;NBB.BԱB\B;7BlvBBˏB;BuBaB%B BB$BBӊBJBAvBBB&QBBf*B%7BB;SBBbBlBBBr}BwBƝBiBzB>zBCBBu%BdBBŢBBLdBRBͰBBʥBB$B5B0BB͐BsBFBBذBhB1B)B*0B`BBB{B BBDBDB.>BKB@BB¯BbB`wBB6|BBaIBB[PB B`B&B=YBűBBBBQBB BsBBYBBB B6B5ABBBBwٳBJXBBBHųB#QB#GB?BB B;ŴBGBcBBRBBSdBBBIB$BuB?BYBB"B2BBB$B!#B##BB׺BBSBvBBs&BgBH{BxBB-BվBBF BN0B'~BEBΉBwB̭BBg}B֗BPB҈BgBǽBBbBB*BB B[BƯBBRBBj}BxuB!B1SB:BCBcEBwByB)BrB۫BwBZB,BЪBB|B'B(BD۩BjB#ByByBʹBzBo}BB[BGBB<$BXBDB.B*FBBzOB BB:BSpBBljBBBLBHBB?BѐBn,BB@BH8B\LBCBUBBBBbBB,B6B%BёB߳ByBtBBB B MB,BaB.BHB+B7BBBLB}xBYB0gBbB ByBBBB:tBoBfBu;B{B̡B%BңBsB/.BB^!BNBM/BB~BfŨBBшB-BByBFBB&BBBBfBBIB+BhBvBB#+BlB\B BGB B+˨B)lB6BBBBϠB BBBCB(>BޓBB͉B*.B&XB$^B㐪B;&BBBTBB~BBeB9`BB[BBYBIB*BB3FBB iB̖BBB/BiPB?By)BqBNB#BzByԩBa BgvB4BCBB ΩBIBqƟB̩B8~BvBB,B6BW9BBpBBBȥBuB3B^B;B4BFBBTB`Bd BB`BBDBUBO*BB:BCB`B$BFBB٭B-BBB BBWVBIBopBBBWBB2ڰBQBB3BB,$B%.BB[BB4B,B9BBypB`B/BBB]BkBhBB2BcB5BB5԰BB2B8B9mBBIB=B>B쓰B8BڷBBhB;B ˰B-B˘BvBaBMiB+ BqBBS BBBBPBtBBl4BB\BBâB_B-BiWBB BDBBBTBlB7BB B`BBӴBǚBBB1ݴB5߱BڒB״B+BB´BB:BBWBBKByNBBBBE^BPBN8BB9B@B+B^Bc^BBhBB4BBնBýBXBۜB0WBҰB}BvB>ByB¹BלBסBDBgBgBBgBxVBB嚮BWB BABB,BB7~BBsB3BBZBpOBBLGB$BӰB7BI)B6B BBBB0BBBmxB.BjحB5BKBϭBLcBqBBBB{B(B`1BO1B2=BBBxBBB&}BBB;0B:BNB~ BTB8B1B)BB~BP Bt(BBiB8BbB3B'BBBBӮBBBBBJB< B)BsBB"BB/BB=BPBcbBƛB*xB7JB5BZBBxBB B{:B[jB>BġB#(BSBrBBB"B!B>UBB:BEKBBMBs-BIB`BB0BtB5BBBB'BBBfBBDvBdBnB*BeBBB$BؒBu_BBnBBnBBvBrBhBB(BgBLB$B:BJB#BBdBVBI cBC>BӀB} BmB,,BbB?BB|hBwlB[BIBaBxBBB컈BϲBB\BzBHBPBsBsBVBBBs?BaBB̐B1B+Bd_BBxBBBsPBBvBB\BBBBI BBB B B%|B3B5BB.FBۄB-ƞBXB܊ByBiBܐB,BlB3;BYأBABBkBPBnBBBBXBBBB BB5B,BB4BBBIBVB?BBZbBNOBJB,~BlBTBBBB B몱BeBIBRƱBBBtBG0BlBHBB̳BBB BMB%B(BBLuB*BvײBtBmB+*BeBBBlsB}óBBUBntBpB7B#BBBnBBIBB"B sB[BsҶBFBBKBzBX|BRBBBh$BܚBBBFB B)BBBZBBBVBqB軮BB.BCB`BhڵBBBDӴBKVBBֳB;BvB۲BpޭBBBBSGBBkB7B BBB(BʬBBRBNBB˅BxBBɬB&*BB&BƖBjBbBbBBܪB,B4BHB`B,?BKFB/zBB5(B̳|B)ζBB B"B-B"BYMB$B.B"`B˱B`BBBByįB BBB 9BB1ձBVBLjBB*zBB&BBKBBBBسBBBB!,B0;BVBixB/BCB3*B BSRBBBBܴByB.BBB\BBBiCḆB8AB$B<"B-BBB B,ٖB)BŁB/zBz?BBBVUB BBoB'>BDBUKBBԞBB:B\BjB$BjBG)BB̡B"BVBAB1BϋBæBFBЌB$BCPBB~BOB}B ܪBQB\oB2BXNBpQBc|B8B$B߷B&ByBܯBk&BbBBp7BSBBбBNB@B뮲BlB"B҃BIBFBBBeBfBJ޴BB_BXBwB1BϵBZBBCB껯BmBKBԯBUBdBIBWB4(B B;ZB>B`BRBK;Bm4BB4BskB?BY)B˷B"B# BBB&ζBAB@BtBJBܝB[BBBBUB^aBOB JB BCB.B,B)B]B9BDBB5[BYB7BBBAB~BB?BBZȬBBB>BBOBB(BxBNB\BnձBhB1BCBBҞB>BƕBpBBnBYBI1BIB-BdB'B BdB2B_bBaBBǴB_B#BBB.BQB`B7BƜB(B&BB"BSBB6BX BB:B>B38NBBcB0@BBBm/B?\B|B%BqBOB>=BBBTBӯB-B萱BBѭB,BF8BABFBBђB3NBفBPPB^WBnByBNVBW.B1FB]BBB4pBcB>,BJBMBfwBBB-mBnBU;B-BrB.BB9eB{ B;BB̶B2BB pB/ӱBsBW BN3BDBBܣBn8BMBLBBB B'BBwB*BbB6BHB| B\_BB?B'BBlB۳B BB%B1BBD)Bn6BhBײB-BBBqBϺB+B BIBYBѦBXܓBԱBJ4BwBñBB BȔBB܋B[BBlB["BB8B8BݜB B BYeB (Be7BBʟBJB B6{BCYBBu*BFB9BäBB}kBBBlBBCBBLB\BB}BlB^BLB|B&0BaBs¬B/B!BŭBGBbBB}kB"BFBB BBBGBBαBsBBBB BB"jB&nB$BBBYTB5 B̳BBXBгBܲBB³B1BBߝBB$YB_B)B+B BB;BB BB~BB4B@B ,BB^LB#~BYBszB;BoB}BeBB2B.BjzB֥BwBo\BnB3B|(BBO\BKBw6B4BBBBBBKB7B~BcBBB?BpB#BOBBA)BMBEBZϐBB0BkBBBoBO;BBvBBjLBBԳBzB1B߳B7pB+BFBBݛB#B3BB?BB.BLBYB۠BiBBvB{BZBB]BGB˥BQBUBtBY+B"BB2BB[BRUBB5B|BkfBBKBrBYB,B6BٯBBcBNB:BqB9BBCBȎBmsB BkBBBvB"B B:BSBȉB๡B|BB?BBBB౬BBBŬBB5MBެB;BBBGsB_کBB,3BB(BuB B4BtBBKBBBlB7B 
îBJ~B!B䀯B{BqhB%B:hBBѷBLBI/BeABX*BBñB'B(B9BBBBB!ByBBBB2BZB,BBˬB/{BIBxBB߲BB(BQBŴBބBfB9JB]BBzB(UBBBB98BTB[`BBꆨBB|B"IBtB&BדBBBjB1mB BnB0BBBB}BeBB%BBB;BNB9ųB~6BSBBB#Bo_BCBՒBBBB|B`BVBBBpB1BÆBBBuBìBB%B٬BBR؋BܬBPB싍BfBBY?B$BBBhBC$BBBmBMBB`BsB;B_B&B B=B ĮB]BǛBBnlB+XB9BB@BڮB+BBB_BRB~B˗BBjuBB}BjqB/BBmBIBGBhB$BBkB2BBBFBMBݙB(BBfB>BBBBBYʮBЂBĩB&BBB-BBRB2BRBbBd=BYB2B~9BBBB9B BaB?BcBZƴBAKBB}BaaBqBOB/BRBnBBtBxBHBBiBKBDBBB2BB{BѰB>B㙴B8,BUB7BB{BٳB%BjBBBB BՕBBVB~+B4&BBxBPBABB~BbB`eBB5 BBBBpBBJBBeWBڳB?BddBPB BfBLqBЏBjBBB.wBB YBNBqBBB1B׵BбBB=gBбB BBıBB̡BBB|B5yBBB%BA:BB ɦB B"BQB~B5B޾BB2,B BN B#BsBTBB˭BB7BBBBkSB|BBtB!Bb(B:xBY1BB5LBFBBB^BvBQuB@|BWBHBxB26BwB DZBBB`BB6B3 B2BedB.BBouB`BBwlBnBB(^B8BoBHBAB3B!B9BBBWߵBBBBBB.BBMBB3BBBDBg1B.B BſB5BBiBBøBgBBBBBBgBBB@BYWBHPB">B8BBhVB#CB=BBoBB5B B;BBiBQBBYձB0BEBBBcB"B BDsBAB}BB}gB3B~BB$EB,jBxBFuBBm2|B~BlB;mB{BB JBVBًByBelBZBȉBtBLB+B;BBGBB9hB4BшBRdB(|B۶BfBB BUB<ٌBj BuBBWB;BBLBƿB#BBBɒBEBxBgBBzBBBݲBLBB:B>lBgBBBB`BeB$VBx4ByBBDBPiBBKWBCBmBjB"BB_TB*B!ӤBB6BBgIJBVB!BqBBLBB.BBʲBB"UB@ŲB4 B֭BB>lBcBrBB~B4BiBEB77CBBpBB BƜBBIB0BB"B籟BOBBBBBABb¢B@BQB+BB>B|BB<B¦B#BGBmBCBBCB%XBmBdB0`B-BAjBkBBm\BeBLB;BBB BBBBFƮBBBpBՁBBBm\BgBB7BB4B%B;DBDB'BB"B0B0BWfBB;BsBAB1BUB:BB'*BxӪB!BB=BdBHBB'BBRBdBpBBBB]BpBRjB3BPBBȯBBzBlMBrB BrBEBPźBaBBB3nBB=B)ϬB3BqBLB-BXB1٫BJBBcB&jB'BBBZsBpB1ěBfmBgܪBgBuiBŪBIB^BB׵BfB)GBZMB ~B>BB˞BBzyB@ūB9BB9BB'BJABPBLB@B*EB֍B%BWB'$BBBMBbBB)BB[B'BB ОB*rB/=BZB\BBBBBmB{B1B+ؤB4B?BN*BNBLBsBkB'B"B BxBdBBEB_1B̬BBBNBZ߬BgB@SBBt2B!CBrBB3B:BMB B]BmBMΰBdB3BtBLBQBB(BBo|B2B_SBjB ڬB BlBˬBB׳BM֬Bi^BK8B٬BB|wBҬBBnBBhEBB~BCBB&B%B%lBBʫBt?B^KBnB:B"BWBBBkBcBBBBBB{BBBBBBHúB#B5 BB_ÀB BBVABBtBBBLBꒅBGB)RBZFBݫB[}BB_BB3B%BgBUB8B BBEBnBwB FBtBmB<1BfBB3BBB)۫BdBw;BޠBBB'BqB~BqȫBKB3BoBB$$B{ BxBĝBWB\BoBS9B̷B!BfBp9BB7BtBnCBB@BBϬBwBBBaBpTBBxFBBBBӪB B{BB_$Bl_BB%2BBAحBILB{B+BdBBBlB&lB7B_BB?ҰBJB7BbB,B:BBBBeBB6PB7ԲBB8BY1B BqBJgB0 B>BytBBB8mBߧBIBkUBbNB1By-BB{BBˎB?BBBJBhiBqB{ĿBBBOBBpBBBBBB!B[1BB0{SBCkBDB :BzB]wBBBBB^B BB/ B\PBBkBB?sBȸBbPBuBx.B6ߐBBBYaBBBϓBI~BhBZB~BBBGBBƘBBE BrfBBBJBBȬBBӲBlB+YBӥB B&BlBBpB%B,BvBɱBZB BXBm٩BvBeݰBBrB_BՍBB$BrB8BoB6BBB4BBBa6BB`uBBBBpNBmɲBABnBqBBBBܼBTBBҗBB˲BLB뱟BBBVBOBݣBB BeEBԣBձB"B3BBB+BBBBBܱBBSFBBTmBӈBB|MBϬBB B B BB-B3BBOBBV?B_BBJBPIBBEBB6BBB`BB2BBByBӾB BBBֲBBjGB B9lBWBQBBIB}B;Bf3B2BBAB#B$BBBBXBjBBB`BwBcB'ʲBnBiBBt BB B(sB^BYB;B{BSʯBIBjB6BBB6߮B+BUBYBB?œBB|BBBBBؖB1BBzBQB5BBzBeBBnBB|BaBwuBsB,BB6BBs0BVաBBNBuB BiSBc{BB[gBBBvBΦB4BJB"BFBMBB2B]°BB?B(BBBBAB{bBB}B}B^BBABBsBBAqB`WBBB]>BVBYB,6BgB^BDBBBfWB4KB/BbBĖBEBnBBLBBw-BVBBszBqTB_ܮBVB5BBBBKZ!B!ABBRB1B[BvBUBʂBt BB0jBBBBǵBḂBِB`Bb{B KBF.B&BBMB캌B͜BiBbB3BBF BBB契BBB-4B$VB#B”BdBBaBB$ BqBαBB̍BNBBBXMBFBTB!FB|BѝBG[B BmB4pBXBBJBk>B̢BuB7BbBPBBlB)BBZBBrBèB%BoBB^ B9BBa]B B"BBfB1BB BB}ḂBI;BDB!yBj^BBc#B}BB0όB6Bd:BzBUBlBBʴBBBMBBB齫B%|BHlB/BclB\ؕBBs2B\BiЫBmBpBsBTMBkB]B^?B9BBBBR BBhB;BB2BMVBBBآBlBeBgB{~BBHܥBhB~Bt9B|BoB؏B袭BAZB"BB!7B#BڭBBcKBBbBXBB9BRB BB4BOBtBBYBBXB BGԱB1BBBBB$BBB;BBBdBtBtB BtpBQ|BBpyBM.B B`BBV B}BO_BB/B`BGBðB@BUB=װBŝB ӴBEBB>NB BXBɵB(BB1BHB3B8BeBRBﭶBfBBBqBB>B]ޱB9B B6'BBBwBBtBϲB!8B1B4BMBӵBB BuB%BaBJBBBhĴB.B@BZLBBdпBBӰB&pBwBBBsBoBBB@gBgB߂BB@9BBB1B{BUB HBmBJӾBzBBQBBABcBBiBKBBBB8B@B~;BZBᲱBB rBʱBpBBdBBB B?BǮBB2B BBčBB;tBdKBBnB-B)BiBBgBƆB[QBcB_B\B"mBwBBt5BB\bBBauBðBBDBm%BQB(B[BbB BhB=BiBBB?0BABBBBBB#ۮBcBjsBzBB BCBBB@BATB'BdLBBB*B'B:lBGBBBzBB0BrBtBX=BwuBBB]tBBBrBqBpB)pBxBϪBZmB7B'B\uBݭBkB_BsBB\Bb'BBB'BB!BuiBZBB8BB)ɮBjBB[BjҮBBBЮB0BwBŮBBLBSBBsYB;B9BBˮB&BBٮBBY3BB&%BMB BqBo^BFBBS\B#B.BF;B$¯BeBoBTBEBiB'/B@B=BBABMB7hB=BߕBqBBPBKB(BBBBRBBBB,BVB=BŝBOβB˪B\BBG,BFBAB­BpBBu\B:BıB!BPBBgBᆧBlBB B eBB/oB!dBBBЫBTB[B@BABVBB3B"BBJ?B BIJBNBB ]BaBw2B,QByBįBBBBBBZjB BƱBHBd{B~B!BB 
W = <double *> malloc(BS * BS * BS * sizeof(double)) cache = <double *> malloc(BS * BS * BS * sizeof(double)) sigma_block = <double *> malloc(BS * BS * BS * sizeof(double)) # (i, j, k) coordinates are the center of the static patch # copy block in cache copy_block_3d(cache, BS, BS, BS, arr, i - B, j - B, k - B) copy_block_3d(sigma_block, BS, BS, BS, sigma, i - B, j - B, k - B) # calculate weights between the central patch and the moving patch in block # (m, n, o) coordinates are the center of the moving patch # (a, b, c) run inside both patches for m in range(P, BS - P): for n in range(P, BS - P): for o in range(P, BS - P): summ = 0 sigm = 0 # calculate square distance for a in range(-P, P + 1): for b in range(-P, P + 1): for c in range(-P, P + 1): # this line takes most of the time!
mem access d = cache[(B + a) * BS * BS + (B + b) * BS + (B + c)] - \ cache[(m + a) * BS * BS + (n + b) * BS + (o + c)] summ += d * d sigm += sigma_block[(m + a) * BS * BS + (n + b) * BS + (o + c)] denom = sqrt(2) * (sigm / patch_vol_size)**2 w = exp(-(summ / patch_vol_size) / denom) sumw += w W[cnt] = w cnt += 1 cnt = 0 sum_out = 0 # calculate normalized weights and sums of the weights with the positions # of the patches for m in range(P, BS - P): for n in range(P, BS - P): for o in range(P, BS - P): if sumw > 0: w = W[cnt] / sumw else: w = 0 x = cache[m * BS * BS + n * BS + o] sum_out += w * x * x cnt += 1 free(W) free(cache) free(sigma_block) return sum_out def add_padding_reflection(double[:, :, ::1] arr, padding): cdef: double[:, :, ::1] final cnp.npy_intp i, j, k cnp.npy_intp B = padding cnp.npy_intp[::1] indices_i = correspond_indices(arr.shape[0], padding) cnp.npy_intp[::1] indices_j = correspond_indices(arr.shape[1], padding) cnp.npy_intp[::1] indices_k = correspond_indices(arr.shape[2], padding) final = np.zeros( np.array( (arr.shape[0], arr.shape[1], arr.shape[2])) + 2 * padding) for i in range(final.shape[0]): for j in range(final.shape[1]): for k in range(final.shape[2]): final[i, j, k] = arr[indices_i[i], indices_j[j], indices_k[k]] return final def correspond_indices(dim_size, padding): return np.ascontiguousarray(np.hstack((np.arange(1, padding + 1)[::-1], np.arange(dim_size), np.arange(dim_size - padding - 1, dim_size - 1)[::-1])), dtype=np.intp) def remove_padding(arr, padding): shape = arr.shape return arr[padding:shape[0] - padding, padding:shape[1] - padding, padding:shape[2] - padding] @cython.wraparound(False) @cython.boundscheck(False) cdef cnp.npy_intp copy_block_3d(double * dest, cnp.npy_intp I, cnp.npy_intp J, cnp.npy_intp K, double[:, :, ::1] source, cnp.npy_intp min_i, cnp.npy_intp min_j, cnp.npy_intp min_k) nogil: cdef cnp.npy_intp i, j for i in range(I): for j in range(J): memcpy(&dest[i * J * K + j * K], &source[i + min_i, j + min_j, min_k], K * sizeof(double)) return 1 dipy-0.13.0/dipy/denoise/enhancement_kernel.pyx000066400000000000000000000340101317371701200215220ustar00rootroot00000000000000import numpy as np cimport numpy as cnp cimport cython import os.path from dipy.data import get_sphere from dipy.core.sphere import disperse_charges, Sphere, HemiSphere from tempfile import gettempdir from libc.math cimport sqrt, exp, fabs, cos, sin, tan, acos, atan2 from math import ceil cdef class EnhancementKernel: cdef double D33 cdef double D44 cdef double t cdef int kernelsize cdef double kernelmax cdef double [:, :] orientations_list cdef double [:, :, :, :, :] lookuptable cdef object sphere def __init__(self, D33, D44, t, force_recompute=False, orientations=None, verbose=True): """ Compute a look-up table for the contextual enhancement kernel Parameters ---------- D33 : float Spatial diffusion D44 : float Angular diffusion t : float Diffusion time force_recompute : boolean Always compute the look-up table even if it is available in cache. Default is False. orientations : integer or Sphere object Specify the number of orientations to be used with electrostatic repulsion, or provide a Sphere object. The default sphere is 'repulsion100'. verbose : boolean Enable verbose mode. References ---------- [Meesters2016_ISMRM] S. Meesters, G. Sanguinetti, E. Garyfallidis, J. Portegies, R. Duits. (2016) Fast implementations of contextual PDE’s for HARDI data processing in DIPY. ISMRM 2016 conference. [DuitsAndFranken_IJCV] R. Duits and E. 
Franken (2011) Left-invariant diffusions on the space of positions and orientations and their application to crossing-preserving smoothing of HARDI images. International Journal of Computer Vision, 92:231-264. [Portegies2015] J. Portegies, G. Sanguinetti, S. Meesters, and R. Duits. (2015) New Approximation of a Scale Space Kernel on SE(3) and Applications in Neuroimaging. Fifth International Conference on Scale Space and Variational Methods in Computer Vision [Portegies2015b] J. Portegies, R. Fick, G. Sanguinetti, S. Meesters, G. Girard, and R. Duits. (2015) Improving Fiber Alignment in HARDI by Combining Contextual PDE flow with Constrained Spherical Deconvolution. PLoS One. """ # save parameters as class members self.D33 = D33 self.D44 = D44 self.t = t # define a sphere if isinstance(orientations, Sphere): # use the sphere defined by the user sphere = orientations elif isinstance(orientations, (int, long, float)): # electrostatic repulsion based on number of orientations n_pts = int(orientations) if n_pts == 0: sphere = None else: theta = np.pi * np.random.rand(n_pts) phi = 2 * np.pi * np.random.rand(n_pts) hsph_initial = HemiSphere(theta=theta, phi=phi) sphere, potential = disperse_charges(hsph_initial, 5000) else: # use default sphere = get_sphere('repulsion100') if sphere is not None: self.orientations_list = sphere.vertices self.sphere = sphere else: self.orientations_list = np.zeros((0,0)) self.sphere = None # file location of the lut table for saving/loading kernellutpath = os.path.join(gettempdir(), "kernel_d33@%4.2f_d44@%4.2f_t@%4.2f_numverts%d.npy" \ % (D33, D44, t, len(self.orientations_list))) # if LUT exists, load if not force_recompute and os.path.isfile(kernellutpath): if verbose: print "The kernel already exists. Loading from " + kernellutpath self.lookuptable = np.load(kernellutpath) # else, create else: if verbose: print "The kernel doesn't exist yet. Computing..." self.create_lookup_table(verbose) if self.sphere is not None: np.save(kernellutpath, self.lookuptable) def get_lookup_table(self): """ Return the computed look-up table. """ return self.lookuptable def get_orientations(self): """ Return the orientations. """ return self.orientations_list def get_sphere(self): """ Get the sphere corresponding with the orientations """ return self.sphere def evaluate_kernel(self, x, y, r, v): """ Evaluate the kernel at position x relative to position y, with orientation r relative to orientation v. Parameters ---------- x : 1D ndarray Position x y : 1D ndarray Position y r : 1D ndarray Orientation r v : 1D ndarray Orientation v Returns ------- kernel_value : double """ return self.k2(x, y, r, v) @cython.wraparound(False) @cython.boundscheck(False) @cython.nonecheck(False) @cython.cdivision(True) cdef void create_lookup_table(self, verbose=True): """ Compute the look-up table based on the parameters set during class initialization Parameters ---------- verbose : boolean Enable verbose mode. 
""" self.estimate_kernel_size(verbose) cdef: double [:, :] orientations = np.copy(self.orientations_list) cnp.npy_intp OR1 = orientations.shape[0] cnp.npy_intp OR2 = orientations.shape[0] cnp.npy_intp N = self.kernelsize cnp.npy_intp hn = (N-1)/2 cnp.npy_intp angv, angr, xp, yp, zp double [:] x double [:] y cdef double [:, :, :, :, :] lookuptablelocal double kmax = self.kernelmax double l1norm double kernelval lookuptablelocal = np.zeros((OR1, OR2, N, N, N)) x = np.zeros(3) y = np.zeros(3) # constant at (0,0,0) with nogil: for angv in range(OR1): for angr in range(OR2): for xp in range(-hn, hn + 1): for yp in range(-hn, hn + 1): for zp in range(-hn, hn + 1): x[0] = xp x[1] = yp x[2] = zp lookuptablelocal[angv, angr, xp + hn, yp + hn, zp + hn] = self.k2(x, y, orientations[angr,:], orientations[angv,:]) # save to class member self.lookuptable = lookuptablelocal @cython.wraparound(False) @cython.boundscheck(False) @cython.nonecheck(False) @cython.cdivision(True) cdef void estimate_kernel_size(self, verbose=True): """ Estimates the dimensions the kernel should have based on the kernel parameters. Parameters ---------- verbose : boolean Enable verbose mode. """ cdef: double [:] x double [:] y double [:] r double [:] v double i x = np.array([0., 0., 0.]) y = np.array([0., 0., 0.]) r = np.array([0., 0., 1.]) v = np.array([0., 0., 1.]) # evaluate at origin self.kernelmax = self.k2(x, y, r, v); with nogil: # determine a good kernel size i = 0.0 while True: i += 0.1 x[2] = i kval = self.k2(x, y, r, v) / self.kernelmax if(kval < 0.1): break; N = ceil(i) * 2 if N % 2 == 0: N -= 1 if verbose: print("Dimensions of kernel: %dx%dx%d" % (N, N, N)) self.kernelsize = N @cython.wraparound(False) @cython.boundscheck(False) @cython.nonecheck(False) cdef double k2(self, double [:] x, double [:] y, double [:] r, double [:] v) nogil: """ Evaluate the kernel at position x relative to position y, with orientation r relative to orientation v. 
Parameters ---------- x : 1D ndarray Position x y : 1D ndarray Position y r : 1D ndarray Orientation r v : 1D ndarray Orientation v Returns ------- kernel_value : double """ cdef: double [:] a double [:,:] transm double [:] arg1 double [:] arg2p double [:] arg2 double [:] c double kernelval with gil: a = np.subtract(x, y) transm = np.transpose(R(euler_angles(v))) arg1 = np.dot(transm, a) arg2p = np.dot(transm, r) arg2 = euler_angles(arg2p) c = self.coordinate_map(arg1[0], arg1[1], arg1[2], arg2[0], arg2[1]) kernelval = self.kernel(c) return kernelval @cython.wraparound(False) @cython.boundscheck(False) @cython.nonecheck(False) @cython.cdivision(True) cdef double [:] coordinate_map(self, double x, double y, double z, double beta, double gamma) nogil: """ Compute a coordinate map for the kernel Parameters ---------- x : double X position y : double Y position z : double Z position beta : double First Euler angle gamma : double Second Euler angle Returns ------- c : 1D ndarray array of coordinates for kernel """ cdef: double [:] c double q double cg double cotq2 with gil: c = np.zeros(6) if beta == 0: c[0] = x c[1] = y c[2] = z c[3] = c[4] = c[5] = 0 else: q = fabs(beta) cg = cos(gamma) sg = sin(gamma) cotq2 = 1.0 / tan(q/2) c[0] = -0.5*z*beta*cg + \ x*(1 - (beta*beta*cg*cg * (1 - 0.5*q*cotq2)) / (q*q)) - \ (y*beta*beta*cg*sg * (1 - 0.5*q*cotq2)) / (q*q) c[1] = -0.5*z*beta*sg - \ (x*beta*beta*cg*sg * (1 - 0.5*q*cotq2)) / (q*q) + \ y * (1 - (beta*beta*sg*sg * (1 - 0.5*q*cotq2)) / (q*q)) c[2] = 0.5*x*beta*cg + 0.5*y*beta*sg + \ z * (1 + ((1 - 0.5*q*cotq2) * (-beta*beta*cg*cg - \ beta*beta*sg*sg)) / (q*q)) c[3] = beta * (-sg) c[4] = beta * cg c[5] = 0 return c @cython.wraparound(False) @cython.boundscheck(False) @cython.nonecheck(False) @cython.cdivision(True) cdef double kernel(self, double [:] c) nogil: """ Internal function, evaluates the kernel based on the coordinate map. 
Parameters ---------- c : 1D ndarray array of coordinates for kernel Returns ------- kernel_value : double """ cdef double output = 1 / (8*sqrt(2)) output *= sqrt(PI)*self.t*sqrt(self.t*self.D33)*sqrt(self.D33*self.D44) output *= 1 / (16*PI*PI*self.D33*self.D33*self.D44*self.D44*self.t*self.t*self.t*self.t) output *= exp(-sqrt((c[0]*c[0] + c[1]*c[1]) / (self.D33*self.D44) + \ (c[2]*c[2] / self.D33 + (c[3]*c[3]+c[4]*c[4]) / self.D44) * \ (c[2]*c[2] / self.D33 + (c[3]*c[3]+c[4]*c[4]) / self.D44) + \ c[5]*c[5]/self.D44) / (4*self.t)); return output cdef double PI = 3.1415926535897932 @cython.wraparound(False) @cython.boundscheck(False) cdef double [:] euler_angles(double [:] inp) nogil: """ Compute the Euler angles for a given input vector Parameters ---------- inp : 1D ndarray Input vector Returns ------- euler_angles : 1D ndarray """ cdef: double x double y double z double [:] output x = inp[0] y = inp[1] z = inp[2] with gil: output = np.zeros(3) # handle the case (0,0,1) if x*x < 10e-6 and y*y < 10e-6 and (z-1) * (z-1) < 10e-6: output[0] = 0 output[1] = 0 # handle the case (0,0,-1) elif x*x < 10e-6 and y*y < 10e-6 and (z+1) * (z+1) < 10e-6: output[0] = PI output[1] = 0 # all other cases else: output[0] = acos(z) output[1] = atan2(y, x) return output @cython.wraparound(False) @cython.boundscheck(False) cdef double [:,:] R(double [:] inp) nogil: """ Compute the Rotation matrix for a given input vector Parameters ---------- inp : 1D ndarray Input vector Returns ------- rotation_matrix : 2D ndarray """ cdef: double beta double gamma double [:] output double cb double sb double cg double sg beta = inp[0] gamma = inp[1] with gil: output = np.zeros(9) cb = cos(beta) sb = sin(beta) cg = cos(gamma) sg = sin(gamma) output[0] = cb * cg output[1] = -sg output[2] = cg * sb output[3] = cb * sg output[4] = cg output[5] = sb * sg output[6] = -sb output[7] = 0 output[8] = cb with gil: return np.reshape(output, (3,3)) dipy-0.13.0/dipy/denoise/localpca.py000066400000000000000000000144601317371701200172720ustar00rootroot00000000000000import numpy as np try: from scipy.linalg.lapack import dgesvd as svd svd_args = [1, 0] # If you have an older version of scipy, we fall back # on the standard scipy SVD API: except ImportError: from scipy.linalg import svd svd_args = [False] from scipy.linalg import eigh def localpca(arr, sigma, mask=None, pca_method='eig', patch_radius=2, tau_factor=2.3, out_dtype=None): r"""Local PCA-based denoising of diffusion datasets. Parameters ---------- arr : 4D array Array of data to be denoised. The dimensions are (X, Y, Z, N), where N are the diffusion gradient directions. mask : 3D boolean array A mask with voxels that are true inside the brain and false outside of it. The function denoises within the true part and returns zeros outside of those voxels. sigma : float or 3D array Standard deviation of the noise estimated from the data. pca_method : 'eig' or 'svd' Use either eigenvalue decomposition (eig) or singular value decomposition (svd) for principal component analysis. The default method is 'eig' which is faster. However, occasionally 'svd' might be more accurate. patch_radius : int, optional The radius of the local patch to be taken around each voxel (in voxels). Default: 2 (denoise in blocks of 5x5x5 voxels). tau_factor : float, optional Thresholding of PCA eigenvalues is done by nulling out eigenvalues that are smaller than: .. math :: \tau = (\tau_{factor} \sigma)^2 Default: 2.3, based on the results described in [Manjon13]_. 
out_dtype : str or dtype, optional The dtype for the output array. Default: output has the same dtype as the input. Returns ------- denoised_arr : 4D array This is the denoised array of the same size as that of the input data, clipped to non-negative values References ---------- .. [Manjon13] Manjon JV, Coupe P, Concha L, Buades A, Collins DL (2013) Diffusion Weighted Image Denoising Using Overcomplete Local PCA. PLoS ONE 8(9): e73021. https://doi.org/10.1371/journal.pone.0073021 """ if mask is None: # If mask is not specified, use the whole volume mask = np.ones_like(arr, dtype=bool)[..., 0] if out_dtype is None: out_dtype = arr.dtype # We retain float64 precision, iff the input is in this precision: if arr.dtype == np.float64: calc_dtype = np.float64 # Otherwise, we'll calculate things in float32 (saving memory) else: calc_dtype = np.float32 if not arr.ndim == 4: raise ValueError("PCA denoising can only be performed on 4D arrays.", arr.shape) if pca_method.lower() == 'svd': is_svd = True elif pca_method.lower() == 'eig': is_svd = False else: raise ValueError("pca_method should be either 'eig' or 'svd'") patch_size = 2 * patch_radius + 1 if patch_size ** 3 < arr.shape[-1]: e_s = "You asked for PCA denoising with a " e_s += "patch_radius of {0} ".format(patch_radius) e_s += "for data with {0} directions. ".format(arr.shape[-1]) e_s += "This would result in an ill-conditioned PCA matrix. " e_s += "Please increase the patch_radius." raise ValueError(e_s) if isinstance(sigma, np.ndarray): if not sigma.shape == arr.shape[:-1]: e_s = "You provided a sigma array with a shape" e_s += "{0} for data with".format(sigma.shape) e_s += "shape {0}. Please provide a sigma array".format(arr.shape) e_s += " that matches the spatial dimensions of the data." raise ValueError(e_s) tau = np.median(np.ones(arr.shape[:-1]) * ((tau_factor * sigma) ** 2)) theta = np.zeros(arr.shape, dtype=calc_dtype) thetax = np.zeros(arr.shape, dtype=calc_dtype) # loop around and find the 3D patch for each direction at each pixel for k in range(patch_radius, arr.shape[2] - patch_radius): for j in range(patch_radius, arr.shape[1] - patch_radius): for i in range(patch_radius, arr.shape[0] - patch_radius): # Shorthand for indexing variables: if not mask[i, j, k]: continue ix1 = i - patch_radius ix2 = i + patch_radius + 1 jx1 = j - patch_radius jx2 = j + patch_radius + 1 kx1 = k - patch_radius kx2 = k + patch_radius + 1 X = arr[ix1:ix2, jx1:jx2, kx1:kx2].reshape( patch_size ** 3, arr.shape[-1]) # compute the mean and normalize M = np.mean(X, axis=0) # Upcast the dtype for precision in the SVD X = X - M if is_svd: # PCA using an SVD U, S, Vt = svd(X, *svd_args)[:3] # Items in S are the eigenvalues, but in ascending order # We invert the order (=> descending), square and normalize # \lambda_i = s_i^2 / n d = S[::-1] ** 2 / X.shape[0] # Rows of Vt are eigenvectors, but also in ascending # eigenvalue order: W = Vt[::-1].T else: # PCA using an Eigenvalue decomposition C = np.transpose(X).dot(X) C = C / X.shape[0] [d, W] = eigh(C, turbo=True) # Threshold by tau: W[:, d < tau] = 0 # This is equations 1 and 2 in Manjon 2013: Xest = X.dot(W).dot(W.T) + M Xest = Xest.reshape(patch_size, patch_size, patch_size, arr.shape[-1]) # This is equation 3 in Manjon 2013: this_theta = 1.0 / (1.0 + np.sum(d > 0)) theta[ix1:ix2, jx1:jx2, kx1:kx2] += this_theta thetax[ix1:ix2, jx1:jx2, kx1:kx2] += Xest * this_theta denoised_arr = thetax / theta denoised_arr.clip(min=0, out=denoised_arr) denoised_arr[~mask] = 0 return denoised_arr.astype(out_dtype) 
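The localpca docstring above spells out the expected inputs (a 4D array, a noise standard deviation given as a float or 3D array, and an optional mask), but no call site appears in this module. The following is a minimal usage sketch, not part of the dipy sources: the synthetic array, its dimensions and the scalar sigma value are illustrative assumptions standing in for a real diffusion dataset and a proper noise estimate.

import numpy as np
from dipy.denoise.localpca import localpca

# Hypothetical 4D "diffusion-like" data: 20x20x20 voxels, 65 gradient directions.
# In practice `data` would be a measured dMRI volume and `sigma` would come from a
# noise-estimation step; both are assumptions made for this sketch.
rng = np.random.RandomState(0)
data = 100.0 + 10.0 * rng.standard_normal((20, 20, 20, 65))
mask = np.ones(data.shape[:-1], dtype=bool)  # denoise every voxel

denoised = localpca(data, sigma=10.0, mask=mask,
                    pca_method='eig', patch_radius=2, tau_factor=2.3)
assert denoised.shape == data.shape  # output keeps the input shape

With the default patch_radius of 2, each 5x5x5 patch provides 125 samples, which satisfies the check against the 65 gradient directions; a smaller patch_radius with this many directions would raise the ill-conditioned-PCA error described above.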
dipy-0.13.0/dipy/denoise/nlmeans.py000066400000000000000000000056141317371701200171520ustar00rootroot00000000000000from __future__ import division, print_function import numpy as np from dipy.denoise.denspeed import nlmeans_3d # from warnings import warn # import warnings # warnings.simplefilter('always', DeprecationWarning) # warn(DeprecationWarning("Module 'dipy.denoise.nlmeans' is deprecated," # " use module 'dipy.denoise.non_local_means' instead")) def nlmeans(arr, sigma, mask=None, patch_radius=1, block_radius=5, rician=True, num_threads=None): r""" Non-local means for denoising 3D and 4D images Parameters ---------- arr : 3D or 4D ndarray The array to be denoised mask : 3D ndarray sigma : float or 3D array standard deviation of the noise estimated from the data patch_radius : int patch size is ``2 x patch_radius + 1``. Default is 1. block_radius : int block size is ``2 x block_radius + 1``. Default is 5. rician : boolean If True the noise is estimated as Rician, otherwise Gaussian noise is assumed. num_threads : int Number of threads. If None (default) then all available threads will be used (all CPU cores). Returns ------- denoised_arr : ndarray the denoised ``arr`` which has the same shape as ``arr``. References ---------- .. [Descoteaux08] Descoteaux, Maxim and Wiest-Daessle`, Nicolas and Prima, Sylvain and Barillot, Christian and Deriche, Rachid Impact of Rician Adapted Non-Local Means Filtering on HARDI, MICCAI 2008 """ # warn(DeprecationWarning("function 'dipy.denoise.nlmeans'" # " is deprecated, use module " # "'dipy.denoise.non_local_means'" # " instead")) if arr.ndim == 3: sigma = np.ones(arr.shape, dtype=np.float64) * sigma return nlmeans_3d(arr, mask, sigma, patch_radius, block_radius, rician, num_threads).astype(arr.dtype) elif arr.ndim == 4: denoised_arr = np.zeros_like(arr) if isinstance(sigma, np.ndarray) and sigma.ndim == 3: sigma = (np.ones(arr.shape, dtype=np.float64) * sigma[..., np.newaxis]) else: sigma = np.ones(arr.shape, dtype=np.float64) * sigma for i in range(arr.shape[-1]): denoised_arr[..., i] = nlmeans_3d(arr[..., i], mask, sigma[..., i], patch_radius, block_radius, rician, num_threads).astype(arr.dtype) return denoised_arr else: raise ValueError("Only 3D or 4D array are supported!", arr.shape) dipy-0.13.0/dipy/denoise/nlmeans_block.pyx000066400000000000000000000421631317371701200205140ustar00rootroot00000000000000cimport cython from cython.view cimport array as cvarray from libc.math cimport sqrt, exp import numpy as np __all__ = ['firdn', 'upfir', 'nlmeans_block'] cdef inline int _int_max(int a, int b): return a if a >= b else b cdef inline int _int_min(int a, int b): return a if a <= b else b def _firdn_vector(double[:] f, double[:] h, double[:] out): cdef int n = len(f) cdef int klen = len(h) cdef int outLen = (n + klen) // 2 cdef double ss cdef int i, k, limInf, limSup, x = 0, ox = 0, ks = 0 for i in range(outLen): ss = 0 limInf = _int_max(0, x - klen + 1) limSup = 1 + _int_min(n - 1, x) ks = limInf for k in range(limInf, limSup): ss += f[ks] * h[x - k] ks += 1 out[ox] = ss x += 2 ox += 1 def _upfir_vector(double[:] f, double[:] h, double[:] out): cdef int n = f.shape[0] cdef int klen = h.shape[0] cdef int outLen = 2 * n + klen - 2 cdef int x, limInf, limSup, k, ks cdef double ss for x in range(outLen): limInf = _int_max(0, x - klen + 1) if(limInf % 2 == 1): limInf += 1 limSup = _int_min(2 * (n - 1), x) if(limSup % 2 == 1): limSup -= 1 ss = 0 k = limInf ks = limInf // 2 while(k <= limSup): ss += f[ks] * h[x - k] k += 2 ks += 1 out[x] = ss def 
_firdn_matrix(double[:, :] F, double[:] h, double[:, :] out): cdef int n = F.shape[0] cdef int m = F.shape[1] cdef int j for j in range(m): _firdn_vector(F[:, j], h, out[:, j]) def _upfir_matrix(double[:, :] F, double[:] h, double[:, :] out): cdef int n = F.shape[0] cdef int m = F.shape[1] for j in range(m): _upfir_vector(F[:, j], h, out[:, j]) @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) cdef void _average_block(double[:, :, :] ima, int x, int y, int z, double[:, :, :] average, double weight) nogil: """ Computes the weighted average of the patches in a blockwise manner Parameters ---------- ima : 3D array of doubles input image x : integer x coordinate of the center voxel y : integer y coordinate of the center voxel z : integer z coordinate of the center voxel average : 3D array of doubles the image where averages are stored weight : double weight for the weighted averaging """ cdef int a, b, c, x_pos, y_pos, z_pos cdef int is_outside cdef int count = 0 cdef int neighborhoodsize = average.shape[0] // 2 for a in range(average.shape[0]): for b in range(average.shape[1]): for c in range(average.shape[2]): x_pos = x + a - neighborhoodsize y_pos = y + b - neighborhoodsize z_pos = z + c - neighborhoodsize is_outside = 0 if ((x_pos < 0) or (x_pos >= ima.shape[1])): is_outside = 1 if ((y_pos < 0) or (y_pos >= ima.shape[0])): is_outside = 1 if ((z_pos < 0) or (z_pos >= ima.shape[2])): is_outside = 1 if (is_outside == 1): average[a, b, c] += weight * (ima[y, x, z]**2) else: average[a, b, c] += weight * (ima[y_pos, x_pos, z_pos]**2) @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) cdef void _value_block(double[:, :, :] estimate, double[:, :, :] Label, int x, int y, int z, double[:, :, :] average, double global_sum, double hh, int rician_int) nogil: """ Computes the final estimate of the denoised image Parameters ---------- estimate : 3D array of doubles The denoised estimate array Label : 3D array of doubles The label map for block wise weighted averaging x : integer x coordinate of the center voxel y : integer y coordinate of the center voxel z : integer z coordinate of the center voxel average : 3D array of doubles weighted average image global_sum : double total weight sum hh : double weight parameter rician_int : integer 0 or 1 as per the boolean value """ cdef int is_outside, a, b, c, x_pos, y_pos, z_pos, count = 0 cdef double value = 0.0 cdef double denoised_value = 0.0 cdef double label = 0.0 cdef int neighborhoodsize = average.shape[0] // 2 for a in range(average.shape[0]): for b in range(average.shape[1]): for c in range(average.shape[2]): is_outside = 0 x_pos = x + a - neighborhoodsize y_pos = y + b - neighborhoodsize z_pos = z + c - neighborhoodsize if ((x_pos < 0) or (x_pos >= estimate.shape[1])): is_outside = 1 if ((y_pos < 0) or (y_pos >= estimate.shape[0])): is_outside = 1 if ((z_pos < 0) or (z_pos >= estimate.shape[2])): is_outside = 1 if (is_outside == 0): value = estimate[y_pos, x_pos, z_pos] if (rician_int): denoised_value = (average[a, b, c] / global_sum) - hh else: denoised_value = (average[a, b, c] / global_sum) if (denoised_value > 0): denoised_value = sqrt(denoised_value) else: denoised_value = 0.0 value += denoised_value label = Label[y_pos, x_pos, z_pos] estimate[y_pos, x_pos, z_pos] = value Label[y_pos, x_pos, z_pos] = label + 1 @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) cdef double _distance(double[:, :, :] image, int x, int y, int z, int nx, int ny, int nz, int block_radius) 
nogil: """ Computes the distance between two square subpatches of image located at p and q, respectively. If the centered squares lie beyond the boundaries of image, they are mirrored. Parameters ---------- image : 3D array of doubles the image whose voxels are taken x : integer x coordinate of first patch's center y : integer y coordinate of first patch's center z : integer z coordinate of first patch's center nx : integer nx coordinate of second patch's center ny : integer ny coordinate of second patch's center nz : integer nz coordinate of second patch's center block_radius : integer block radius for which the distince is computed for """ cdef double acu, distancetotal cdef int i, j, k, ni1, nj1, ni2, nj2, nk1, nk2 cdef int sx = image.shape[1], sy = image.shape[0], sz = image.shape[2] acu = 0 distancetotal = 0 for i in range(-block_radius, block_radius + 1): for j in range(-block_radius, block_radius + 1): for k in range(-block_radius, block_radius + 1): ni1 = x + i nj1 = y + j nk1 = z + k ni2 = nx + i nj2 = ny + j nk2 = nz + k if(ni1 < 0): ni1 = -ni1 if(nj1 < 0): nj1 = -nj1 if(ni2 < 0): ni2 = -ni2 if(nj2 < 0): nj2 = -nj2 if(nk1 < 0): nk1 = -nk1 if(nk2 < 0): nk2 = -nk2 if(ni1 >= sx): ni1 = 2 * sx - ni1 - 1 if(nj1 >= sy): nj1 = 2 * sy - nj1 - 1 if(nk1 >= sz): nk1 = 2 * sz - nk1 - 1 if(ni2 >= sx): ni2 = 2 * sx - ni2 - 1 if(nj2 >= sy): nj2 = 2 * sy - nj2 - 1 if(nk2 >= sz): nk2 = 2 * sz - nk2 - 1 distancetotal += (image[nj1, ni1, nk1] - image[nj2, ni2, nk2])**2 acu = acu + 1 return distancetotal / acu @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) cdef double _local_mean(double[:, :, :]ima, int x, int y, int z) nogil: """ local mean of a 3x3x3 patch centered at x,y,z """ cdef int dims0 = ima.shape[0] cdef int dims1 = ima.shape[1] cdef int dims2 = ima.shape[2] cdef double ss = 0 cdef int px, py, pz, dx, dy, dz, nx, ny, nz for px in range(x - 1, x + 2): for py in range(y - 1, y + 2): for pz in range(z - 1, z + 2): px = (-px if px < 0 else (2 * dims0 - px - 1 if px >= dims0 else px)) py = (-py if py < 0 else (2 * dims1 - py - 1 if py >= dims1 else py)) pz = (-pz if pz < 0 else (2 * dims2 - pz - 1 if pz >= dims2 else pz)) ss += ima[px, py, pz] return ss / 27.0 @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) cdef double _local_variance(double[:, :, :] ima, double mean, int x, int y, int z) nogil: """ local variance of a 3x3x3 patch centered at x,y,z """ dims0 = ima.shape[0] dims1 = ima.shape[1] dims2 = ima.shape[2] cdef int cnt = 0 cdef double ss = 0 cdef int dx, dy, dz, nx, ny, nz for px in range(x - 1, x + 2): for py in range(y - 1, y + 2): for pz in range(z - 1, z + 2): if ((px >= 0 and py >= 0 and pz > 0) and (px < dims0 and py < dims1 and pz < dims2)): ss += (ima[px, py, pz] - mean) * (ima[px, py, pz] - mean) cnt += 1 return ss / (cnt - 1) cpdef firdn(double[:, :] image, double[:] h): """ Applies the filter given by the convolution kernel 'h' columnwise to 'image', then subsamples by 2. This is a special case of the matlab's 'upfirdn' function, ported to python. Returns the filtered image. 
Parameters ---------- image: 2D array of doubles the input image to be filtered h: double array the convolution kernel """ nrows = image.shape[0] ncols = image.shape[1] ll = h.shape[0] cdef double[:, :] filtered = np.zeros(shape=((nrows + ll) // 2, ncols)) _firdn_matrix(image, h, filtered) return filtered cpdef upfir(double[:, :] image, double[:] h): """ Upsamples the columns of the input image by 2, then applies the convolution kernel 'h' (again, columnwise). This is a special case of the matlab's 'upfirdn' function, ported to python. Returns the filtered image. Parameters ---------- image: 2D array of doubles the input image to be filtered h: double array the convolution kernel """ nrows = image.shape[0] ncols = image.shape[1] ll = h.shape[0] cdef double[:, :] filtered = np.zeros(shape=(2 * nrows + ll - 2, ncols)) _upfir_matrix(image, h, filtered) return filtered @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) def nlmeans_block(double[:, :, :]image, double[:, :, :] mask, int patch_radius, int block_radius, double h, int rician): """Non-Local Means Denoising Using Blockwise Averaging Parameters ---------- image : 3D array of doubles the input image, corrupted with rician noise mask : 3D array of doubles the input mask patch_radius : int similar patches in the non-local means are searched for locally, inside a cube of side 2*v+1 centered at each voxel of interest. block_radius : int the size of the block to be used (2*f+1)x(2*f+1)x(2*f+1) in the blockwise non-local means implementation (the Coupe's proposal). h : double the estimated amount of rician noise in the input image: in P. Coupe et al. the rician noise was simulated as sqrt((f+x)^2 + (y)^2) where f is the pixel value and x and y are independent realizations of a random variable with Normal distribution, with mean=0 and standard deviation=h rician : boolean If True the noise is estimated as Rician, otherwise Gaussian noise is assumed. Returns ------- fima: 3D double array the denoised output which has the same shape as input image. References ---------- [1] P. Coupe, P. Yger, S. Prima, P. Hellier, C. Kervrann, C. Barillot, "An Optimized Blockwise Non Local Means Denoising Filter for 3D Magnetic Resonance Images" IEEE Transactions on Medical Imaging, 27(4):425-441, 2008 [2] Pierrick Coupe, Jose Manjon, Montserrat Robles, Louis Collins. 
"Multiresolution Non-Local Means Filter for 3D MR Image Denoising" IET Image Processing, Institution of Engineering and Technology, 2011 """ cdef int[:] dims = cvarray((3,), itemsize=sizeof(int), format="i") dims[0] = image.shape[0] dims[1] = image.shape[1] dims[2] = image.shape[2] cdef double hh = 2 * h * h cdef int Ndims = (2 * block_radius + 1)**3 cdef int nvox = dims[0] * dims[1] * dims[2] cdef double[:, :, :] average = np.zeros((2 * block_radius + 1, 2 * block_radius + 1, 2 * block_radius + 1), dtype=np.float64) cdef double[:, :, :] fima = np.zeros_like(image) cdef double[:, :, :] means = np.zeros_like(image) cdef double[:, :, :] variances = np.zeros_like(image) cdef double[:, :, :] Estimate = np.zeros_like(image) cdef double[:, :, :] Label = np.zeros_like(image) cdef int i, j, k, ni, nj, nk cdef double t1, t2 cdef double epsilon = 0.00001 cdef double mu1 = 0.95 cdef double var1 = 0.5 + 1e-7 cdef double d cdef double totalWeight, wmax, w with nogil: for k in range(dims[2]): for i in range(dims[1]): for j in range(dims[0]): means[j, i, k] = _local_mean(image, j, i, k) variances[j, i, k] = _local_variance( image, means[j, i, k], j, i, k) for k in range(0, dims[2], 2): for i in range(0, dims[1], 2): for j in range(0, dims[0], 2): with gil: average[...] = 0 totalWeight = 0 if (means[j, i, k] <= epsilon) or ( variances[j, i, k] <= epsilon): wmax = 1.0 _average_block(image, i, j, k, average, wmax) totalWeight += wmax _value_block(Estimate, Label, i, j, k, average, totalWeight, hh, rician) else: wmax = 0 for nk in range(k - patch_radius, k + patch_radius + 1): for ni in range(i - patch_radius, i + patch_radius + 1): for nj in range(j - patch_radius, j + patch_radius + 1): if((ni == i)and(nj == j)and(nk == k)): continue if ((ni < 0) or (nj < 0) or (nk < 0) or ( nj >= dims[0]) or (ni >= dims[1]) or (nk >= dims[2])): continue if ((means[nj, ni, nk] <= epsilon) or ( variances[nj, ni, nk] <= epsilon)): continue t1 = (means[j, i, k]) / (means[nj, ni, nk]) t2 = (variances[j, i, k]) / \ (variances[nj, ni, nk]) if ((t1 > mu1) and (t1 < (1 / mu1)) and (t2 > var1) and (t2 < (1 / var1))): d = _distance( image, i, j, k, ni, nj, nk, block_radius) w = exp(-d / (h * h)) if(w > wmax): wmax = w _average_block( image, ni, nj, nk, average, w) totalWeight += w if(totalWeight != 0.0): _value_block(Estimate, Label, i, j, k, average, totalWeight, hh, rician) for k in range(0, dims[2]): for i in range(0, dims[1]): for j in range(0, dims[0]): if mask[j, i, k] == 0: fima[j, i, k] = 0 else: if(Label[j, i, k] == 0.0): fima[j, i, k] = image[j, i, k] else: fima[j, i, k] = Estimate[j, i, k] / Label[j, i, k] return fima dipy-0.13.0/dipy/denoise/noise_estimate.py000066400000000000000000000300631317371701200205210ustar00rootroot00000000000000from __future__ import division, print_function import numpy as np from scipy.special import gammainccinv from scipy.ndimage.filters import convolve def _inv_nchi_cdf(N, K, alpha): """Inverse CDF for the noncentral chi distribution See [1]_ p.3 section 2.3""" return gammainccinv(N * K, 1 - alpha) / K # List of optimal quantile for PIESNO. # Get optimal quantile for N if available, else use the median. opt_quantile = {1: 0.79681213002002, 2: 0.7306303027491917, 4: 0.6721952960782169, 8: 0.6254030432343569, 16: 0.5900487123737876, 32: 0.5641772300866416, 64: 0.5455611840489607, 128: 0.5322811923303339} def piesno(data, N, alpha=0.01, l=100, itermax=100, eps=1e-5, return_mask=False): """ Probabilistic Identification and Estimation of Noise (PIESNO). 
Parameters ----------- data : ndarray The magnitude signals to analyse. The last dimension must contain the same realisation of the volume, such as dMRI or fMRI data. N : int The number of phase array coils of the MRI scanner. If your scanner does a SENSE reconstruction, ALWAYS use N=1, as the noise profile is always Rician. If your scanner does a GRAPPA reconstruction, set N as the number of phase array coils. alpha : float Probabilistic estimation threshold for the gamma function. l : int number of initial estimates for sigma to try. itermax : int Maximum number of iterations to execute if convergence is not reached. eps : float Tolerance for the convergence criterion. Convergence is reached if two subsequent estimates are smaller than eps. return_mask : bool If True, return a mask identyfing all the pure noise voxel that were found. Returns -------- sigma : float The estimated standard deviation of the gaussian noise. mask : ndarray (optional) A boolean mask indicating the voxels identified as pure noise. Note ------ This function assumes two things : 1. The data has a noisy, non-masked background and 2. The data is a repetition of the same measurements along the last axis, i.e. dMRI or fMRI data, not structural data like T1/T2. This function processes the data slice by slice, as originally designed in the paper. Use it to get a slice by slice estimation of the noise, as in spinal cord imaging for example. References ------------ .. [1] Koay CG, Ozarslan E and Pierpaoli C. "Probabilistic Identification and Estimation of Noise (PIESNO): A self-consistent approach and its applications in MRI." Journal of Magnetic Resonance 2009; 199: 94-103. .. [2] Koay CG, Ozarslan E and Basser PJ. "A signal transformational framework for breaking the noise floor and its applications in MRI." Journal of Magnetic Resonance 2009; 197: 108-119. """ # This method works on a 2D array with repetitions as the third dimension, # so process the dataset slice by slice. if data.ndim < 3: e_s = "This function only works on datasets of at least 3 dimensions." raise ValueError(e_s) if N in opt_quantile: q = opt_quantile[N] else: q = 0.5 # Initial estimation of sigma initial_estimation = (np.percentile(data, q * 100) / np.sqrt(2 * _inv_nchi_cdf(N, 1, q))) if data.ndim == 4: sigma = np.zeros(data.shape[-2], dtype=np.float32) mask_noise = np.zeros(data.shape[:-1], dtype=np.bool) for idx in range(data.shape[-2]): sigma[idx], mask_noise[..., idx] = _piesno_3D(data[..., idx, :], N, alpha=alpha, l=l, itermax=itermax, eps=eps, return_mask=True, initial_estimation=initial_estimation) else: sigma, mask_noise = _piesno_3D(data, N, alpha=alpha, l=l, itermax=itermax, eps=eps, return_mask=True, initial_estimation=initial_estimation) if return_mask: return sigma, mask_noise return sigma def _piesno_3D(data, N, alpha=0.01, l=100, itermax=100, eps=1e-5, return_mask=False, initial_estimation=None): """ Probabilistic Identification and Estimation of Noise (PIESNO). This is the slice by slice version for working on a 4D array. Parameters ----------- data : ndarray The magnitude signals to analyse. The last dimension must contain the same realisation of the volume, such as dMRI or fMRI data. N : int The number of phase array coils of the MRI scanner. alpha : float (optional) Probabilistic estimation threshold for the gamma function. Default: 0.01. l : int (optional) number of initial estimates for sigma to try. Default: 100. itermax : int (optional) Maximum number of iterations to execute if convergence is not reached. Default: 100. 
eps : float (optional) Tolerance for the convergence criterion. Convergence is reached if two subsequent estimates are smaller than eps. Default: 1e-5. return_mask : bool (optional) If True, return a mask identyfing all the pure noise voxel that were found. Default: False. initial_estimation : float (optional) Upper bound for the initial estimation of sigma. default : None, which computes the optimal quantile for N. Returns -------- sigma : float The estimated standard deviation of the gaussian noise. mask : ndarray A boolean mask indicating the voxels identified as pure noise. Notes ------ This function assumes two things : 1. The data has a noisy, non-masked background and 2. The data is a repetition of the same measurements along the last axis, i.e. dMRI or fMRI data, not structural data like T1/T2. References ------------ .. [1] Koay CG, Ozarslan E and Pierpaoli C. "Probabilistic Identification and Estimation of Noise (PIESNO): A self-consistent approach and its applications in MRI." Journal of Magnetic Resonance 2009; 199: 94-103. .. [2] Koay CG, Ozarslan E and Basser PJ. "A signal transformational framework for breaking the noise floor and its applications in MRI." Journal of Magnetic Resonance 2009; 197: 108-119. """ if np.all(data == 0): if return_mask: return 0, np.zeros(data.shape[:-1], dtype=np.bool) return 0 if N in opt_quantile: q = opt_quantile[N] else: q = 0.5 denom = np.sqrt(2 * _inv_nchi_cdf(N, 1, q)) if initial_estimation is None: m = np.percentile(data, q * 100) / denom else: m = initial_estimation phi = np.arange(1, l + 1) * m / l K = data.shape[-1] sum_m2 = np.sum(data**2, axis=-1, dtype=np.float32) sigma_prev = 0 sigma = m prev_idx = 0 mask = np.zeros(data.shape[:-1], dtype=np.bool) lambda_minus = _inv_nchi_cdf(N, K, alpha/2) lambda_plus = _inv_nchi_cdf(N, K, 1 - alpha/2) for sigma_init in phi: s = sum_m2 / (2 * K * sigma_init**2) found_idx = np.sum(np.logical_and(lambda_minus <= s, s <= lambda_plus), dtype=np.int16) if found_idx > prev_idx: sigma = sigma_init prev_idx = found_idx for n in range(itermax): if np.abs(sigma - sigma_prev) < eps: break s = sum_m2 / (2 * K * sigma**2) mask[...] = np.logical_and(lambda_minus <= s, s <= lambda_plus) omega = data[mask, :] # If no point meets the criterion, exit if omega.size == 0: break sigma_prev = sigma # Numpy percentile must range in 0 to 100, hence q*100 sigma = np.percentile(omega, q * 100) / denom if return_mask: return sigma, mask return sigma def estimate_sigma(arr, disable_background_masking=False, N=0): """Standard deviation estimation from local patches Parameters ---------- arr : 3D or 4D ndarray The array to be estimated disable_background_masking : bool, default False If True, uses all voxels for the estimation, otherwise, only non-zeros voxels are used. Useful if the background is masked by the scanner. N : int, default 0 Number of coils of the receiver array. Use N = 1 in case of a SENSE reconstruction (Philips scanners) or the number of coils for a GRAPPA reconstruction (Siemens and GE). Use 0 to disable the correction factor, as for example if the noise is Gaussian distributed. See [1] for more information. Returns ------- sigma : ndarray standard deviation of the noise, one estimation per volume. Note ------- This function is the same as manually taking the standard deviation of the background and gives one value for the whole 3D array. It also includes the coil-dependent correction factor of Koay 2006 (see [1]_, equation 18) with theta = 0. 
Since this function was introduced in [2]_ for T1 imaging, it is expected to perform ok on diffusion MRI data, but might oversmooth some regions and leave others un-denoised for spatially varying noise profiles. Consider using :func:`piesno` to estimate sigma instead if visual inaccuracies are apparent in the denoised result. Reference ------- .. [1] Koay, C. G., & Basser, P. J. (2006). Analytically exact correction scheme for signal extraction from noisy magnitude MR signals. Journal of Magnetic Resonance), 179(2), 317-22. .. [2] Coupe, P., Yger, P., Prima, S., Hellier, P., Kervrann, C., Barillot, C., 2008. An optimized blockwise nonlocal means denoising filter for 3-D magnetic resonance images, IEEE Trans. Med. Imaging 27, 425-41. """ k = np.zeros((3, 3, 3), dtype=np.int8) k[0, 1, 1] = 1 k[2, 1, 1] = 1 k[1, 0, 1] = 1 k[1, 2, 1] = 1 k[1, 1, 0] = 1 k[1, 1, 2] = 1 # Precomputed factor from Koay 2006, this corrects the bias of magnitude # image correction_factor = {0: 1, # No correction 1: 0.42920367320510366, 4: 0.4834941393603609, 6: 0.4891759468548269, 8: 0.49195420135894175, 12: 0.4946862482541263, 16: 0.4960339908122364, 20: 0.4968365823718557, 24: 0.49736907650825657, 32: 0.49803177052530145, 64: 0.49901964176235936} if N in correction_factor: factor = correction_factor[N] else: raise ValueError("N = {0} is not supported! Please choose amongst \ {1}".format(N, sorted(list(correction_factor.keys())))) if arr.ndim == 3: sigma = np.zeros(1, dtype=np.float32) arr = arr[..., None] elif arr.ndim == 4: sigma = np.zeros(arr.shape[-1], dtype=np.float32) else: raise ValueError("Array shape is not supported!", arr.shape) if disable_background_masking: mask = arr[..., 0].astype(np.bool) else: mask = np.ones_like(arr[..., 0], dtype=np.bool) conv_out = np.zeros(arr[..., 0].shape, dtype=np.float64) for i in range(sigma.size): convolve(arr[..., i], k, output=conv_out) mean_block = np.sqrt(6/7) * (arr[..., i] - 1/6 * conv_out) sigma[i] = np.sqrt(np.mean(mean_block[mask]**2) / factor) return sigma dipy-0.13.0/dipy/denoise/non_local_means.py000066400000000000000000000050141317371701200206360ustar00rootroot00000000000000from __future__ import division, print_function import numpy as np from dipy.denoise.nlmeans_block import nlmeans_block def non_local_means(arr, sigma, mask=None, patch_radius=1, block_radius=5, rician=True): r""" Non-local means for denoising 3D and 4D images, using blockwise averaging approach Parameters ---------- arr : 3D or 4D ndarray The array to be denoised mask : 3D ndarray sigma : float standard deviation of the noise estimated from the data patch_radius : int patch size is ``2 x patch_radius + 1``. Default is 1. block_radius : int block size is ``2 x block_radius + 1``. Default is 5. rician : boolean If True the noise is estimated as Rician, otherwise Gaussian noise is assumed. Returns ------- denoised_arr : ndarray the denoised ``arr`` which has the same shape as ``arr``. References ---------- .. [Coupe08] P. Coupe, P. Yger, S. Prima, P. Hellier, C. Kervrann, C. Barillot, An Optimized Blockwise Non Local Means Denoising Filter for 3D Magnetic Resonance Images, IEEE Transactions on Medical Imaging, 27(4):425-441, 2008 .. [Coupe11] Pierrick Coupe, Jose Manjon, Montserrat Robles, Louis Collins. 
Adaptive Multiresolution Non-Local Means Filter for 3D MR Image Denoising IET Image Processing, Institution of Engineering and Technology, 2011 """ if not np.isscalar(sigma) and not sigma.shape == (1, ): raise ValueError("Sigma input needs to be of type float", sigma) if mask is None and arr.ndim > 2: mask = np.ones((arr.shape[0], arr.shape[1], arr.shape[2]), dtype='f8') else: mask = np.ascontiguousarray(mask, dtype='f8') if mask.ndim != 3: raise ValueError('mask needs to be a 3D ndarray', mask.shape) if arr.ndim == 3: return np.array(nlmeans_block( np.double(arr), mask, patch_radius, block_radius, sigma, np.int(rician))).astype(arr.dtype) elif arr.ndim == 4: denoised_arr = np.zeros_like(arr) for i in range(arr.shape[-1]): denoised_arr[..., i] = np.array(nlmeans_block(np.double( arr[..., i]), mask, patch_radius, block_radius, sigma, np.int(rician))).astype(arr.dtype) return denoised_arr else: raise ValueError("Only 3D or 4D array are supported!", arr.shape) dipy-0.13.0/dipy/denoise/pca_noise_estimate.pyx000066400000000000000000000122371317371701200215370ustar00rootroot00000000000000""" ================================ PCA Based Local Noise Estimation ================================ """ import numpy as np import nibabel as nib import scipy.special as sps from scipy import ndimage cimport cython cimport numpy as cnp # Try to get the SVD through direct API to lapack: try: from scipy.linalg.lapack import sgesvd as svd svd_args = [1, 0] # If you have an older version of scipy, we fall back # on the standard scipy SVD API: except ImportError: from scipy.linalg import svd svd_args = [False] @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) def pca_noise_estimate(data, gtab, patch_radius=1, correct_bias=True, smooth=2): """ PCA based local noise estimation. Parameters ---------- data: 4D array the input dMRI data. gtab: gradient table object gradient information for the data gives us the bvals and bvecs of diffusion data, which is needed here to select between the noise estimation methods. patch_radius : int The radius of the local patch to be taken around each voxel (in voxels). Default: 1 (estimate noise in blocks of 3x3x3 voxels). correct_bias : bool Whether to correct for bias due to Rician noise. This is an implementation of equation 8 in [1]_. smooth : int Radius of a Gaussian smoothing filter to apply to the noise estimate before returning. Default: 2. Returns ------- sigma_corr: 3D array The local noise standard deviation estimate. References ---------- .. [1] Manjon JV, Coupe P, Concha L, Buades A, Collins DL "Diffusion Weighted Image Denoising Using Overcomplete Local PCA". PLoS ONE 8(9): e73021. doi:10.1371/journal.pone.0073021. 
""" # first identify the number of the b0 images K = gtab.b0s_mask[gtab.b0s_mask].size if(K > 1): # If multiple b0 values then use MUBE noise estimate data0 = data[..., gtab.b0s_mask] sibe = False else: # if only one b0 value then SIBE noise estimate data0 = data[..., ~gtab.b0s_mask] sibe = True data0 = data0.astype(np.float64) cdef: cnp.npy_intp n0 = data0.shape[0] cnp.npy_intp n1 = data0.shape[1] cnp.npy_intp n2 = data0.shape[2] cnp.npy_intp n3 = data0.shape[3] cnp.npy_intp nsamples = n0 * n1 * n2 cnp.npy_intp i, j, k, i0, j0, k0, l0 cnp.npy_intp pr = patch_radius cnp.npy_intp patch_size = 2 * pr + 1 double norm = patch_size ** 3 double sum_reg, temp1 double[:, :, :] I = np.zeros((n0, n1, n2)) X = data0.reshape(nsamples, n3) # Demean: M = np.mean(X, axis=0) X = X - M U, S, Vt = svd(X, *svd_args)[:3] # Rows of Vt are the eigenvectors, in ascending eigenvalue order: W = Vt.T # Project into the data space V = X.dot(W) # Grab the column corresponding to the smallest eigen-vector/-value: I = V[:, -1].reshape(n0, n1, n2) del V, W, X, U, S, Vt cdef: double[:, :, :] count = np.zeros((n0, n1, n2)) double[:, :, :] mean = np.zeros((n0, n1, n2)) double[:, :, :] sigma_sq = np.zeros((n0, n1, n2)) double[:, :, :, :] data0temp = data0 with nogil: for i in range(pr, n0 - pr): for j in range(pr, n1 - pr): for k in range(pr, n2 - pr): sum_reg = 0 temp1 = 0 for i0 in range(-pr, pr + 1): for j0 in range(-pr, pr + 1): for k0 in range(-pr, pr + 1): sum_reg += I[i + i0, j + j0, k + k0] / norm for l0 in range(n3): temp1 += (data0temp[i + i0, j+ j0, k + k0, l0]) / (norm * n3) for i0 in range(-pr, pr + 1): for j0 in range(-pr, pr + 1): for k0 in range(-pr, pr + 1): sigma_sq[i + i0, j +j0, k + k0] += ( I[i + i0, j + j0, k + k0] - sum_reg) ** 2 mean[i + i0, j + j0, k + k0] += temp1 count[i + i0, j +j0, k + k0] += 1 sigma_sq = np.divide(sigma_sq, count) # find the SNR and make the correction for bias due to Rician noise: if correct_bias: mean = np.divide(mean, count) snr = np.divide(mean, np.sqrt(sigma_sq)) snr_sq = (snr ** 2) # xi is practically equal to 1 above 37.4, and we overflow, raising # warnings and creating ot-a-numbers. # Instead, we will replace these values with 1 below with np.errstate(over='ignore', invalid='ignore'): xi = (2 + snr_sq - (np.pi / 8) * np.exp(-snr_sq / 2) * ((2 + snr_sq) * sps.iv(0, (snr_sq) / 4) + (snr_sq) * sps.iv(1, (snr_sq) / 4)) ** 2).astype(float) xi[snr > 37.4] = 1 sigma_corr = sigma_sq / xi sigma_corr[np.isnan(sigma_corr)] = 0 else: sigma_corr = sigma_sq if smooth is not None: sigma_corr = ndimage.gaussian_filter(sigma_corr, smooth) return np.sqrt(sigma_corr) dipy-0.13.0/dipy/denoise/shift_twist_convolution.pyx000066400000000000000000000202051317371701200227040ustar00rootroot00000000000000import numpy as np cimport numpy as cnp cimport cython cimport safe_openmp as openmp from safe_openmp cimport have_openmp from cython.parallel import parallel, prange, threadid from libc.stdlib cimport malloc, free from dipy.denoise.enhancement_kernel import EnhancementKernel from dipy.data import get_sphere from dipy.reconst.shm import sh_to_sf, sf_to_sh def convolve(odfs_sh, kernel, sh_order, test_mode=False, num_threads=None, normalize=True): """ Perform the shift-twist convolution with the ODF data and the lookup-table of the kernel. 
Parameters ---------- odfs : array of double The ODF data in spherical harmonics format kernel : array of double The 5D lookup table sh_order : integer Maximal spherical harmonics order test_mode : boolean Reduced convolution in one direction only for testing num_threads : int Number of threads. If None (default) then all available threads will be used. normalize : boolean Apply max-normalization to the output such that its value range matches the input ODF data. Returns ------- output : array of double The ODF data after convolution enhancement in spherical harmonics format References ---------- [Meesters2016_ISMRM] S. Meesters, G. Sanguinetti, E. Garyfallidis, J. Portegies, R. Duits. (2016) Fast implementations of contextual PDE’s for HARDI data processing in DIPY. ISMRM 2016 conference. [DuitsAndFranken_IJCV] R. Duits and E. Franken (2011) Left-invariant diffusions on the space of positions and orientations and their application to crossing-preserving smoothing of HARDI images. International Journal of Computer Vision, 92:231-264. [Portegies2015] J. Portegies, G. Sanguinetti, S. Meesters, and R. Duits. (2015) New Approximation of a Scale Space Kernel on SE(3) and Applications in Neuroimaging. Fifth International Conference on Scale Space and Variational Methods in Computer Vision [Portegies2015b] J. Portegies, R. Fick, G. Sanguinetti, S. Meesters, G.Girard, and R. Duits. (2015) Improving Fiber Alignment in HARDI by Combining Contextual PDE flow with Constrained Spherical Deconvolution. PLoS One. """ # convert the ODFs from SH basis to DSF sphere = kernel.get_sphere() odfs_dsf = sh_to_sf(odfs_sh, sphere, sh_order=sh_order, basis_type=None) # perform the convolution output = perform_convolution(odfs_dsf, kernel.get_lookup_table(), test_mode, num_threads) # normalize the output if normalize: output = np.multiply(output, np.amax(odfs_dsf)/np.amax(output)) # convert back to SH output_sh = sf_to_sh(output, sphere, sh_order=sh_order) return output_sh def convolve_sf(odfs_sf, kernel, test_mode=False, num_threads=None, normalize=True): """ Perform the shift-twist convolution with the ODF data and the lookup-table of the kernel. Parameters ---------- odfs : array of double The ODF data sampled on a sphere kernel : array of double The 5D lookup table test_mode : boolean Reduced convolution in one direction only for testing num_threads : int Number of threads. If None (default) then all available threads will be used. normalize : boolean Apply max-normalization to the output such that its value range matches the input ODF data. Returns ------- output : array of double The ODF data after convolution enhancement, sampled on a sphere """ # perform the convolution output = perform_convolution(odfs_sf, kernel.get_lookup_table(), test_mode, num_threads) # normalize the output if normalize: output = np.multiply(output, np.amax(odfs_sf)/np.amax(output)) return output @cython.wraparound(False) @cython.boundscheck(False) @cython.nonecheck(False) @cython.cdivision(True) cdef double [:, :, :, ::1] perform_convolution (double [:, :, :, ::1] odfs, double [:, :, :, :, ::1] lut, cnp.npy_intp test_mode, num_threads=None): """ Perform the shift-twist convolution with the ODF data and the lookup-table of the kernel. Parameters ---------- odfs : array of double The ODF data sampled on a sphere lut : array of double The 5D lookup table test_mode : boolean Reduced convolution in one direction only for testing num_threads : int Number of threads. If None (default) then all available threads will be used. 
Returns ------- output : array of double The ODF data after convolution enhancement """ cdef: double [:, :, :, ::1] output = np.array(odfs, copy=True) cnp.npy_intp OR1 = lut.shape[0] cnp.npy_intp OR2 = lut.shape[1] cnp.npy_intp N = lut.shape[2] cnp.npy_intp hn = (N - 1) / 2 double [:, :, :, :] totalval double [:, :, :, :] voxcount cnp.npy_intp nx = odfs.shape[0] cnp.npy_intp ny = odfs.shape[1] cnp.npy_intp nz = odfs.shape[2] cnp.npy_intp threads_to_use = -1 cnp.npy_intp all_cores = openmp.omp_get_num_procs() cnp.npy_intp corient, orient, cx, cy, cz, x, y, z cnp.npy_intp expectedvox cnp.npy_intp edgeNormalization = True if num_threads is not None: threads_to_use = num_threads else: threads_to_use = all_cores if have_openmp: openmp.omp_set_dynamic(0) openmp.omp_set_num_threads(threads_to_use) if test_mode: edgeNormalization = False OR2 = 1 # expected number of voxels in kernel totalval = np.zeros((OR1, nx, ny, nz)) voxcount = np.zeros((OR1, nx, ny, nz)) expectedvox = nx * ny * nz with nogil: # loop over ODFs cx,cy,cz,corient --> y and v for corient in prange(OR1, schedule='guided'): for cx in range(nx): for cy in range(ny): for cz in range(nz): # loop over kernel x,y,z,orient --> x and r for x in range(int_max(cx - hn, 0), int_min(cx + hn + 1, ny - 1)): for y in range(int_max(cy - hn, 0), int_min(cy + hn + 1, ny - 1)): for z in range(int_max(cz - hn, 0), int_min(cz + hn + 1, nz - 1)): voxcount[corient, cx, cy, cz] += 1.0 for orient in range(0, OR2): totalval[corient, cx, cy, cz] += \ odfs[x, y, z, orient] * \ lut[corient, orient, x - (cx - hn), y - (cy - hn), z - (cz - hn)] if edgeNormalization: output[cx, cy, cz, corient] = \ totalval[corient, cx, cy, cz] * expectedvox/voxcount[corient, cx, cy, cz] else: output[cx, cy, cz, corient] = \ totalval[corient, cx, cy, cz] # Reset number of OpenMP cores to default if have_openmp and num_threads is not None: openmp.omp_set_num_threads(all_cores) return output cdef inline cnp.npy_intp int_max(cnp.npy_intp a, cnp.npy_intp b) nogil: return a if a >= b else b cdef inline cnp.npy_intp int_min(cnp.npy_intp a, cnp.npy_intp b) nogil: return a if a <= b else b dipy-0.13.0/dipy/denoise/tests/000077500000000000000000000000001317371701200162775ustar00rootroot00000000000000dipy-0.13.0/dipy/denoise/tests/__init__.py000066400000000000000000000000001317371701200203760ustar00rootroot00000000000000dipy-0.13.0/dipy/denoise/tests/test_ascm.py000066400000000000000000000104331317371701200206340ustar00rootroot00000000000000import numpy as np import dipy.data as dpd import nibabel as nib from numpy.testing import (run_module_suite, assert_, assert_equal, assert_array_almost_equal) from dipy.denoise.non_local_means import non_local_means from dipy.denoise.noise_estimate import estimate_sigma from dipy.data import fetch_stanford_t1, read_stanford_t1 from dipy.denoise.adaptive_soft_matching import adaptive_soft_matching def test_ascm_static(): S0 = 100 * np.ones((20, 20, 20), dtype='f8') S0n1 = non_local_means(S0, sigma=0, rician=False, patch_radius=1, block_radius=1) S0n2 = non_local_means(S0, sigma=0, rician=False, patch_radius=2, block_radius=1) S0n = adaptive_soft_matching(S0, S0n1, S0n2, 0) assert_array_almost_equal(S0, S0n) def test_ascm_random_noise(): S0 = 100 + 2 * np.random.standard_normal((22, 23, 30)) S0n1 = non_local_means(S0, sigma=1, rician=False, patch_radius=1, block_radius=1) S0n2 = non_local_means(S0, sigma=1, rician=False, patch_radius=2, block_radius=1) S0n = adaptive_soft_matching(S0, S0n1, S0n2, 1) print(S0.mean(), S0.min(), S0.max()) print(S0n.mean(), 
S0n.min(), S0n.max()) assert_(S0n.min() > S0.min()) assert_(S0n.max() < S0.max()) assert_equal(np.round(S0n.mean()), 100) def test_ascm_rmse_with_nlmeans(): # checks the smoothness S0 = np.ones((30, 30, 30)) * 100 S0[10:20, 10:20, 10:20] = 50 S0[20:30, 20:30, 20:30] = 0 S0_noise = S0 + 20 * np.random.standard_normal((30, 30, 30)) print("Original RMSE", np.sum(np.abs(S0 - S0_noise)) / np.sum(S0)) S0n1 = non_local_means( S0_noise, sigma=400, rician=False, patch_radius=1, block_radius=1) print("Smaller patch RMSE", np.sum(np.abs(S0 - S0n1)) / np.sum(S0)) S0n2 = non_local_means( S0_noise, sigma=400, rician=False, patch_radius=2, block_radius=2) print("Larger patch RMSE", np.sum(np.abs(S0 - S0n2)) / np.sum(S0)) S0n = adaptive_soft_matching(S0, S0n1, S0n2, 400) print("ASCM RMSE", np.sum(np.abs(S0 - S0n)) / np.sum(S0)) assert_(np.sum(np.abs(S0 - S0n)) / np.sum(S0) < np.sum(np.abs(S0 - S0n1)) / np.sum(S0)) assert_(np.sum(np.abs(S0 - S0n)) / np.sum(S0) < np.sum(np.abs(S0 - S0_noise)) / np.sum(S0)) assert_(90 < np.mean(S0n) < 110) def test_sharpness(): # check the edge-preserving nature S0 = np.ones((30, 30, 30)) * 100 S0[10:20, 10:20, 10:20] = 50 S0[20:30, 20:30, 20:30] = 0 S0_noise = S0 + 20 * np.random.standard_normal((30, 30, 30)) S0n1 = non_local_means( S0_noise, sigma=400, rician=False, patch_radius=1, block_radius=1) edg1 = np.abs(np.mean(S0n1[8, 10:20, 10:20] - S0n1[12, 10:20, 10:20]) - 50) print("Edge gradient smaller patch", edg1) S0n2 = non_local_means( S0_noise, sigma=400, rician=False, patch_radius=2, block_radius=2) edg2 = np.abs(np.mean(S0n2[8, 10:20, 10:20] - S0n2[12, 10:20, 10:20]) - 50) print("Edge gradient larger patch", edg2) S0n = adaptive_soft_matching(S0, S0n1, S0n2, 400) edg = np.abs(np.mean(S0n[8, 10:20, 10:20] - S0n[12, 10:20, 10:20]) - 50) print("Edge gradient ASCM", edg) assert_(edg2 > edg1) assert_(edg2 > edg) assert_(np.abs(edg1 - edg) < 1.5) def test_ascm_accuracy(): test_ascm_data_ref = nib.load(dpd.get_data("ascm_test")).get_data() test_data = nib.load(dpd.get_data("aniso_vox")).get_data() # the test data was constructed in this manner mask = test_data > 50 sigma = estimate_sigma(test_data, N=4) den_small = non_local_means( test_data, sigma=sigma, mask=mask, patch_radius=1, block_radius=1, rician=True) den_large = non_local_means( test_data, sigma=sigma, mask=mask, patch_radius=2, block_radius=1, rician=True) S0n = np.array(adaptive_soft_matching(test_data, den_small, den_large, sigma[0])) assert_array_almost_equal(S0n, test_ascm_data_ref) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/denoise/tests/test_denoise.py000066400000000000000000000010261317371701200213350ustar00rootroot00000000000000import numpy as np import numpy.testing as npt from dipy.denoise.noise_estimate import estimate_sigma from dipy.denoise.nlmeans import nlmeans import dipy.data as dpd import nibabel as nib def test_denoise(): """ """ fdata, fbval, fbvec = dpd.get_data() # Test on 4D image: data = nib.load(fdata).get_data() sigma1 = estimate_sigma(data) denoised = nlmeans(data, sigma=sigma1) # Test on 3D image: data = data[..., 0] sigma2 = estimate_sigma(data) denoised = nlmeans(data, sigma=sigma2) dipy-0.13.0/dipy/denoise/tests/test_kernel.py000066400000000000000000000120071317371701200211700ustar00rootroot00000000000000from dipy.denoise.enhancement_kernel import EnhancementKernel from dipy.denoise.shift_twist_convolution import convolve, convolve_sf from dipy.reconst.shm import sh_to_sf, sf_to_sh from dipy.core.sphere import Sphere from dipy.data import get_sphere import numpy 
as np import numpy.testing as npt def test_enhancement_kernel(): """ Test if the kernel values are correct by comparison against the values originally calculated by implementation in Mathematica, and at the same time checks the symmetry of the kernel.""" D33 = 1.0 D44 = 0.04 t = 1 k = EnhancementKernel(D33, D44, t, orientations=0, force_recompute=True) y = np.array([0., 0., 0.]) v = np.array([0., 0., 1.]) orientationlist=[[0., 0., 1.], [-0.0527864, 0.688191, 0.723607], \ [-0.67082, -0.16246, 0.723607], [-0.0527864, -0.688191, 0.723607], \ [0.638197, -0.262866, 0.723607], [0.831052, 0.238856, 0.502295], \ [0.262866, -0.809017, -0.525731], [0.812731, 0.295242, -0.502295], \ [-0.029644, 0.864188, -0.502295], [-0.831052, 0.238856, -0.502295], \ [-0.638197, -0.262866, -0.723607], [-0.436009, 0.864188, -0.251148], \ [-0.687157, -0.681718, 0.251148], [0.67082, -0.688191, 0.276393], \ [0.67082, 0.688191, 0.276393], [0.947214, 0.16246, -0.276393], \ [-0.861803, -0.425325, -0.276393]] positionlist= [[-0.108096, 0.0412229, 0.339119], [0.220647, -0.422053, 0.427524], \ [-0.337432, -0.0644619, -0.340777], [0.172579, -0.217602, -0.292446], \ [-0.271575, -0.125249, -0.350906], [-0.483807, 0.326651, 0.191993], \ [-0.480936, -0.0718426, 0.33202], [0.497193, -0.00585659, -0.251344], \ [0.237737, 0.013634, -0.471988], [0.367569, -0.163581, 0.0723955], \ [0.47859, -0.143252, 0.318579], [-0.21474, -0.264929, -0.46786], \ [-0.0684234, 0.0342464, 0.0942475], [0.344272, 0.423119, -0.303866], \ [0.0430714, 0.216233, -0.308475], [0.386085, 0.127333, 0.0503609], \ [0.334723, 0.071415, 0.403906]] kernelvalues = [0.10701063104295713, 0.0030052117308328923, 0.003125410084676201, \ 0.0031765819772012613, 0.003127254657020615, 0.0001295130396491743, \ 6.882352014430076e-14, 1.3821277371353332e-13, 1.3951939946082493e-13, \ 1.381612071786285e-13, 5.0861109163441125e-17, 1.0722120295517027e-10, \ 2.425145934791457e-6, 3.557919265806602e-6, 3.6669510385105265e-6, \ 5.97473789679846e-11, 6.155412262223178e-11] for p in range(len(orientationlist)): r = np.array(orientationlist[p]) x = np.array(positionlist[p]) npt.assert_almost_equal(k.evaluate_kernel(x, y, r, v), kernelvalues[p]) def test_spike(): """ Test if a convolution with a delta spike is equal to the kernel saved in the lookup table.""" # create kernel D33 = 1.0 D44 = 0.04 t = 1 num_orientations = 5 k = EnhancementKernel(D33, D44, t, orientations=num_orientations, force_recompute=True) # create a delta spike numorientations = k.get_orientations().shape[0] spike = np.zeros((7, 7, 7, numorientations), dtype=np.float64) spike[3, 3, 3, 0] = 1 # convolve kernel with delta spike csd_enh = convolve_sf(spike, k, test_mode=True, normalize=False) # check if kernel matches with the convolved delta spike totalsum = 0.0 for i in range(0, numorientations): totalsum += np.sum(np.array(k.get_lookup_table())[i, 0, :, :, :] - \ np.array(csd_enh)[:, :, :, i]) npt.assert_equal(totalsum, 0.0) def test_normalization(): """ Test the normalization routine applied after a convolution""" # create kernel D33 = 1.0 D44 = 0.04 t = 1 num_orientations = 5 k = EnhancementKernel(D33, D44, t, orientations=num_orientations, force_recompute=True) # create a constant dataset numorientations = k.get_orientations().shape[0] spike = np.ones((7, 7, 7, numorientations), dtype=np.float64) # convert dataset to SH spike_sh = sf_to_sh(spike, k.get_sphere(), sh_order=8) # convolve kernel with delta spike and apply normalization csd_enh = convolve(spike_sh, k, sh_order=8, test_mode=True, normalize=True) # convert 
dataset to DSF csd_enh_dsf = sh_to_sf(csd_enh, k.get_sphere(), sh_order=8, basis_type=None) # test if the normalization is performed correctly npt.assert_almost_equal(np.amax(csd_enh_dsf), np.amax(spike)) def test_kernel_input(): """ Test the kernel for inputs of type Sphere, type int and for input None""" sph = Sphere(1, 0, 0) D33 = 1.0 D44 = 0.04 t = 1 k = EnhancementKernel(D33, D44, t, orientations=sph, force_recompute=True) npt.assert_equal(k.get_lookup_table().shape, (1, 1, 7, 7, 7)) num_orientations = 2 k = EnhancementKernel(D33, D44, t, orientations=num_orientations, force_recompute=True) npt.assert_equal(k.get_lookup_table().shape, (2, 2, 7, 7, 7)) k = EnhancementKernel(D33, D44, t, orientations=0, force_recompute=True) npt.assert_equal(k.get_lookup_table().shape, (0, 0, 7, 7, 7)) if __name__ == '__main__': npt.run_module_suite() dipy-0.13.0/dipy/denoise/tests/test_lpca.py000066400000000000000000000227421317371701200206360ustar00rootroot00000000000000import numpy as np import scipy as sp import scipy.special as sps from numpy.testing import (run_module_suite, assert_, assert_equal, assert_raises, assert_array_almost_equal) from dipy.denoise.localpca import localpca from dipy.sims.voxel import multi_tensor from dipy.core.gradients import gradient_table, generate_bvecs from dipy.core.sphere import disperse_charges, HemiSphere from dipy.sims.voxel import multi_tensor def rfiw_phantom(gtab, snr=None): """rectangle fiber immersed in water""" # define voxel index slice_ind = np.zeros((10, 10, 8)) slice_ind[4:7, 4:7, :] = 1 slice_ind[4:7, 7, :] = 2 slice_ind[7, 7, :] = 3 slice_ind[7, 4:7, :] = 4 slice_ind[7, 3, :] = 5 slice_ind[4:7, 3, :] = 6 slice_ind[3, 3, :] = 7 slice_ind[3, 4:7, :] = 8 slice_ind[3, 7, :] = 9 # Define tisse diffusion parameters # Restricted diffusion ADr = 0.99e-3 RDr = 0.0 # Hindered diffusion ADh = 2.26e-3 RDh = 0.87 # S0 value for tissue S1 = 50 # Fraction between Restricted and Hindered diffusion fia = 0.51 # Define water diffusion Dwater = 3e-3 S2 = 100 # S0 value for water # Define tissue volume fraction for each voxel type (in index order) f = np.array([0., 1., 0.6, 0.18, 0.30, 0.15, 0.50, 0.35, 0.70, 0.42]) # Define S0 for each voxel (in index order) S0 = S1 * f + S2 * (1 - f) # multi tensor simulations assume that each water pull as constant S0 # since I am assuming that tissue and water voxels have different S0, # tissue volume fractions have to be adjusted to the measured f values when # constant S0 are assumed constant. Doing this correction, simulations will # be analogous to simulates that S0 are different for each media. 
(For more # datails on this contact the phantom designer) f1 = f * S1 / S0 mevals = np.array([[ADr, RDr, RDr], [ADh, RDh, RDh], [Dwater, Dwater, Dwater]]) angles = [(0, 0, 1), (0, 0, 1), (0, 0, 1)] DWI = np.zeros(slice_ind.shape + (gtab.bvals.size, )) for i in range(10): fractions = [f1[i] * fia * 100, f1[i] * (1 - fia) * 100, (1 - f1[i]) * 100] sig, direction = multi_tensor(gtab, mevals, S0=S0[i], angles=angles, fractions=fractions, snr=None) DWI[slice_ind == i, :] = sig if snr is None: return DWI else: sigma = S2 * 1.0 / snr n1 = np.random.normal(0, sigma, size=DWI.shape) n2 = np.random.normal(0, sigma, size=DWI.shape) return [np.sqrt((DWI / np.sqrt(2) + n1)**2 + (DWI / np.sqrt(2) + n2)**2), sigma] def gen_gtab(): # generate a gradient table for phantom data directions8 = generate_bvecs(8) directions30 = generate_bvecs(30) directions60 = generate_bvecs(60) # Create full dataset parameters # (6 b-values = 0, 8 directions for b-value 300, 30 directions for b-value # 1000 and 60 directions for b-value 2000) bvals = np.hstack((np.zeros(6), 300 * np.ones(8), 1000 * np.ones(30), 2000 * np.ones(60))) bvecs = np.vstack((np.zeros((6, 3)), directions8, directions30, directions60)) gtab = gradient_table(bvals, bvecs) return gtab def test_lpca_static(): S0 = 100 * np.ones((20, 20, 20, 20), dtype='f8') S0ns = localpca(S0, sigma=np.ones((20, 20, 20), dtype=np.float64)) assert_array_almost_equal(S0, S0ns) def test_lpca_random_noise(): S0 = 100 + 2 * np.random.standard_normal((22, 23, 30, 20)) S0ns = localpca(S0, sigma=np.std(S0)) assert_(S0ns.min() > S0.min()) assert_(S0ns.max() < S0.max()) assert_equal(np.round(S0ns.mean()), 100) def test_lpca_boundary_behaviour(): # check is first slice is getting denoised or not ? S0 = 100 * np.ones((20, 20, 20, 20), dtype='f8') S0[:, :, 0, :] = S0[:, :, 0, :] + 2 * \ np.random.standard_normal((20, 20, 20)) S0_first = S0[:, :, 0, :] S0ns = localpca(S0, sigma=np.std(S0)) S0ns_first = S0ns[:, :, 0, :] rmses = np.sum(np.abs(S0ns_first - S0_first)) / \ (100.0 * 20.0 * 20.0 * 20.0) # shows that S0n_first is not very close to S0_first assert_(rmses > 0.0001) assert_equal(np.round(S0ns_first.mean()), 100) # Use a volume of sigma, instead of a scalar: sigma_vol = np.ones(S0.shape[:-1]) * np.std(S0) S0ns = localpca(S0, sigma=sigma_vol) rmses = np.sum(np.abs(S0ns_first - S0_first)) / \ (100.0 * 20.0 * 20.0 * 20.0) # shows that S0n_first is not very close to S0_first assert_(rmses > 0.0001) assert_equal(np.round(S0ns_first.mean()), 100) def test_lpca_rmse(): S0_w_noise = 100 + 2 * np.random.standard_normal((22, 23, 30, 20)) rmse_w_noise = np.sqrt(np.mean((S0_w_noise - 100) ** 2)) S0_denoised = localpca(S0_w_noise, sigma=np.std(S0_w_noise)) rmse_denoised = np.sqrt(np.mean((S0_denoised - 100) ** 2)) # Denoising should always improve the RMSE: assert_(rmse_denoised < rmse_w_noise) def test_lpca_sharpness(): S0 = np.ones((30, 30, 30, 20), dtype=np.float64) * 100 S0[10:20, 10:20, 10:20, :] = 50 S0[20:30, 20:30, 20:30, :] = 0 S0 = S0 + 20 * np.random.standard_normal((30, 30, 30, 20)) S0ns = localpca(S0, sigma=20.0) # check the edge gradient edgs = np.abs(np.mean(S0ns[8, 10:20, 10:20] - S0ns[12, 10:20, 10:20]) - 50) assert_(edgs < 2) def test_lpca_dtype(): # If out_dtype is not specified, we retain the original precision: S0 = 200 * np.ones((20, 20, 20, 3), dtype=np.float64) S0ns = localpca(S0, sigma=1) assert_equal(S0.dtype, S0ns.dtype) S0 = 200 * np.ones((20, 20, 20, 20), dtype=np.uint16) S0ns = localpca(S0, sigma=np.ones((20, 20, 20))) assert_equal(S0.dtype, S0ns.dtype) # If we 
set out_dtype, we get what we asked for: S0 = 200 * np.ones((20, 20, 20, 20), dtype=np.uint16) S0ns = localpca(S0, sigma=np.ones((20, 20, 20)), out_dtype=np.float32) assert_equal(np.float32, S0ns.dtype) # If we set a few entries to zero, this induces negative entries in the # Resulting denoised array: S0[5:8, 5:8, 5:8] = 0 # But if we should always get all non-negative results: S0ns = localpca(S0, sigma=np.ones((20, 20, 20)), out_dtype=np.uint16) assert_(np.all(S0ns >= 0)) # And no wrap-around to crazy high values: assert_(np.all(S0ns <= 200)) def test_lpca_wrong(): S0 = np.ones((20, 20)) assert_raises(ValueError, localpca, S0, sigma=1) def test_phantom(): gtab = gen_gtab() DWI_clean = rfiw_phantom(gtab, snr=None) DWI, sigma = rfiw_phantom(gtab, snr=30) # To test without rician correction temp = (DWI_clean / sigma)**2 DWI_clean_wrc = (sigma * np.sqrt(np.pi / 2) * np.exp(-0.5 * temp) * ((1 + 0.5 * temp) * sps.iv(0, 0.25 * temp) + 0.5 * temp * sps.iv(1, 0.25 * temp))**2) DWI_den = localpca(DWI, sigma, patch_radius=3) rmse_den = np.sum(np.abs(DWI_clean - DWI_den)) / np.sum(np.abs(DWI_clean)) rmse_noisy = np.sum(np.abs(DWI_clean - DWI)) / np.sum(np.abs(DWI_clean)) rmse_den_wrc = np.sum(np.abs(DWI_clean_wrc - DWI_den) ) / np.sum(np.abs(DWI_clean_wrc)) rmse_noisy_wrc = np.sum(np.abs(DWI_clean_wrc - DWI)) / \ np.sum(np.abs(DWI_clean_wrc)) assert_(np.max(DWI_clean) / sigma < np.max(DWI_den) / sigma) assert_(np.max(DWI_den) / sigma < np.max(DWI) / sigma) assert_(rmse_den < rmse_noisy) assert_(rmse_den_wrc < rmse_noisy_wrc) # Check if the results of different PCA methods (eig, svd) are similar DWI_den_svd = localpca(DWI, sigma, pca_method='svd', patch_radius=3) assert_array_almost_equal(DWI_den, DWI_den_svd) assert_raises(ValueError, localpca, DWI, sigma, pca_method='empty') # Try this with a sigma volume, instead of a scalar sigma_vol = sigma * np.ones(DWI.shape[:-1]) mask = np.zeros_like(DWI, dtype=bool)[..., 0] mask[2:-2, 2:-2, 2:-2] = True DWI_den = localpca(DWI, sigma_vol, mask, patch_radius=3) DWI_clean_masked = DWI_clean.copy() DWI_clean_masked[~mask] = 0 DWI_masked = DWI.copy() DWI_masked[~mask] = 0 rmse_den = np.sum(np.abs(DWI_clean_masked - DWI_den)) / np.sum(np.abs( DWI_clean_masked)) rmse_noisy = np.sum(np.abs(DWI_clean_masked - DWI_masked)) / np.sum(np.abs( DWI_clean_masked)) DWI_clean_wrc_masked = DWI_clean_wrc.copy() DWI_clean_wrc_masked[~mask] = 0 rmse_den_wrc = np.sum(np.abs(DWI_clean_wrc_masked - DWI_den) ) / np.sum(np.abs(DWI_clean_wrc_masked)) rmse_noisy_wrc = np.sum(np.abs(DWI_clean_wrc_masked - DWI_masked)) / \ np.sum(np.abs(DWI_clean_wrc_masked)) assert_(np.max(DWI_clean) / sigma < np.max(DWI_den) / sigma) assert_(np.max(DWI_den) / sigma < np.max(DWI) / sigma) assert_(rmse_den < rmse_noisy) assert_(rmse_den_wrc < rmse_noisy_wrc) def test_lpca_ill_conditioned(): gtab = gen_gtab() DWI, sigma = rfiw_phantom(gtab, snr=30) assert_raises(ValueError, localpca, DWI, sigma, patch_radius=1) def test_lpca_sigma_wrong_shape(): gtab = gen_gtab() DWI, sigma = rfiw_phantom(gtab, snr=30) # If sigma is 3D but shape is not like DWI.shape[:-1], an error is raised: sigma = np.zeros((DWI.shape[0], DWI.shape[1] + 1, DWI.shape[2])) assert_raises(ValueError, localpca, DWI, sigma) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/denoise/tests/test_nlmeans.py000066400000000000000000000072671317371701200213610ustar00rootroot00000000000000import numpy as np from numpy.testing import (run_module_suite, assert_, assert_equal, assert_array_almost_equal, assert_raises) from dipy.denoise.nlmeans 
import nlmeans from dipy.denoise.denspeed import (add_padding_reflection, remove_padding) from dipy.utils.omp import cpu_count, have_openmp from time import time def test_nlmeans_padding(): S0 = 100 + 2 * np.random.standard_normal((50, 50, 50)) S0 = S0.astype('f8') S0n = add_padding_reflection(S0, 5) S0n2 = remove_padding(S0n, 5) assert_equal(S0.shape, S0n2.shape) def test_nlmeans_static(): S0 = 100 * np.ones((20, 20, 20), dtype='f8') S0n = nlmeans(S0, sigma=np.ones((20, 20, 20)), rician=False) assert_array_almost_equal(S0, S0n) def test_nlmeans_wrong(): S0 = np.ones((2, 2, 2, 2, 2)) assert_raises(ValueError, nlmeans, S0, 1.0) def test_nlmeans_random_noise(): S0 = 100 + 2 * np.random.standard_normal((22, 23, 30)) S0n = nlmeans(S0, sigma=np.ones((22, 23, 30)) * np.std(S0), rician=False) print(S0.mean(), S0.min(), S0.max()) print(S0n.mean(), S0n.min(), S0n.max()) assert_(S0n.min() > S0.min()) assert_(S0n.max() < S0.max()) assert_equal(np.round(S0n.mean()), 100) def test_nlmeans_boundary(): # nlmeans preserves boundaries S0 = 100 + np.zeros((20, 20, 20)) noise = 2 * np.random.standard_normal((20, 20, 20)) S0 += noise S0[:10, :10, :10] = 300 + noise[:10, :10, :10] S0n = nlmeans(S0, sigma=np.ones((20, 20, 20)) * np.std(noise), rician=False) print(S0[9, 9, 9]) print(S0[10, 10, 10]) assert_(S0[9, 9, 9] > 290) assert_(S0[10, 10, 10] < 110) def test_nlmeans_4D_and_mask(): S0 = 200 * np.ones((20, 20, 20, 3), dtype='f8') mask = np.zeros((20, 20, 20)) mask[10, 10, 10] = 1 S0n = nlmeans(S0, sigma=1, mask=mask, rician=True) assert_equal(S0.shape, S0n.shape) assert_equal(np.round(S0n[10, 10, 10]), 200) assert_equal(S0n[8, 8, 8], 0) def test_nlmeans_dtype(): S0 = 200 * np.ones((20, 20, 20, 3), dtype='f4') mask = np.zeros((20, 20, 20)) mask[10:14, 10:14, 10:14] = 1 S0n = nlmeans(S0, sigma=1, mask=mask, rician=True) assert_equal(S0.dtype, S0n.dtype) S0 = 200 * np.ones((20, 20, 20), dtype=np.uint16) mask = np.zeros((20, 20, 20)) mask[10:14, 10:14, 10:14] = 1 S0n = nlmeans(S0, sigma=np.ones((20, 20, 20)), mask=mask, rician=True) assert_equal(S0.dtype, S0n.dtype) @np.testing.dec.skipif(not have_openmp, 'OpenMP does not appear to be available') def test_nlmeans_4d_3dsigma_and_threads(): # Input is 4D data and 3D sigma data = np.ones((50, 50, 50, 5)) sigma = np.ones(data.shape[:3]) mask = np.zeros(data.shape[:3]) # mask[25-10:25+10] = 1 mask[:] = 1 print('cpu count %d' % (cpu_count(),)) print('1') t = time() new_data = nlmeans(data, sigma, mask, num_threads=1) duration_1core = time() - t print(duration_1core) print('All') t = time() new_data2 = nlmeans(data, sigma, mask, num_threads=None) duration_all_core = time() - t print(duration_all_core) print('2') t = time() new_data3 = nlmeans(data, sigma, mask, num_threads=2) duration_2core = time() - t print(duration_2core) assert_array_almost_equal(new_data, new_data2) assert_array_almost_equal(new_data2, new_data3) if cpu_count() > 2: assert_equal(duration_all_core < duration_2core, True) assert_equal(duration_2core < duration_1core, True) if cpu_count() == 2: assert_equal(duration_2core < duration_1core, True) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/denoise/tests/test_noise_estimate.py000066400000000000000000000157601317371701200227310ustar00rootroot00000000000000from __future__ import division, print_function import numpy as np import nibabel as nib from numpy.testing import (assert_almost_equal, assert_equal, assert_, assert_array_almost_equal) from dipy.denoise.noise_estimate import _inv_nchi_cdf, piesno, estimate_sigma from 
dipy.denoise.noise_estimate import _piesno_3D from dipy.denoise.pca_noise_estimate import pca_noise_estimate import dipy.data as dpd import dipy.core.gradients as dpg import dipy.sims.voxel as vox def test_inv_nchi(): # See page 5 of the reference paper for tested values # Values taken from hispeed.MedianPIESNO.lambdaPlus # and hispeed.MedianPIESNO.lambdaMinus N = 8 K = 20 alpha = 0.01 lambdaMinus = _inv_nchi_cdf(N, K, alpha/2) lambdaPlus = _inv_nchi_cdf(N, K, 1 - alpha/2) assert_almost_equal(lambdaMinus, 6.464855180579397) assert_almost_equal(lambdaPlus, 9.722849086419043) def test_piesno(): # Values taken from hispeed.OptimalPIESNO with the test data # in the package computed in matlab test_piesno_data = nib.load(dpd.get_data("test_piesno")).get_data() sigma = piesno(test_piesno_data, N=8, alpha=0.01, l=1, eps=1e-10, return_mask=False) assert_almost_equal(sigma, 0.010749458025559) noise1 = (np.random.randn(100, 100, 100) * 50) + 10 noise2 = (np.random.randn(100, 100, 100) * 50) + 10 rician_noise = np.sqrt(noise1**2 + noise2**2) sigma, mask = piesno(rician_noise, N=1, alpha=0.01, l=1, eps=1e-10, return_mask=True) # less than 3% of error? assert_(np.abs(sigma - 50) / sigma < 0.03) # Test using the median as the initial estimation initial_estimation = (np.median(sigma) / np.sqrt(2 * _inv_nchi_cdf(1, 1, 0.5))) sigma, mask = _piesno_3D(rician_noise, N=1, alpha=0.01, l=1, eps=1e-10, return_mask=True, initial_estimation=initial_estimation) assert_(np.abs(sigma - 50) / sigma < 0.03) sigma = _piesno_3D(rician_noise, N=1, alpha=0.01, l=1, eps=1e-10, return_mask=False, initial_estimation=initial_estimation) assert_(np.abs(sigma - 50) / sigma < 0.03) sigma = _piesno_3D(np.zeros_like(rician_noise), N=1, alpha=0.01, l=1, eps=1e-10, return_mask=False, initial_estimation=initial_estimation) assert_(np.all(sigma == 0)) sigma, mask = _piesno_3D(np.zeros_like(rician_noise), N=1, alpha=0.01, l=1, eps=1e-10, return_mask=True, initial_estimation=initial_estimation) assert_(np.all(sigma == 0)) assert_(np.all(mask == 0)) # Check if no noise points found in array it exits sigma = _piesno_3D(1000*np.ones_like(rician_noise), N=1, alpha=0.01, l=1, eps=1e-10, return_mask=False, initial_estimation=10) assert_(np.all(sigma == 10)) def test_estimate_sigma(): sigma = estimate_sigma(np.ones((7, 7, 7)), disable_background_masking=True) assert_equal(sigma, 0.) sigma = estimate_sigma(np.ones((7, 7, 7, 3)), disable_background_masking=True) assert_equal(sigma, np.array([0., 0., 0.])) sigma = estimate_sigma(5 * np.ones((7, 7, 7)), disable_background_masking=False) assert_equal(sigma, 0.) 
sigma = estimate_sigma(5 * np.ones((7, 7, 7, 3)), disable_background_masking=False) assert_equal(sigma, np.array([0., 0., 0.])) arr = np.zeros((3, 3, 3)) arr[0, 0, 0] = 1 sigma = estimate_sigma(arr, disable_background_masking=False, N=1) assert_array_almost_equal(sigma, (0.10286889997472792 / np.sqrt(0.42920367320510366))) arr = np.zeros((3, 3, 3, 3)) arr[0, 0, 0] = 1 sigma = estimate_sigma(arr, disable_background_masking=False, N=1) assert_array_almost_equal(sigma, np.array([0.10286889997472792 / np.sqrt(0.42920367320510366), 0.10286889997472792 / np.sqrt(0.42920367320510366), 0.10286889997472792 / np.sqrt(0.42920367320510366)])) arr = np.zeros((3, 3, 3)) arr[0, 0, 0] = 1 sigma = estimate_sigma(arr, disable_background_masking=True, N=4) assert_array_almost_equal(sigma, 0.46291005 / np.sqrt(0.4834941393603609)) arr = np.zeros((3, 3, 3)) arr[0, 0, 0] = 1 sigma = estimate_sigma(arr, disable_background_masking=True, N=0) assert_array_almost_equal(sigma, 0.46291005 / np.sqrt(1)) arr = np.zeros((3, 3, 3, 3)) arr[0, 0, 0] = 1 sigma = estimate_sigma(arr, disable_background_masking=True, N=12) assert_array_almost_equal(sigma, np.array([0.46291005 / np.sqrt(0.4946862482541263), 0.46291005 / np.sqrt(0.4946862482541263), 0.46291005 / np.sqrt(0.4946862482541263)])) def test_pca_noise_estimate(): np.random.seed(1984) # MUBE: bvals1 = np.concatenate([np.zeros(17), np.ones(3) * 1000]) bvecs1 = np.concatenate([np.zeros((17, 3)), np.eye(3)]) gtab1 = dpg.gradient_table(bvals1, bvecs1) # SIBE: bvals2 = np.concatenate([np.zeros(1), np.ones(3) * 1000]) bvecs2 = np.concatenate([np.zeros((1, 3)), np.eye(3)]) gtab2 = dpg.gradient_table(bvals2, bvecs2) for patch_radius in [1, 2]: for gtab in [gtab1, gtab2]: for dtype in [np.int16, np.float64]: signal = np.ones((20, 20, 20, gtab.bvals.shape[0])) for correct_bias in [True, False]: if not correct_bias: # High signal for no bias correction signal = signal * 100 sigma = 1 noise1 = np.random.normal(0, sigma, size=signal.shape) noise2 = np.random.normal(0, sigma, size=signal.shape) # Rician noise: data = np.sqrt((signal + noise1) ** 2 + noise2 ** 2) sigma_est = pca_noise_estimate(data.astype(dtype), gtab, correct_bias=correct_bias, patch_radius=patch_radius) assert_array_almost_equal(np.mean(sigma_est), sigma, decimal=1) sigma = 1 noise1 = np.random.normal(0, sigma, size=signal.shape) noise2 = np.random.normal(0, sigma, size=signal.shape) signal = np.ones((20, 20, 20, gtab.bvals.shape[0])) assert_(np.mean(pca_noise_estimate(data, gtab, correct_bias=True)) > np.mean(pca_noise_estimate(data, gtab, correct_bias=False))) dipy-0.13.0/dipy/denoise/tests/test_non_local_means.py000066400000000000000000000055261317371701200230470ustar00rootroot00000000000000import numpy as np from numpy.testing import (run_module_suite, assert_, assert_equal, assert_array_almost_equal, assert_raises) from dipy.denoise.non_local_means import non_local_means def test_nlmeans_static(): S0 = 100 * np.ones((20, 20, 20), dtype='f8') S0nb = non_local_means(S0, sigma=1.0, rician=False) assert_array_almost_equal(S0, S0nb) def test_nlmeans_random_noise(): S0 = 100 + 2 * np.random.standard_normal((22, 23, 30)) masker = np.zeros(S0.shape[:3]).astype(bool) masker[8:15, 8:15, 8:15] = 1 for mask in [None, masker]: S0nb = non_local_means(S0, sigma=np.std(S0), rician=False, mask=mask) assert_(S0nb[mask].min() > S0[mask].min()) assert_(S0nb[mask].max() < S0[mask].max()) assert_equal(np.round(S0nb[mask].mean()), 100) S0nb = non_local_means(S0, sigma=np.std(S0), rician=False, mask=mask) assert_(S0nb[mask].min() > 
S0[mask].min()) assert_(S0nb[mask].max() < S0[mask].max()) assert_equal(np.round(S0nb[mask].mean()), 100) def test_scalar_sigma(): S0 = 100 + np.zeros((20, 20, 20)) noise = 2 * np.random.standard_normal((20, 20, 20)) S0 += noise S0[:10, :10, :10] = 300 + noise[:10, :10, :10] S0n = assert_raises( ValueError, non_local_means, S0, sigma=noise, rician=False) def test_nlmeans_boundary(): # nlmeans preserves boundaries S0 = 100 + np.zeros((20, 20, 20)) noise = 2 * np.random.standard_normal((20, 20, 20)) S0 += noise S0[:10, :10, :10] = 300 + noise[:10, :10, :10] S0n = non_local_means(S0, sigma=np.std(noise), rician=False) assert_(S0[9, 9, 9] > 290) assert_(S0[10, 10, 10] < 110) def test_nlmeans_wrong(): S0 = 100 + np.zeros((10, 10, 10, 10, 10)) assert_raises(ValueError, non_local_means, S0, 1.0) S0 = 100 + np.zeros((20, 20, 20)) mask = np.ones((10, 10)) assert_raises(ValueError, non_local_means, S0, 1.0, mask) def test_nlmeans_4D_and_mask(): S0 = 200 * np.ones((20, 20, 20, 3), dtype='f8') mask = np.zeros((20, 20, 20)) mask[10, 10, 10] = 1 S0n = non_local_means(S0, sigma=1, mask=mask, rician=True) assert_equal(S0.shape, S0n.shape) assert_equal(np.round(S0n[10, 10, 10]), 200) assert_equal(S0n[8, 8, 8], 0) def test_nlmeans_dtype(): S0 = 200 * np.ones((20, 20, 20, 3), dtype='f4') mask = np.zeros((20, 20, 20)) mask[10:14, 10:14, 10:14] = 1 S0n = non_local_means(S0, sigma=1, mask=mask, rician=True) assert_equal(S0.dtype, S0n.dtype) S0 = 200 * np.ones((20, 20, 20), dtype=np.uint16) mask = np.zeros((20, 20, 20)) mask[10:14, 10:14, 10:14] = 1 S0n = non_local_means(S0, sigma=1, mask=mask, rician=True) assert_equal(S0.dtype, S0n.dtype) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/direction/000077500000000000000000000000001317371701200154675ustar00rootroot00000000000000dipy-0.13.0/dipy/direction/__init__.py000066400000000000000000000002571317371701200176040ustar00rootroot00000000000000 from .probabilistic_direction_getter import ProbabilisticDirectionGetter from .probabilistic_direction_getter import DeterministicMaximumDirectionGetter from .peaks import * dipy-0.13.0/dipy/direction/peaks.py000066400000000000000000000530711317371701200171520ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import from multiprocessing import cpu_count, Pool from itertools import repeat from os import path from warnings import warn from dipy.utils.six.moves import xrange from nibabel.tmpdirs import InTemporaryDirectory import numpy as np import scipy.optimize as opt from dipy.reconst.odf import gfa from dipy.reconst.recspeed import (local_maxima, remove_similar_vertices, search_descending) from dipy.core.sphere import Sphere from dipy.data import default_sphere from dipy.core.ndindex import ndindex from dipy.reconst.shm import sh_to_sf_matrix from dipy.reconst.peak_direction_getter import PeaksAndMetricsDirectionGetter def peak_directions_nl(sphere_eval, relative_peak_threshold=.25, min_separation_angle=25, sphere=default_sphere, xtol=1e-7): """Non Linear Direction Finder. Parameters ---------- sphere_eval : callable A function which can be evaluated on a sphere. relative_peak_threshold : float Only return peaks greater than ``relative_peak_threshold * m`` where m is the largest peak. min_separation_angle : float in [0, 90] The minimum distance between directions. If two peaks are too close only the larger of the two is returned. sphere : Sphere A discrete Sphere. The points on the sphere will be used for initial estimate of maximums. 
xtol : float Relative tolerance for optimization. Returns ------- directions : array (N, 3) Points on the sphere corresponding to N local maxima on the sphere. values : array (N,) Value of sphere_eval at each point on directions. """ # Find discrete peaks for use as seeds in non-linear search discrete_values = sphere_eval(sphere) values, indices = local_maxima(discrete_values, sphere.edges) seeds = np.column_stack([sphere.theta[indices], sphere.phi[indices]]) # Helper function def _helper(x): sphere = Sphere(theta=x[0], phi=x[1]) return -sphere_eval(sphere) # Non-linear search num_seeds = len(seeds) theta = np.empty(num_seeds) phi = np.empty(num_seeds) for i in xrange(num_seeds): peak = opt.fmin(_helper, seeds[i], xtol=xtol, disp=False) theta[i], phi[i] = peak # Evaluate on new-found peaks small_sphere = Sphere(theta=theta, phi=phi) values = sphere_eval(small_sphere) # Sort in descending order order = values.argsort()[::-1] values = values[order] directions = small_sphere.vertices[order] # Remove directions that are too small n = search_descending(values, relative_peak_threshold) directions = directions[:n] # Remove peaks too close to each-other directions, idx = remove_similar_vertices(directions, min_separation_angle, return_index=True) values = values[idx] return directions, values def peak_directions(odf, sphere, relative_peak_threshold=.5, min_separation_angle=25, minmax_norm=True): """Get the directions of odf peaks. Peaks are defined as points on the odf that are greater than at least one neighbor and greater than or equal to all neighbors. Peaks are sorted in descending order by their values then filtered based on their relative size and spacing on the sphere. An odf may have 0 peaks, for example if the odf is perfectly isotropic. Parameters ---------- odf : 1d ndarray The odf function evaluated on the vertices of `sphere` sphere : Sphere The Sphere providing discrete directions for evaluation. relative_peak_threshold : float in [0., 1.] Only peaks greater than ``min + relative_peak_threshold * scale`` are kept, where ``min = max(0, odf.min())`` and ``scale = odf.max() - min``. min_separation_angle : float in [0, 90] The minimum distance between directions. If two peaks are too close only the larger of the two is returned. Returns ------- directions : (N, 3) ndarray N vertices for sphere, one for each peak values : (N,) ndarray peak values indices : (N,) ndarray peak indices of the directions on the sphere Notes ----- If the odf has any negative values, they will be clipped to zeros. """ values, indices = local_maxima(odf, sphere.edges) # If there is only one peak return n = len(values) if n == 0 or (values[0] < 0.): return np.zeros((0, 3)), np.zeros(0), np.zeros(0, dtype=int) elif n == 1: return sphere.vertices[indices], values, indices odf_min = odf.min() odf_min = odf_min if (odf_min >= 0.) else 0. 
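# Illustrative aside, not part of the original dipy source (the numbers
# below are made up): with values == [1.0, 0.8, 0.3], odf_min == 0.0 and
# relative_peak_threshold == 0.5, only the first two peaks survive, since
# 0.3 < 0.5 * 1.0.  Rescaling every entry by the same positive constant
# leaves the outcome of this relative test unchanged.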
# because of the relative threshold this algorithm will give the same peaks # as if we divide (values - odf_min) with (odf_max - odf_min) or not so # here we skip the division to increase speed values_norm = (values - odf_min) # Remove small peaks n = search_descending(values_norm, relative_peak_threshold) indices = indices[:n] directions = sphere.vertices[indices] # Remove peaks too close together directions, uniq = remove_similar_vertices(directions, min_separation_angle, return_index=True) values = values[uniq] indices = indices[uniq] return directions, values, indices def _pam_from_attrs(klass, sphere, peak_indices, peak_values, peak_dirs, gfa, qa, shm_coeff, B, odf): """ Construct PeaksAndMetrics object (or subclass) from its attributes. This is also useful for pickling/unpickling of these objects (see also :func:`__reduce__` below). Parameters ---------- klass : class The class of object to be created. sphere : `Sphere` class instance. Sphere for discretization. peak_indices : ndarray Indices (in sphere vertices) of the peaks in each voxel. peak_values : ndarray The value of the peaks. peak_dirs : ndarray The direction of each peak. gfa : ndarray The Generalized Fractional Anisotropy in each voxel. qa : ndarray Quantitative Anisotropy in each voxel. shm_coeff : ndarray The coefficients of the spherical harmonic basis for the ODF in each voxel. B : ndarray The spherical harmonic matrix, for multiplication with the coefficients. odf : ndarray The orientation distribution function on the sphere in each voxel. Returns ------- pam : Instance of the class `klass`. """ this_pam = klass() this_pam.sphere = sphere this_pam.peak_dirs = peak_dirs this_pam.peak_values = peak_values this_pam.peak_indices = peak_indices this_pam.gfa = gfa this_pam.qa = qa this_pam.shm_coeff = shm_coeff this_pam.B = B this_pam.odf = odf return this_pam class PeaksAndMetrics(PeaksAndMetricsDirectionGetter): def __reduce__(self): return _pam_from_attrs, (self.__class__, self.sphere, self.peak_indices, self.peak_values, self.peak_dirs, self.gfa, self.qa, self.shm_coeff, self.B, self.odf) def _peaks_from_model_parallel(model, data, sphere, relative_peak_threshold, min_separation_angle, mask, return_odf, return_sh, gfa_thr, normalize_peaks, sh_order, sh_basis_type, npeaks, B, invB, nbr_processes): if nbr_processes is None: try: nbr_processes = cpu_count() except NotImplementedError: warn("Cannot determine number of cpus. " "returns peaks_from_model(..., parallel=False).") return peaks_from_model(model, data, sphere, relative_peak_threshold, min_separation_angle, mask, return_odf, return_sh, gfa_thr, normalize_peaks, sh_order, sh_basis_type, npeaks, parallel=False) elif nbr_processes <= 0: warn("Invalid number of processes (%d). " "returns peaks_from_model(..., parallel=False)." 
% nbr_processes) return peaks_from_model(model, data, sphere, relative_peak_threshold, min_separation_angle, mask, return_odf, return_sh, gfa_thr, normalize_peaks, sh_order, sh_basis_type, npeaks, parallel=False) shape = list(data.shape) data = np.reshape(data, (-1, shape[-1])) n = data.shape[0] nbr_chunks = nbr_processes ** 2 chunk_size = int(np.ceil(n / nbr_chunks)) indices = list(zip(np.arange(0, n, chunk_size), np.arange(0, n, chunk_size) + chunk_size)) with InTemporaryDirectory() as tmpdir: data_file_name = path.join(tmpdir, 'data.npy') np.save(data_file_name, data) if mask is not None: mask = mask.flatten() mask_file_name = path.join(tmpdir, 'mask.npy') np.save(mask_file_name, mask) else: mask_file_name = None pool = Pool(nbr_processes) pam_res = pool.map(_peaks_from_model_parallel_sub, zip(repeat((data_file_name, mask_file_name)), indices, repeat(model), repeat(sphere), repeat(relative_peak_threshold), repeat(min_separation_angle), repeat(return_odf), repeat(return_sh), repeat(gfa_thr), repeat(normalize_peaks), repeat(sh_order), repeat(sh_basis_type), repeat(npeaks), repeat(B), repeat(invB))) pool.close() pam = PeaksAndMetrics() pam.sphere = sphere # use memmap to reduce the memory usage pam.gfa = np.memmap(path.join(tmpdir, 'gfa.npy'), dtype=pam_res[0].gfa.dtype, mode='w+', shape=(data.shape[0])) pam.peak_dirs = np.memmap(path.join(tmpdir, 'peak_dirs.npy'), dtype=pam_res[0].peak_dirs.dtype, mode='w+', shape=(data.shape[0], npeaks, 3)) pam.peak_values = np.memmap(path.join(tmpdir, 'peak_values.npy'), dtype=pam_res[0].peak_values.dtype, mode='w+', shape=(data.shape[0], npeaks)) pam.peak_indices = np.memmap(path.join(tmpdir, 'peak_indices.npy'), dtype=pam_res[0].peak_indices.dtype, mode='w+', shape=(data.shape[0], npeaks)) pam.qa = np.memmap(path.join(tmpdir, 'qa.npy'), dtype=pam_res[0].qa.dtype, mode='w+', shape=(data.shape[0], npeaks)) if return_sh: nbr_shm_coeff = (sh_order + 2) * (sh_order + 1) // 2 pam.shm_coeff = np.memmap(path.join(tmpdir, 'shm.npy'), dtype=pam_res[0].shm_coeff.dtype, mode='w+', shape=(data.shape[0], nbr_shm_coeff)) pam.B = pam_res[0].B else: pam.shm_coeff = None pam.invB = None if return_odf: pam.odf = np.memmap(path.join(tmpdir, 'odf.npy'), dtype=pam_res[0].odf.dtype, mode='w+', shape=(data.shape[0], len(sphere.vertices))) else: pam.odf = None # copy subprocesses pam to a single pam (memmaps) for i, (start_pos, end_pos) in enumerate(indices): pam.gfa[start_pos: end_pos] = pam_res[i].gfa pam.peak_dirs[start_pos: end_pos] = pam_res[i].peak_dirs pam.peak_values[start_pos: end_pos] = pam_res[i].peak_values pam.peak_indices[start_pos: end_pos] = pam_res[i].peak_indices pam.qa[start_pos: end_pos] = pam_res[i].qa if return_sh: pam.shm_coeff[start_pos: end_pos] = pam_res[i].shm_coeff if return_odf: pam.odf[start_pos: end_pos] = pam_res[i].odf pam_res = None # load memmaps to arrays and reshape the metric shape[-1] = -1 pam.gfa = np.reshape(np.array(pam.gfa), shape[:-1]) pam.peak_dirs = np.reshape(np.array(pam.peak_dirs), shape + [3]) pam.peak_values = np.reshape(np.array(pam.peak_values), shape) pam.peak_indices = np.reshape(np.array(pam.peak_indices), shape) pam.qa = np.reshape(np.array(pam.qa), shape) if return_sh: pam.shm_coeff = np.reshape(np.array(pam.shm_coeff), shape) if return_odf: pam.odf = np.reshape(np.array(pam.odf), shape) # Make sure all worker processes have exited before leaving context # manager in order to prevent temporary file deletion errors in windows pool.join() return pam def _peaks_from_model_parallel_sub(args): (data_file_name, 
mask_file_name) = args[0] (start_pos, end_pos) = args[1] model = args[2] sphere = args[3] relative_peak_threshold = args[4] min_separation_angle = args[5] return_odf = args[6] return_sh = args[7] gfa_thr = args[8] normalize_peaks = args[9] sh_order = args[10] sh_basis_type = args[11] npeaks = args[12] B = args[13] invB = args[14] data = np.load(data_file_name, mmap_mode='r')[start_pos:end_pos] if mask_file_name is not None: mask = np.load(mask_file_name, mmap_mode='r')[start_pos:end_pos] else: mask = None return peaks_from_model(model, data, sphere, relative_peak_threshold, min_separation_angle, mask, return_odf, return_sh, gfa_thr, normalize_peaks, sh_order, sh_basis_type, npeaks, B, invB, parallel=False, nbr_processes=None) def peaks_from_model(model, data, sphere, relative_peak_threshold, min_separation_angle, mask=None, return_odf=False, return_sh=True, gfa_thr=0, normalize_peaks=False, sh_order=8, sh_basis_type=None, npeaks=5, B=None, invB=None, parallel=False, nbr_processes=None): """Fit the model to data and computes peaks and metrics Parameters ---------- model : a model instance `model` will be used to fit the data. sphere : Sphere The Sphere providing discrete directions for evaluation. relative_peak_threshold : float Only return peaks greater than ``relative_peak_threshold * m`` where m is the largest peak. min_separation_angle : float in [0, 90] The minimum distance between directions. If two peaks are too close only the larger of the two is returned. mask : array, optional If `mask` is provided, voxels that are False in `mask` are skipped and no peaks are returned. return_odf : bool If True, the odfs are returned. return_sh : bool If True, the odf as spherical harmonics coefficients is returned gfa_thr : float Voxels with gfa less than `gfa_thr` are skipped, no peaks are returned. normalize_peaks : bool If true, all peak values are calculated relative to `max(odf)`. sh_order : int, optional Maximum SH order in the SH fit. For `sh_order`, there will be ``(sh_order + 1) * (sh_order + 2) / 2`` SH coefficients (default 8). sh_basis_type : {None, 'mrtrix', 'fibernav'} ``None`` for the default dipy basis which is the fibernav basis, ``mrtrix`` for the MRtrix basis, and ``fibernav`` for the FiberNavigator basis sh_smooth : float, optional Lambda-regularization in the SH fit (default 0.0). npeaks : int Maximum number of peaks found (default 5 peaks). B : ndarray, optional Matrix that transforms spherical harmonics to spherical function ``sf = np.dot(sh, B)``. invB : ndarray, optional Inverse of B. parallel: bool If True, use multiprocessing to compute peaks and metric (default False). Temporary files are saved in the default temporary directory of the system. It can be changed using ``import tempfile`` and ``tempfile.tempdir = '/path/to/tempdir'``. nbr_processes: int If `parallel` is True, the number of subprocesses to use (default multiprocessing.cpu_count()). Returns ------- pam : PeaksAndMetrics An object with ``gfa``, ``peak_directions``, ``peak_values``, ``peak_indices``, ``odf``, ``shm_coeffs`` as attributes """ if return_sh and (B is None or invB is None): B, invB = sh_to_sf_matrix( sphere, sh_order, sh_basis_type, return_inv=True) if parallel: # It is mandatory to provide B and invB to the parallel function. # Otherwise, a call to np.linalg.pinv is made in a subprocess and # makes it timeout on some system. 
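# Illustrative aside, not part of the original dipy source: B and invB are
# the spherical-harmonic <-> spherical-function matrices built above with
# sh_to_sf_matrix, so for an ODF sampled on ``sphere`` one expects,
# approximately,
#     odf       ~= np.dot(shm_coeff, B)
#     shm_coeff ~= np.dot(odf, invB)
# Passing them down means no subprocess has to call np.linalg.pinv itself.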
# see https://github.com/nipy/dipy/issues/253 for details return _peaks_from_model_parallel(model, data, sphere, relative_peak_threshold, min_separation_angle, mask, return_odf, return_sh, gfa_thr, normalize_peaks, sh_order, sh_basis_type, npeaks, B, invB, nbr_processes) shape = data.shape[:-1] if mask is None: mask = np.ones(shape, dtype='bool') else: if mask.shape != shape: raise ValueError("Mask is not the same shape as data.") gfa_array = np.zeros(shape) qa_array = np.zeros((shape + (npeaks,))) peak_dirs = np.zeros((shape + (npeaks, 3))) peak_values = np.zeros((shape + (npeaks,))) peak_indices = np.zeros((shape + (npeaks,)), dtype='int') peak_indices.fill(-1) if return_sh: n_shm_coeff = (sh_order + 2) * (sh_order + 1) // 2 shm_coeff = np.zeros((shape + (n_shm_coeff,))) if return_odf: odf_array = np.zeros((shape + (len(sphere.vertices),))) global_max = -np.inf for idx in ndindex(shape): if not mask[idx]: continue odf = model.fit(data[idx]).odf(sphere) if return_sh: shm_coeff[idx] = np.dot(odf, invB) if return_odf: odf_array[idx] = odf gfa_array[idx] = gfa(odf) if gfa_array[idx] < gfa_thr: global_max = max(global_max, odf.max()) continue # Get peaks of odf direction, pk, ind = peak_directions(odf, sphere, relative_peak_threshold, min_separation_angle) # Calculate peak metrics if pk.shape[0] != 0: global_max = max(global_max, pk[0]) n = min(npeaks, pk.shape[0]) qa_array[idx][:n] = pk[:n] - odf.min() peak_dirs[idx][:n] = direction[:n] peak_indices[idx][:n] = ind[:n] peak_values[idx][:n] = pk[:n] if normalize_peaks: peak_values[idx][:n] /= pk[0] peak_dirs[idx] *= peak_values[idx][:, None] qa_array /= global_max return _pam_from_attrs(PeaksAndMetrics, sphere, peak_indices, peak_values, peak_dirs, gfa_array, qa_array, shm_coeff if return_sh else None, B if return_sh else None, odf_array if return_odf else None) def reshape_peaks_for_visualization(peaks): """Reshape peaks for visualization. Reshape and convert to float32 a set of peaks for visualisation with mrtrix or the fibernavigator. Parameters: ----------- peaks: nd array (..., N, 3) or PeaksAndMetrics object The peaks to be reshaped and converted to float32. Returns: -------- peaks : nd array (..., 3*N) """ if isinstance(peaks, PeaksAndMetrics): peaks = peaks.peak_dirs return peaks.reshape(np.append(peaks.shape[:-2], -1)).astype('float32') dipy-0.13.0/dipy/direction/probabilistic_direction_getter.py000066400000000000000000000255361317371701200243140ustar00rootroot00000000000000""" Implementation of a probabilistic direction getter based on sampling from discrete distribution (pmf) at each step of the tracking.""" import numpy as np from dipy.direction.peaks import peak_directions, default_sphere from dipy.reconst.shm import order_from_ncoef, sph_harm_lookup from dipy.tracking.local.direction_getter import DirectionGetter from dipy.tracking.local.interpolation import trilinear_interpolate4d def _asarray(cython_memview): # TODO: figure out the best way to get an array from a memory view. # `np.array(view)` works, but is quite slow. Views are also "array_like", # but using them as arrays seems to also be quite slow. 
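# Illustrative aside, not part of the original dipy source: for a small
# 1-D float memoryview the conversion below behaves like, e.g.
#     np.fromiter(memoryview(np.array([1., 2., 3.])), float)
# which yields array([1., 2., 3.]) without going through np.array(view).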
return np.fromiter(cython_memview, float) class PmfGen(object): pass class SimplePmfGen(PmfGen): def __init__(self, pmf_array): if pmf_array.min() < 0: raise ValueError("pmf should not have negative values") self.pmf_array = pmf_array def get_pmf(self, point): return trilinear_interpolate4d(self.pmf_array, point) class SHCoeffPmfGen(PmfGen): def __init__(self, shcoeff, sphere, basis_type): self.shcoeff = shcoeff self.sphere = sphere sh_order = order_from_ncoef(shcoeff.shape[-1]) try: basis = sph_harm_lookup[basis_type] except KeyError: raise ValueError("%s is not a known basis type." % basis_type) self._B, m, n = basis(sh_order, sphere.theta, sphere.phi) def get_pmf(self, point): coeff = trilinear_interpolate4d(self.shcoeff, point) pmf = np.dot(self._B, coeff) pmf.clip(0, out=pmf) return pmf class PeakDirectionGetter(DirectionGetter): """An abstract class for DirectionGetters that use the peak_directions machinery.""" sphere = default_sphere def __init__(self, sphere=None, **kwargs): if sphere is not None: self.sphere = sphere self._pf_kwargs = kwargs def _peak_directions(self, blob): """Gets directions using parameters provided at init. Blob can be any function defined on ``self.sphere``, ie an ODF, PMF, FOD. """ return peak_directions(blob, self.sphere, **self._pf_kwargs)[0] class ProbabilisticDirectionGetter(PeakDirectionGetter): """Randomly samples direction of a sphere based on probability mass function (pmf). The main constructors for this class are current from_pmf and from_shcoeff. The pmf gives the probability that each direction on the sphere should be chosen as the next direction. To get the true pmf from the "raw pmf" directions more than ``max_angle`` degrees from the incoming direction are set to 0 and the result is normalized. """ @classmethod def from_pmf(klass, pmf, max_angle, sphere, pmf_threshold=0.1, **kwargs): """Constructor for making a DirectionGetter from an array of Pmfs Parameters ---------- pmf : array, 4d The pmf to be used for tracking at each voxel. max_angle : float, [0, 90] The maximum allowed angle between incoming direction and new direction. sphere : Sphere The set of directions to be used for tracking. pmf_threshold : float [0., 1.] Used to remove direction from the probability mass function for selecting the tracking direction. relative_peak_threshold : float in [0., 1.] Used for extracting initial tracking directions. Passed to peak_directions. min_separation_angle : float in [0, 90] Used for extracting initial tracking directions. Passed to peak_directions. See also -------- dipy.direction.peaks.peak_directions """ pmf = np.asarray(pmf, dtype=float) if pmf.ndim != 4: raise ValueError("pmf should be a 4d array.") if pmf.shape[3] != len(sphere.theta): msg = ("The last dimension of pmf should match the number of " "points in sphere.") raise ValueError(msg) pmf_gen = SimplePmfGen(pmf) return klass(pmf_gen, max_angle, sphere, pmf_threshold, **kwargs) @classmethod def from_shcoeff(klass, shcoeff, max_angle, sphere, pmf_threshold=0.1, basis_type=None, **kwargs): """Probabilistic direction getter from a distribution of directions on the sphere. Parameters ---------- shcoeff : array The distribution of tracking directions at each voxel represented as a function on the sphere using the real spherical harmonic basis. For example the FOD of the Constrained Spherical Deconvolution model can be used this way. This distribution will be discretized using ``sphere`` and tracking directions will be chosen from the vertices of ``sphere`` based on the distribution. 
max_angle : float, [0, 90] The maximum allowed angle between incoming direction and new direction. sphere : Sphere The set of directions to be used for tracking. pmf_threshold : float [0., 1.] Used to remove direction from the probability mass function for selecting the tracking direction. basis_type : name of basis The basis that ``shcoeff`` are associated with. ``dipy.reconst.shm.real_sym_sh_basis`` is used by default. relative_peak_threshold : float in [0., 1.] Used for extracting initial tracking directions. Passed to peak_directions. min_separation_angle : float in [0, 90] Used for extracting initial tracking directions. Passed to peak_directions. See also -------- dipy.direction.peaks.peak_directions """ pmf_gen = SHCoeffPmfGen(shcoeff, sphere, basis_type) return klass(pmf_gen, max_angle, sphere, pmf_threshold, **kwargs) def __init__(self, pmf_gen, max_angle, sphere=None, pmf_threshold=0.1, **kwargs): """Direction getter from a pmf generator. Parameters ---------- pmf_gen : PmfGen Used to get probability mass function for selecting tracking directions. max_angle : float, [0, 90] The maximum allowed angle between incoming direction and new direction. sphere : Sphere The set of directions to be used for tracking. pmf_threshold : float [0., 1.] Used to remove direction from the probability mass function for selecting the tracking direction. relative_peak_threshold : float in [0., 1.] Used for extracting initial tracking directions. Passed to peak_directions. min_separation_angle : float in [0, 90] Used for extracting initial tracking directions. Passed to peak_directions. See also -------- dipy.direction.peaks.peak_directions """ PeakDirectionGetter.__init__(self, sphere, **kwargs) self.pmf_gen = pmf_gen self.pmf_threshold = pmf_threshold # The vertices need to be in a contiguous array self.vertices = self.sphere.vertices.copy() cos_similarity = np.cos(np.deg2rad(max_angle)) self._set_adjacency_matrix(sphere, cos_similarity) def _set_adjacency_matrix(self, sphere, cos_similarity): """Creates a dictionary where each key is a direction from sphere and each value is a boolean array indicating which directions are less than max_angle degrees from the key""" matrix = np.dot(sphere.vertices, sphere.vertices.T) matrix = abs(matrix) >= cos_similarity keys = [tuple(v) for v in sphere.vertices] adj_matrix = dict(zip(keys, matrix)) keys = [tuple(-v) for v in sphere.vertices] adj_matrix.update(zip(keys, matrix)) self._adj_matrix = adj_matrix def initial_direction(self, point): """Returns best directions at seed location to start tracking. Parameters ---------- point : ndarray, shape (3,) The point in an image at which to lookup tracking directions. Returns ------- directions : ndarray, shape (N, 3) Possible tracking directions from point. ``N`` may be 0, all directions should be unique. """ pmf = self.pmf_gen.get_pmf(point) return self._peak_directions(pmf) def get_direction(self, point, direction): """Samples a pmf to updates ``direction`` array with a new direction. Parameters ---------- point : memory-view (or ndarray), shape (3,) The point in an image at which to lookup tracking directions. direction : memory-view (or ndarray), shape (3,) Previous tracking direction. Returns ------- status : int Returns 0 `direction` was updated with a new tracking direction, or 1 otherwise. 
""" # point and direction are passed in as cython memory views pmf = self.pmf_gen.get_pmf(point) pmf[pmf < self.pmf_threshold] = 0 cdf = (self._adj_matrix[tuple(direction)] * pmf).cumsum() if cdf[-1] == 0: return 1 random_sample = np.random.random() * cdf[-1] idx = cdf.searchsorted(random_sample, 'right') newdir = self.vertices[idx] # Update direction and return 0 for error if np.dot(newdir, _asarray(direction)) > 0: direction[:] = newdir else: direction[:] = -newdir return 0 class DeterministicMaximumDirectionGetter(ProbabilisticDirectionGetter): """Return direction of a sphere with the highest probability mass function (pmf). """ def get_direction(self, point, direction): """Find direction with the highest pmf to updates ``direction`` array with a new direction. Parameters ---------- point : memory-view (or ndarray), shape (3,) The point in an image at which to lookup tracking directions. direction : memory-view (or ndarray), shape (3,) Previous tracking direction. Returns ------- status : int Returns 0 `direction` was updated with a new tracking direction, or 1 otherwise. """ # point and direction are passed in as cython memory views pmf = self.pmf_gen.get_pmf(point) pmf[pmf < self.pmf_threshold] = 0 cdf = self._adj_matrix[tuple(direction)] * pmf idx = np.argmax(cdf) if pmf[idx] == 0: return 1 newdir = self.vertices[idx] # Update direction and return 0 for error if np.dot(newdir, _asarray(direction)) > 0: direction[:] = newdir else: direction[:] = -newdir return 0 dipy-0.13.0/dipy/direction/tests/000077500000000000000000000000001317371701200166315ustar00rootroot00000000000000dipy-0.13.0/dipy/direction/tests/__init__.py000066400000000000000000000000411317371701200207350ustar00rootroot00000000000000# Make direction/tests a package dipy-0.13.0/dipy/direction/tests/test_peaks.py000066400000000000000000000571051317371701200213550ustar00rootroot00000000000000import numpy as np import pickle from io import BytesIO from numpy.testing import (assert_array_equal, assert_array_almost_equal, assert_almost_equal, run_module_suite, assert_equal, assert_) from dipy.reconst.odf import (OdfFit, OdfModel, gfa) from dipy.direction.peaks import (peaks_from_model, peak_directions, peak_directions_nl, reshape_peaks_for_visualization) from dipy.core.subdivide_octahedron import create_unit_hemisphere from dipy.core.sphere import unit_icosahedron from dipy.sims.voxel import multi_tensor, multi_tensor_odf from dipy.data import get_data, get_sphere from dipy.core.gradients import gradient_table, GradientTable from dipy.core.sphere_stats import angular_similarity from dipy.core.sphere import HemiSphere def test_peak_directions_nl(): def discrete_eval(sphere): return abs(sphere.vertices).sum(-1) directions, values = peak_directions_nl(discrete_eval) assert_equal(directions.shape, (4, 3)) assert_array_almost_equal(abs(directions), 1 / np.sqrt(3)) assert_array_equal(values, abs(directions).sum(-1)) # Test using a different sphere sphere = unit_icosahedron.subdivide(4) directions, values = peak_directions_nl(discrete_eval, sphere=sphere) assert_equal(directions.shape, (4, 3)) assert_array_almost_equal(abs(directions), 1 / np.sqrt(3)) assert_array_equal(values, abs(directions).sum(-1)) # Test the relative_peak_threshold def discrete_eval(sphere): A = abs(sphere.vertices).sum(-1) x, y, z = sphere.vertices.T B = 1 + (x * z > 0) + 2 * (y * z > 0) return A * B directions, values = peak_directions_nl(discrete_eval, .01) assert_equal(directions.shape, (4, 3)) directions, values = peak_directions_nl(discrete_eval, .3) 
assert_equal(directions.shape, (3, 3)) directions, values = peak_directions_nl(discrete_eval, .6) assert_equal(directions.shape, (2, 3)) directions, values = peak_directions_nl(discrete_eval, .8) assert_equal(directions.shape, (1, 3)) assert_almost_equal(values, 4 * 3 / np.sqrt(3)) # Test odfs with large areas of zero def discrete_eval(sphere): A = abs(sphere.vertices).sum(-1) x, y, z = sphere.vertices.T B = (x * z > 0) + 2 * (y * z > 0) return A * B directions, values = peak_directions_nl(discrete_eval, 0.) assert_equal(directions.shape, (3, 3)) directions, values = peak_directions_nl(discrete_eval, .6) assert_equal(directions.shape, (2, 3)) directions, values = peak_directions_nl(discrete_eval, .8) assert_equal(directions.shape, (1, 3)) assert_almost_equal(values, 3 * 3 / np.sqrt(3)) _sphere = create_unit_hemisphere(4) _odf = (_sphere.vertices * [1, 2, 3]).sum(-1) _gtab = GradientTable(np.ones((64, 3))) class SimpleOdfModel(OdfModel): sphere = _sphere def fit(self, data): fit = SimpleOdfFit(self, data) fit.model = self return fit class SimpleOdfFit(OdfFit): def odf(self, sphere=None): if sphere is None: sphere = self.model.sphere # Use ascontiguousarray to work around a bug in NumPy return np.ascontiguousarray((sphere.vertices * [1, 2, 3]).sum(-1)) def test_OdfFit(): m = SimpleOdfModel(_gtab) f = m.fit(None) odf = f.odf(_sphere) assert_equal(len(odf), len(_sphere.theta)) def test_peak_directions(): model = SimpleOdfModel(_gtab) fit = model.fit(None) odf = fit.odf() argmax = odf.argmax() mx = odf.max() sphere = fit.model.sphere # Only one peak dir, val, ind = peak_directions(odf, sphere, .5, 45) dir_e = sphere.vertices[[argmax]] assert_array_equal(ind, [argmax]) assert_array_equal(val, odf[ind]) assert_array_equal(dir, dir_e) odf[0] = mx * .9 # Two peaks, relative_threshold dir, val, ind = peak_directions(odf, sphere, 1., 0) dir_e = sphere.vertices[[argmax]] assert_array_equal(dir, dir_e) assert_array_equal(ind, [argmax]) assert_array_equal(val, odf[ind]) dir, val, ind = peak_directions(odf, sphere, .8, 0) dir_e = sphere.vertices[[argmax, 0]] assert_array_equal(dir, dir_e) assert_array_equal(ind, [argmax, 0]) assert_array_equal(val, odf[ind]) # Two peaks, angle_sep dir, val, ind = peak_directions(odf, sphere, 0., 90) dir_e = sphere.vertices[[argmax]] assert_array_equal(dir, dir_e) assert_array_equal(ind, [argmax]) assert_array_equal(val, odf[ind]) dir, val, ind = peak_directions(odf, sphere, 0., 0) dir_e = sphere.vertices[[argmax, 0]] assert_array_equal(dir, dir_e) assert_array_equal(ind, [argmax, 0]) assert_array_equal(val, odf[ind]) def _create_mt_sim(mevals, angles, fractions, S0, SNR, half_sphere=False): _, fbvals, fbvecs = get_data('small_64D') bvals = np.load(fbvals) bvecs = np.load(fbvecs) gtab = gradient_table(bvals, bvecs) S, sticks = multi_tensor(gtab, mevals, S0, angles=angles, fractions=fractions, snr=SNR) sphere = get_sphere('symmetric724').subdivide(2) if half_sphere: sphere = HemiSphere.from_sphere(sphere) odf_gt = multi_tensor_odf(sphere.vertices, mevals, angles=angles, fractions=fractions) return odf_gt, sticks, sphere def test_peak_directions_thorough(): # two equal fibers (creating a very sharp odf) mevals = np.array([[0.0025, 0.0003, 0.0003], [0.0025, 0.0003, 0.0003]]) angles = [(0, 0), (45, 0)] fractions = [50, 50] odf_gt, sticks, sphere = _create_mt_sim(mevals, angles, fractions, 100, None) directions, values, indices = peak_directions(odf_gt, sphere, .5, 25.) 
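# Illustrative aside, not part of the original dipy tests:
# angular_similarity roughly scores one point per simulated stick that is
# matched (up to sign) by an estimated direction, so a value close to 2
# below means both crossing fibers were recovered.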
assert_almost_equal(angular_similarity(directions, sticks), 2, 2) # two unequal fibers fractions = [75, 25] odf_gt, sticks, sphere = _create_mt_sim(mevals, angles, fractions, 100, None) directions, values, indices = peak_directions(odf_gt, sphere, .5, 25.) assert_almost_equal(angular_similarity(directions, sticks), 1, 2) directions, values, indices = peak_directions(odf_gt, sphere, .20, 25.) assert_almost_equal(angular_similarity(directions, sticks), 2, 2) # two equal fibers short angle (simulating very sharp ODF) mevals = np.array(([0.0045, 0.0003, 0.0003], [0.0045, 0.0003, 0.0003])) fractions = [50, 50] angles = [(0, 0), (20, 0)] odf_gt, sticks, sphere = _create_mt_sim(mevals, angles, fractions, 100, None) directions, values, indices = peak_directions(odf_gt, sphere, .5, 25.) assert_almost_equal(angular_similarity(directions, sticks), 1, 2) directions, values, indices = peak_directions(odf_gt, sphere, .5, 15.) assert_almost_equal(angular_similarity(directions, sticks), 2, 2) # 1 fiber mevals = np.array([[0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003]]) fractions = [50, 50] angles = [(15, 0), (15, 0)] odf_gt, sticks, sphere = _create_mt_sim(mevals, angles, fractions, 100, None) directions, values, indices = peak_directions(odf_gt, sphere, .5, 15.) assert_almost_equal(angular_similarity(directions, sticks), 1, 2) AE = np.rad2deg(np.arccos(np.dot(directions[0], sticks[0]))) assert_(abs(AE) < 2. or abs(AE - 180) < 2.) # two equal fibers and one small noisy one mevals = np.array([[0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003]]) angles = [(0, 0), (45, 0), (90, 0)] fractions = [45, 45, 10] odf_gt, sticks, sphere = _create_mt_sim(mevals, angles, fractions, 100, None) directions, values, indices = peak_directions(odf_gt, sphere, .5, 25.) assert_almost_equal(angular_similarity(directions, sticks), 2, 2) # two equal fibers and one faulty mevals = np.array([[0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003]]) angles = [(0, 0), (45, 0), (60, 0)] fractions = [45, 45, 10] odf_gt, sticks, sphere = _create_mt_sim(mevals, angles, fractions, 100, None) directions, values, indices = peak_directions(odf_gt, sphere, .5, 25.) assert_almost_equal(angular_similarity(directions, sticks), 2, 2) # two equal fibers and one very very annoying one mevals = np.array([[0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003]]) angles = [(0, 0), (45, 0), (60, 0)] fractions = [40, 40, 20] odf_gt, sticks, sphere = _create_mt_sim(mevals, angles, fractions, 100, None) directions, values, indices = peak_directions(odf_gt, sphere, .5, 25.) assert_almost_equal(angular_similarity(directions, sticks), 2, 2) # three peaks and one faulty mevals = np.array([[0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003]]) angles = [(0, 0), (45, 0), (90, 0), (90, 45)] fractions = [35, 35, 20, 10] odf_gt, sticks, sphere = _create_mt_sim(mevals, angles, fractions, 100, None) directions, values, indices = peak_directions(odf_gt, sphere, .5, 25.) assert_almost_equal(angular_similarity(directions, sticks), 3, 2) # four peaks mevals = np.array([[0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003]]) angles = [(0, 0), (45, 0), (90, 0), (90, 45)] fractions = [25, 25, 25, 25] odf_gt, sticks, sphere = _create_mt_sim(mevals, angles, fractions, 100, None) directions, values, indices = peak_directions(odf_gt, sphere, .15, 5.) 
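# Illustrative aside, not part of the original dipy tests: the four-way
# crossing gives broader, partially overlapping lobes, which is presumably
# why a looser relative threshold (.15) and separation angle (5 degrees)
# are used here.  The same helpers can also be exercised directly, e.g.:
#     >>> import numpy as np
#     >>> from dipy.data import get_sphere
#     >>> from dipy.sims.voxel import multi_tensor_odf
#     >>> from dipy.direction.peaks import peak_directions
#     >>> sph = get_sphere('repulsion724')
#     >>> odf = multi_tensor_odf(sph.vertices,
#     ...                        np.array([[0.0015, 0.0003, 0.0003]]),
#     ...                        [(0, 0)], [100])
#     >>> dirs, vals, inds = peak_directions(odf, sph, .5, 25.)
#     # a single-fiber ODF should give a single peak direction (up to sign)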
assert_almost_equal(angular_similarity(directions, sticks), 4, 2) # four difficult peaks mevals = np.array([[0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003]]) angles = [(0, 0), (45, 0), (90, 0), (90, 45)] fractions = [30, 30, 20, 20] odf_gt, sticks, sphere = _create_mt_sim(mevals, angles, fractions, 100, None) directions, values, indices = peak_directions(odf_gt, sphere, 0, 0) assert_almost_equal(angular_similarity(directions, sticks), 4, 1) odf_gt, sticks, hsphere = _create_mt_sim(mevals, angles, fractions, 100, None, half_sphere=True) directions, values, indices = peak_directions(odf_gt, hsphere, 0, 0) assert_equal(angular_similarity(directions, sticks) < 4, True) # four peaks and one them quite small fractions = [35, 35, 20, 10] odf_gt, sticks, sphere = _create_mt_sim(mevals, angles, fractions, 100, None) directions, values, indices = peak_directions(odf_gt, sphere, 0, 0) assert_equal(angular_similarity(directions, sticks) < 4, True) odf_gt, sticks, hsphere = _create_mt_sim(mevals, angles, fractions, 100, None, half_sphere=True) directions, values, indices = peak_directions(odf_gt, hsphere, 0, 0) assert_equal(angular_similarity(directions, sticks) < 4, True) # isotropic case mevals = np.array([[0.0015, 0.0015, 0.0015]]) angles = [(0, 0)] fractions = [100.] odf_gt, sticks, sphere = _create_mt_sim(mevals, angles, fractions, 100, None) directions, values, indices = peak_directions(odf_gt, sphere, .5, 25.) assert_equal(len(values) > 10, True) def test_difference_with_minmax(): # Show difference with and without minmax normalization # we create an odf here with 3 main peaks, 1 small sharp unwanted peak # (noise) and an isotropic compartment. mevals = np.array([[0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003], [0.0015, 0.00005, 0.00005], [0.0015, 0.0015, 0.0015]]) angles = [(0, 0), (45, 0), (90, 0), (90, 90), (0, 0)] fractions = [20, 20, 10, 1, 100 - 20 - 20 - 10 - 1] odf_gt, sticks, sphere = _create_mt_sim(mevals, angles, fractions, 100, None) # We will show that when the minmax normalization is used we can remove # the noisy peak using a lower threshold. odf_gt_minmax = (odf_gt - odf_gt.min()) / (odf_gt.max() - odf_gt.min()) _, values_1, _ = peak_directions(odf_gt, sphere, .30, 25.) assert_equal(len(values_1), 3) _, values_2, _ = peak_directions(odf_gt_minmax, sphere, .30, 25.) assert_equal(len(values_2), 3) # Setting the smallest value of the odf to zero is like running # peak_directions without the odf_min correction. odf_gt[odf_gt.argmin()] = 0. 
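# Illustrative aside, not part of the original dipy tests (the numbers are
# made up): say the isotropic floor puts odf.min() at 0.45 of the maximum
# and the noisy peak at 0.55.  With the minimum subtracted, the noisy peak
# scores (0.55 - 0.45) / (1.0 - 0.45) ~= 0.18 and a .30 threshold rejects
# it; without the correction its raw ratio 0.55 passes .30 and only a much
# higher threshold such as .60 removes it, which is what the calls below
# demonstrate.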
_, values_3, _ = peak_directions(odf_gt, sphere, .30, 25.,) assert_equal(len(values_3), 4) # we show here that to actually get that noisy peak out we need to # increase the peak threshold considerably directions, values_4, indices = peak_directions(odf_gt, sphere, .60, 25.,) assert_equal(len(values_4), 3) assert_almost_equal(values_1, values_4) def test_degenerative_cases(): sphere = get_sphere('symmetric724') # completely isotropic and degencase odf = np.zeros(sphere.vertices.shape[0]) directions, values, indices = peak_directions(odf, sphere, .5, 25) print(directions, values, indices) assert_equal(len(values), 0) assert_equal(len(directions), 0) assert_equal(len(indices), 0) odf = np.zeros(sphere.vertices.shape[0]) odf[0] = 0.020 odf[1] = 0.018 directions, values, indices = peak_directions(odf, sphere, .5, 25) print(directions, values, indices) assert_equal(values[0], 0.02) odf = - np.ones(sphere.vertices.shape[0]) directions, values, indices = peak_directions(odf, sphere, .5, 25) print(directions, values, indices) assert_equal(len(values), 0) odf = np.zeros(sphere.vertices.shape[0]) odf[0] = 0.020 odf[1] = 0.018 odf[2] = - 0.018 directions, values, indices = peak_directions(odf, sphere, .5, 25) assert_equal(values[0], 0.02) odf = np.ones(sphere.vertices.shape[0]) odf += 0.1 * np.random.rand(odf.shape[0]) directions, values, indices = peak_directions(odf, sphere, .5, 25) assert_(all(values > values[0] * .5)) assert_array_equal(values, odf[indices]) odf = np.ones(sphere.vertices.shape[0]) odf[1:] = np.finfo(np.float).eps * np.random.rand(odf.shape[0] - 1) directions, values, indices = peak_directions(odf, sphere, .5, 25) assert_equal(values[0], 1) assert_equal(len(values), 1) def test_peaksFromModel(): data = np.zeros((10, 2)) # Test basic case model = SimpleOdfModel(_gtab) odf_argmax = _odf.argmax() pam = peaks_from_model(model, data, _sphere, .5, 45, normalize_peaks=True) assert_array_equal(pam.gfa, gfa(_odf)) assert_array_equal(pam.peak_values[:, 0], 1.) assert_array_equal(pam.peak_values[:, 1:], 0.) mn, mx = _odf.min(), _odf.max() assert_array_equal(pam.qa[:, 0], (mx - mn) / mx) assert_array_equal(pam.qa[:, 1:], 0.) assert_array_equal(pam.peak_indices[:, 0], odf_argmax) assert_array_equal(pam.peak_indices[:, 1:], -1) # Test that odf array matches and is right shape pam = peaks_from_model(model, data, _sphere, .5, 45, return_odf=True) expected_shape = (len(data), len(_odf)) assert_equal(pam.odf.shape, expected_shape) assert_((_odf == pam.odf).all()) assert_array_equal(pam.peak_values[:, 0], _odf.max()) # Test mask mask = (np.arange(10) % 2) == 1 pam = peaks_from_model(model, data, _sphere, .5, 45, mask=mask, normalize_peaks=True) assert_array_equal(pam.gfa[~mask], 0) assert_array_equal(pam.qa[~mask], 0) assert_array_equal(pam.peak_values[~mask], 0) assert_array_equal(pam.peak_indices[~mask], -1) assert_array_equal(pam.gfa[mask], gfa(_odf)) assert_array_equal(pam.peak_values[mask, 0], 1.) assert_array_equal(pam.peak_values[mask, 1:], 0.) mn, mx = _odf.min(), _odf.max() assert_array_equal(pam.qa[mask, 0], (mx - mn) / mx) assert_array_equal(pam.qa[mask, 1:], 0.) 
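# Illustrative aside, not part of the original dipy tests: inside
# peaks_from_model the quantitative anisotropy is computed as
# qa = (peak_value - odf.min()) / global_max, and with the same odf in
# every voxel the global maximum equals mx, which is why qa[..., 0] is
# checked against (mx - mn) / mx above.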
assert_array_equal(pam.peak_indices[mask, 0], odf_argmax) assert_array_equal(pam.peak_indices[mask, 1:], -1) # Test serialization and deserialization: for normalize_peaks in [True, False]: for return_odf in [True, False]: for return_sh in [True, False]: pam = peaks_from_model(model, data, _sphere, .5, 45, normalize_peaks=normalize_peaks, return_odf=return_odf, return_sh=return_sh) b = BytesIO() pickle.dump(pam, b) b.seek(0) new_pam = pickle.load(b) b.close() for attr in ['peak_dirs', 'peak_values', 'peak_indices', 'gfa', 'qa', 'shm_coeff', 'B', 'odf']: assert_array_equal(getattr(pam, attr), getattr(new_pam, attr)) assert_array_equal(pam.sphere.vertices, new_pam.sphere.vertices) def test_peaksFromModelParallel(): SNR = 100 S0 = 100 _, fbvals, fbvecs = get_data('small_64D') bvals = np.load(fbvals) bvecs = np.load(fbvecs) gtab = gradient_table(bvals, bvecs) mevals = np.array(([0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003])) data, _ = multi_tensor(gtab, mevals, S0, angles=[(0, 0), (60, 0)], fractions=[50, 50], snr=SNR) # test equality with/without multiprocessing model = SimpleOdfModel(gtab) pam_multi = peaks_from_model(model, data, _sphere, .5, 45, normalize_peaks=True, return_odf=True, return_sh=True, parallel=True) pam_single = peaks_from_model(model, data, _sphere, .5, 45, normalize_peaks=True, return_odf=True, return_sh=True, parallel=False) pam_multi_inv1 = peaks_from_model(model, data, _sphere, .5, 45, normalize_peaks=True, return_odf=True, return_sh=True, parallel=True, nbr_processes=0) pam_multi_inv2 = peaks_from_model(model, data, _sphere, .5, 45, normalize_peaks=True, return_odf=True, return_sh=True, parallel=True, nbr_processes=-2) for pam in [pam_multi, pam_multi_inv1, pam_multi_inv2]: assert_equal(pam.gfa.dtype, pam_single.gfa.dtype) assert_equal(pam.gfa.shape, pam_single.gfa.shape) assert_array_almost_equal(pam.gfa, pam_single.gfa) assert_equal(pam.qa.dtype, pam_single.qa.dtype) assert_equal(pam.qa.shape, pam_single.qa.shape) assert_array_almost_equal(pam.qa, pam_single.qa) assert_equal(pam.peak_values.dtype, pam_single.peak_values.dtype) assert_equal(pam.peak_values.shape, pam_single.peak_values.shape) assert_array_almost_equal(pam.peak_values, pam_single.peak_values) assert_equal(pam.peak_indices.dtype, pam_single.peak_indices.dtype) assert_equal(pam.peak_indices.shape, pam_single.peak_indices.shape) assert_array_equal(pam.peak_indices, pam_single.peak_indices) assert_equal(pam.peak_dirs.dtype, pam_single.peak_dirs.dtype) assert_equal(pam.peak_dirs.shape, pam_single.peak_dirs.shape) assert_array_almost_equal(pam.peak_dirs, pam_single.peak_dirs) assert_equal(pam.shm_coeff.dtype, pam_single.shm_coeff.dtype) assert_equal(pam.shm_coeff.shape, pam_single.shm_coeff.shape) assert_array_almost_equal(pam.shm_coeff, pam_single.shm_coeff) assert_equal(pam.odf.dtype, pam_single.odf.dtype) assert_equal(pam.odf.shape, pam_single.odf.shape) assert_array_almost_equal(pam.odf, pam_single.odf) def test_peaks_shm_coeff(): SNR = 100 S0 = 100 _, fbvals, fbvecs = get_data('small_64D') from dipy.data import get_sphere sphere = get_sphere('repulsion724') bvals = np.load(fbvals) bvecs = np.load(fbvecs) gtab = gradient_table(bvals, bvecs) mevals = np.array(([0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003])) data, _ = multi_tensor(gtab, mevals, S0, angles=[(0, 0), (60, 0)], fractions=[50, 50], snr=SNR) from dipy.reconst.shm import CsaOdfModel model = CsaOdfModel(gtab, 4) pam = peaks_from_model(model, data[None, :], sphere, .5, 45, return_odf=True, return_sh=True) # Test that spherical harmonic 
coefficients return back correctly odf2 = np.dot(pam.shm_coeff, pam.B) assert_array_almost_equal(pam.odf, odf2) assert_equal(pam.shm_coeff.shape[-1], 45) pam = peaks_from_model(model, data[None, :], sphere, .5, 45, return_odf=True, return_sh=False) assert_equal(pam.shm_coeff, None) pam = peaks_from_model(model, data[None, :], sphere, .5, 45, return_odf=True, return_sh=True, sh_basis_type='mrtrix') odf2 = np.dot(pam.shm_coeff, pam.B) assert_array_almost_equal(pam.odf, odf2) def test_reshape_peaks_for_visualization(): data1 = np.random.randn(10, 5, 3).astype('float32') data2 = np.random.randn(10, 2, 5, 3).astype('float32') data3 = np.random.randn(10, 2, 12, 5, 3).astype('float32') data1_reshape = reshape_peaks_for_visualization(data1) data2_reshape = reshape_peaks_for_visualization(data2) data3_reshape = reshape_peaks_for_visualization(data3) assert_array_equal(data1_reshape.shape, (10, 15)) assert_array_equal(data2_reshape.shape, (10, 2, 15)) assert_array_equal(data3_reshape.shape, (10, 2, 12, 15)) assert_array_equal(data1_reshape.reshape(10, 5, 3), data1) assert_array_equal(data2_reshape.reshape(10, 2, 5, 3), data2) assert_array_equal(data3_reshape.reshape(10, 2, 12, 5, 3), data3) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/direction/tests/test_prob_direction_getter.py000066400000000000000000000043771317371701200246310ustar00rootroot00000000000000import numpy as np import numpy.testing as npt from dipy.core.sphere import unit_octahedron from dipy.reconst.shm import SphHarmFit, SphHarmModel from dipy.direction import ProbabilisticDirectionGetter def test_ProbabilisticDirectionGetter(): # Test the constructors and errors of the ProbabilisticDirectionGetter class SillyModel(SphHarmModel): sh_order = 4 def fit(self, data, mask=None): coeff = np.zeros(data.shape[:-1] + (15,)) return SphHarmFit(self, coeff, mask=None) model = SillyModel(gtab=None) data = np.zeros((3, 3, 3, 7)) fit = model.fit(data) # Sample point and direction point = np.zeros(3) dir = unit_octahedron.vertices[0].copy() # make a dg from a fit dg = ProbabilisticDirectionGetter.from_shcoeff(fit.shm_coeff, 90, unit_octahedron) state = dg.get_direction(point, dir) npt.assert_equal(state, 1) # Make a dg from a pmf N = unit_octahedron.theta.shape[0] pmf = np.zeros((3, 3, 3, N)) dg = ProbabilisticDirectionGetter.from_pmf(pmf, 90, unit_octahedron) state = dg.get_direction(point, dir) npt.assert_equal(state, 1) # pmf shape must match sphere bad_pmf = pmf[..., 1:] npt.assert_raises(ValueError, ProbabilisticDirectionGetter.from_pmf, bad_pmf, 90, unit_octahedron) # pmf must have 4 dimensions bad_pmf = pmf[0, ...] 
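# Illustrative aside, not part of the original dipy tests: pmf has shape
# (3, 3, 3, N), so pmf[0, ...] drops a spatial axis and is only 3-D, which
# the ndim check in from_pmf should reject below.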
npt.assert_raises(ValueError, ProbabilisticDirectionGetter.from_pmf, bad_pmf, 90, unit_octahedron) # pmf cannot have negative values pmf[0, 0, 0, 0] = -1 npt.assert_raises(ValueError, ProbabilisticDirectionGetter.from_pmf, pmf, 90, unit_octahedron) # Check basis_type keyword dg = ProbabilisticDirectionGetter.from_shcoeff(fit.shm_coeff, 90, unit_octahedron, pmf_threshold=0.1, basis_type="mrtrix") npt.assert_raises(ValueError, ProbabilisticDirectionGetter.from_shcoeff, fit.shm_coeff, 90, unit_octahedron, pmf_threshold=0.1, basis_type="not a basis") dipy-0.13.0/dipy/external/000077500000000000000000000000001317371701200153315ustar00rootroot00000000000000dipy-0.13.0/dipy/external/__init__.py000066400000000000000000000002221317371701200174360ustar00rootroot00000000000000# init for externals package """ Calls to external packages """ # Test callable from numpy.testing import Tester test = Tester().test del Tester dipy-0.13.0/dipy/external/fsl.py000066400000000000000000000354701317371701200165000ustar00rootroot00000000000000''' FSL IO ''' from __future__ import with_statement import os from os.path import join as pjoin from subprocess import Popen, PIPE import numpy as np import numpy.linalg as npl from numpy import newaxis from scipy.ndimage import map_coordinates as mc from scipy.ndimage import affine_transform from dipy.io.dpy import Dpy import nibabel as nib from nibabel.tmpdirs import InTemporaryDirectory _VAL_FMT = ' %e' class FSLError(Exception): """ Class signals error in FSL processing """ def have_flirt(): """ Return True if we can call flirt without error Relies on the fact that flirt produces text on stdout when called with no arguments """ p = Popen('flirt', stdout=PIPE, stderr=PIPE, shell=True) stdout, stderr = p.communicate() return stdout != '' def write_bvals_bvecs(bvals, bvecs, outpath=None, prefix=''): ''' Write FSL FDT bvals and bvecs files Parameters ------------- bvals : (N,) sequence Vector with diffusion gradient strength (one per diffusion acquisition, N=no of acquisitions) bvecs : (N, 3) array-like diffusion gradient directions outpath : None or str path to write FDT bvals, bvecs text files None results in current working directory. prefix : str prefix for bvals, bvecs files in directory. Defaults to '' ''' if outpath is None: outpath = os.getcwd() bvals = tuple(bvals) bvecs = np.asarray(bvecs) bvecs[np.isnan(bvecs)] = 0 N = len(bvals) fname = pjoin(outpath, prefix + 'bvals') fmt = _VAL_FMT * N + '\n' open(fname, 'wt').write(fmt % bvals) fname = pjoin(outpath, prefix + 'bvecs') bvf = open(fname, 'wt') for dim_vals in bvecs.T: bvf.write(fmt % tuple(dim_vals)) def flirt2aff(mat, in_img, ref_img): """ Transform from `in_img` voxels to `ref_img` voxels given `mat` Parameters ---------- mat : (4,4) array contents (as array) of output ``-omat`` transformation file from flirt in_img : img image passed (as filename) to flirt as ``-in`` image ref_img : img image passed (as filename) to flirt as ``-ref`` image Returns ------- aff : (4,4) array Transform from voxel coordinates in ``in_img`` to voxel coordinates in ``ref_img`` Notes ----- Thanks to Mark Jenkinson and Jesper Andersson for the correct statements here, apologies for any errors we've added. ``flirt`` registers an ``in`` image to a ``ref`` image. It can produce (with the ``-omat`` option) - a 4 x 4 affine matrix giving the mapping from *inspace* to *refspace*. The rest of this note is to specify what *inspace* and *refspace* are. 
In what follows, a *voxtrans* for an image is the 4 by 4 affine ``np.diag([vox_i, vox_j, vox_k, 1])`` where ``vox_i`` etc are the voxel sizes for the first second and third voxel dimension. ``vox_i`` etc are always positive. If the input image has an affine with a negative determinant, then the mapping from voxel coordinates in the input image to *inspace* is simply *voxtrans* for the input image. If the reference image has a negative determinant, the mapping from voxel space in the reference image to *refspace* is simply *voxtrans* for the reference image. A negative determinant for the image affine is the common case, of an image with a x voxel flip. Analyze images don't store affines and flirt assumes a negative determinant in these cases. For positive determinant affines, flirt starts *inspace* and / or *refspace* with an x voxel flip. The mapping implied for an x voxel flip for image with shape (N_i, N_j, N_k) is: [[-1, 0, 0, N_i - 1], [ 0, 1, 0, 0], [ 0, 0, 1, 0], [ 0, 0, 0, 1]] If the input image has an affine with a positive determinant, then mapping from input image voxel coordinates to *inspace* is ``np.dot(input_voxtrans, input_x_flip)`` - where ``input_x_flip`` is the matrix above with ``N_i`` given by the input image first axis length. Similarly the mapping from reference voxel coordinates to *refspace*, if the reference image has a positive determinant, is ``np.dot(ref_voxtrans, ref_x_flip)`` - where ``ref_x_flip`` is the matrix above with ``N_i`` given by the reference image first axis length. """ in_hdr = in_img.header ref_hdr = ref_img.header # get_zooms gets the positive voxel sizes as returned in the header inspace = np.diag(in_hdr.get_zooms() + (1,)) refspace = np.diag(ref_hdr.get_zooms() + (1,)) if npl.det(in_img.affine) >= 0: inspace = np.dot(inspace, _x_flipper(in_hdr.get_data_shape()[0])) if npl.det(ref_img.affine) >= 0: refspace = np.dot(refspace, _x_flipper(ref_hdr.get_data_shape()[0])) # Return voxel to voxel mapping return np.dot(npl.inv(refspace), np.dot(mat, inspace)) def _x_flipper(N_i): flipr = np.diag([-1, 1, 1, 1]) flipr[0, 3] = N_i - 1 return flipr def flirt2aff_files(matfile, in_fname, ref_fname): """ Map from `in_fname` image voxels to `ref_fname` voxels given `matfile` See :func:`flirt2aff` docstring for details. Parameters ------------ matfile : str filename of output ``-omat`` transformation file from flirt in_fname : str filename for image passed to flirt as ``-in`` image ref_fname : str filename for image passed to flirt as ``-ref`` image Returns ------- aff : (4,4) array Transform from voxel coordinates in image for ``in_fname`` to voxel coordinates in image for ``ref_fname`` """ mat = np.loadtxt(matfile) in_img = nib.load(in_fname) ref_img = nib.load(ref_fname) return flirt2aff(mat, in_img, ref_img) def warp_displacements(ffa, flaff, fdis, fref, ffaw, order=1): ''' Warp an image using fsl displacements Parameters ------------ ffa : filename of nifti to be warped flaff : filename of .mat (flirt) fdis : filename of displacements (fnirtfileutils) fref : filename of reference volume e.g. 
(FMRIB58_FA_1mm.nii.gz) ffaw : filename for the output warped image ''' refaff = nib.load(fref).affine disdata = nib.load(fdis).get_data() imgfa = nib.load(ffa) fadata = imgfa.get_data() fazooms = imgfa.header.get_zooms() # from fa index to ref index res = flirt2aff_files(flaff, ffa, fref) # from ref index to fa index ires = np.linalg.inv(res) # create the 4d volume which has the indices for the reference image reftmp = np.zeros(disdata.shape) ''' #create the grid indices for the reference #refinds = np.ndindex(disdata.shape[:3]) for ijk_t in refinds: i,j,k = ijk_t reftmp[i,j,k,0]=i reftmp[i,j,k,1]=j reftmp[i,j,k,2]=k ''' # same as commented above but much faster reftmp[..., 0] = np.arange(disdata.shape[0])[:, newaxis, newaxis] reftmp[..., 1] = np.arange(disdata.shape[1])[newaxis, :, newaxis] reftmp[..., 2] = np.arange(disdata.shape[2])[newaxis, newaxis, :] # affine transform from reference index to the fa index A = np.dot(reftmp, ires[:3, :3].T) + ires[:3, 3] # add the displacements but first devide them by the voxel sizes A2 = A + disdata / fazooms # hold the displacements' shape reshaping di, dj, dk, dl = disdata.shape # do the interpolation using map coordinates # the list of points where the interpolation is done given by the reshaped # in 2D A2 (list of 3d points in fa index) W = mc(fadata, A2.reshape(di * dj * dk, dl).T, order=order).reshape(di, dj, dk) # save the warped image Wimg = nib.Nifti1Image(W, refaff) nib.save(Wimg, ffaw) def warp_displacements_tracks(fdpy, ffa, fmat, finv, fdis, fdisa, fref, fdpyw): """ Warp tracks from native space to the FMRIB58/MNI space We use here the fsl displacements. Have a look at create_displacements to see an example of how to use these displacements. Parameters ------------ fdpy : filename of the .dpy file with the tractography ffa : filename of nifti to be warped fmat : filename of .mat (flirt) fdis : filename of displacements (fnirtfileutils) fdisa : filename of displacements (fnirtfileutils + affine) finv : filename of invwarp displacements (invwarp) fref : filename of reference volume e.g. 
(FMRIB58_FA_1mm.nii.gz) fdpyw : filename of the warped tractography See also ----------- dipy.external.fsl.create_displacements """ # read the tracks from the image space dpr = Dpy(fdpy, 'r') T = dpr.read_tracks() dpr.close() # copy them in a new file dpw = Dpy(fdpyw, 'w', compression=1) dpw.write_tracks(T) dpw.close() # from fa index to ref index res = flirt2aff_files(fmat, ffa, fref) # load the reference img imgref = nib.load(fref) refaff = imgref.affine # load the invwarp displacements imginvw = nib.load(finv) invwdata = imginvw.get_data() invwaff = imginvw.affine # load the forward displacements imgdis = nib.load(fdis) disdata = imgdis.get_data() # load the forward displacements + affine imgdis2 = nib.load(fdisa) disdata2 = imgdis2.get_data() # from their difference create the affine disaff = disdata2 - disdata del disdata del disdata2 shape = nib.load(ffa).get_data().shape # transform the displacements affine back to image space disaff0 = affine_transform(disaff[..., 0], res[:3, :3], res[:3, 3], shape, order=1) disaff1 = affine_transform(disaff[..., 1], res[:3, :3], res[:3, 3], shape, order=1) disaff2 = affine_transform(disaff[..., 2], res[:3, :3], res[:3, 3], shape, order=1) # remove the transformed affine from the invwarp displacements di = invwdata[:, :, :, 0] + disaff0 dj = invwdata[:, :, :, 1] + disaff1 dk = invwdata[:, :, :, 2] + disaff2 dprw = Dpy(fdpyw, 'r+') rows = len(dprw.f.root.streamlines.tracks) blocks = np.round(np.linspace(0, rows, 10)).astype(int) # lets work in # blocks # print rows for i in range(len(blocks) - 1): # print blocks[i],blocks[i+1] # copy a lot of tracks together caboodle = dprw.f.root.streamlines.tracks[blocks[i]:blocks[i + 1]] mci = mc(di, caboodle.T, order=1) # interpolations for i displacement mcj = mc(dj, caboodle.T, order=1) # interpolations for j displacement mck = mc(dk, caboodle.T, order=1) # interpolations for k displacement D = np.vstack((mci, mcj, mck)).T # go back to mni image space WI2 = np.dot(caboodle, res[:3, :3].T) + res[:3, 3] + D # and then to mni world space caboodlew = np.dot(WI2, refaff[:3, :3].T) + refaff[:3, 3] # write back dprw.f.root.streamlines.tracks[blocks[i]:blocks[i + 1]] = ( caboodlew.astype('f4')) dprw.close() def pipe(cmd, print_sto=True, print_ste=True): """ A tine pipeline system to run external tools. 
For more advanced pipelining use nipype http://www.nipy.org/nipype cmd : String Command line to be run print_sto : boolean Print standard output (stdout) or not (default: True) print_ste : boolean Print standard error (stderr) or not (default: True) """ p = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE) sto = p.stdout.readlines() ste = p.stderr.readlines() if print_sto: print(sto) if print_ste: print(ste) def dcm2nii(dname, outdir, filt='*.dcm', options='-d n -g n -i n -o'): cmd = 'dcm2nii ' + options + ' ' + outdir + ' ' + dname + '/' + filt print(cmd) pipe(cmd) def eddy_correct(in_nii, out_nii, ref=0): cmd = 'eddy_correct ' + in_nii + ' ' + out_nii + ' ' + str(ref) print(cmd) pipe(cmd) def bet(in_nii, out_nii, options=' -F -f .2 -g 0'): cmd = 'bet ' + in_nii + ' ' + out_nii + options print(cmd) pipe(cmd) def run_flirt_imgs(in_img, ref_img, dof=6, flags=''): """ Run flirt on nibabel images, returning affine Parameters ---------- in_img : `SpatialImage` image to register ref_img : `SpatialImage` image to register to dof : int, optional degrees of freedom for registration (default 6) flags : str, optional other flags to pass to flirt command string Returns ------- in_vox2out_vox : (4,4) ndarray affine such that, if ``[i, j, k]`` is a coordinate in voxels in the `in_img`, and ``[p, q, r]`` are the equivalent voxel coordinates in the reference image, then ``[p, q, r] = np.dot(in_vox2out_vox[:3,:3]), [i, j, k] + in_vox2out_vox[:3,3])`` """ omat = 'reg.mat' with InTemporaryDirectory(): nib.save(in_img, 'in.nii') nib.save(ref_img, 'ref.nii') cmd = 'flirt %s -dof %d -in in.nii -ref ref.nii -omat %s' % ( flags, dof, omat) proc = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE) stdout, stderr = proc.communicate() if not os.path.isfile(omat): raise FSLError('Command "%s" failed somehow - stdout: %s\n' 'and stderr: %s\n' % (cmd, stdout, stderr)) res = np.loadtxt(omat) return flirt2aff(res, in_img, ref_img) def apply_warp(in_nii, affine_mat, nonlin_nii, out_nii): cmd = 'applywarp --ref=${FSLDIR}/data/standard/FMRIB58_FA_1mm --in=' + \ in_nii + ' --warp=' + nonlin_nii + ' --out=' + out_nii print(cmd) pipe(cmd) def create_displacements(fin, fmat, fnonlin, finvw, fdisp, fdispa, fref): """ Create displacements using FSL's FLIRT and FNIRT tools Parameters ---------- fin : filename of initial source image fmat : filename of .mat (flirt) fnonlin : filename of fnirt output finvw : filename of invwarp displacements (invwarp) fdis : filename of fnirtfileutils fdisa : filename of fnirtfileutils (with other parameters) fref : filename of reference image e.g. 


def apply_warp(in_nii, affine_mat, nonlin_nii, out_nii):
    cmd = 'applywarp --ref=${FSLDIR}/data/standard/FMRIB58_FA_1mm --in=' + \
        in_nii + ' --warp=' + nonlin_nii + ' --out=' + out_nii
    print(cmd)
    pipe(cmd)


def create_displacements(fin, fmat, fnonlin, finvw, fdisp, fdispa, fref):
    """ Create displacements using FSL's FLIRT and FNIRT tools

    Parameters
    ----------
    fin : filename of initial source image
    fmat : filename of .mat (flirt)
    fnonlin : filename of fnirt output
    finvw : filename of invwarp displacements (invwarp)
    fdisp : filename of fnirtfileutils output displacements
    fdispa : filename of fnirtfileutils output displacements (with --withaff)
    fref : filename of reference image e.g.
        (FMRIB58_FA_1mm.nii.gz)
    """
    commands = []
    commands.append('flirt -ref ' + fref + ' -in ' + fin + ' -omat ' + fmat)
    commands.append('fnirt --in=' + fin + ' --aff=' + fmat + ' --cout=' +
                    fnonlin + ' --config=FA_2_FMRIB58_1mm')
    commands.append('invwarp --ref=' + fin + ' --warp=' + fnonlin +
                    ' --out=' + finvw)
    commands.append('fnirtfileutils --in=' + fnonlin +
                    ' --ref=${FSLDIR}/data/standard/FMRIB58_FA_1mm --out=' +
                    fdisp)
    commands.append('fnirtfileutils --in=' + fnonlin +
                    ' --ref=${FSLDIR}/data/standard/FMRIB58_FA_1mm --out=' +
                    fdispa + ' --withaff')
    for c in commands:
        print(c)
        pipe(c)
dipy-0.13.0/dipy/external/tests/000077500000000000000000000000001317371701200164735ustar00rootroot00000000000000
dipy-0.13.0/dipy/external/tests/__init__.py000066400000000000000000000000741317371701200206050ustar00rootroot00000000000000
# Externals test directory made into package with this file
dipy-0.13.0/dipy/fixes/000077500000000000000000000000001317371701200146255ustar00rootroot00000000000000
dipy-0.13.0/dipy/fixes/__init__.py000066400000000000000000000000411317371701200167310ustar00rootroot00000000000000
# Make fixes directory a package
dipy-0.13.0/dipy/fixes/argparse.py000066400000000000000000002463301317371701200170130ustar00rootroot00000000000000
# emacs: -*- coding: utf-8; mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
# Copyright 2006-2009 Steven J. Bethard .
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
#   this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
#   this list of conditions and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import copy as _copy import os as _os import re as _re import sys as _sys import textwrap as _textwrap from gettext import gettext as _ """Command-line parsing library This module is an optparse-inspired command-line parsing library that: - handles both optional and positional arguments - produces highly informative usage messages - supports parsers that dispatch to sub-parsers The following is a simple usage example that sums integers from the command-line and writes the result to a file:: parser = argparse.ArgumentParser( description='sum the integers at the command line') parser.add_argument( 'integers', metavar='int', nargs='+', type=int, help='an integer to be summed') parser.add_argument( '--log', default=sys.stdout, type=argparse.FileType('w'), help='the file where the sum should be written') args = parser.parse_args() args.log.write('%s' % sum(args.integers)) args.log.close() The module contains the following public classes: - ArgumentParser -- The main entry point for command-line parsing. As the example above shows, the add_argument() method is used to populate the parser with actions for optional and positional arguments. Then the parse_args() method is invoked to convert the args at the command-line into an object with attributes. - ArgumentError -- The exception raised by ArgumentParser objects when there are errors with the parser's actions. Errors raised while parsing the command-line are caught by ArgumentParser and emitted as command-line messages. - FileType -- A factory for defining types of files to be created. As the example above shows, instances of FileType are typically passed as the type= argument of add_argument() calls. - Action -- The base class for parser actions. Typically actions are selected by passing strings like 'store_true' or 'append_const' to the action= argument of add_argument(). However, for greater customization of ArgumentParser actions, subclasses of Action may be defined and passed as the action= argument. - HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter, ArgumentDefaultsHelpFormatter -- Formatter classes which may be passed as the formatter_class= argument to the ArgumentParser constructor. HelpFormatter is the default, RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser not to change the formatting for help text, and ArgumentDefaultsHelpFormatter adds information about argument defaults to the help. All other classes in this module are considered implementation details. (Also note that HelpFormatter and RawDescriptionHelpFormatter are only considered public as object names -- the API of the formatter objects is still considered an implementation detail.) """ __version__ = '1.0.1' __all__ = [ 'ArgumentParser', 'ArgumentError', 'Namespace', 'Action', 'FileType', 'HelpFormatter', 'RawDescriptionHelpFormatter', 'RawTextHelpFormatter' 'ArgumentDefaultsHelpFormatter', ] try: _set = set except NameError: from sets import Set as _set try: _basestring = basestring except NameError: _basestring = str try: _sorted = sorted except NameError: def _sorted(iterable, reverse=False): result = list(iterable) result.sort() if reverse: result.reverse() return result # silence Python 2.6 buggy warnings about Exception.message if _sys.version_info[:2] == (2, 6): import warnings warnings.filterwarnings( action='ignore', message='BaseException.message has been deprecated as of Python 2.6', category=DeprecationWarning, module='argparse') SUPPRESS = '==SUPPRESS==' OPTIONAL = '?' 
ZERO_OR_MORE = '*' ONE_OR_MORE = '+' PARSER = '==PARSER==' # ============================= # Utility functions and classes # ============================= class _AttributeHolder(object): """Abstract base class that provides __repr__. The __repr__ method returns a string in the format:: ClassName(attr=name, attr=name, ...) The attributes are determined either by a class-level attribute, '_kwarg_names', or by inspecting the instance __dict__. """ def __repr__(self): type_name = type(self).__name__ arg_strings = [] for arg in self._get_args(): arg_strings.append(repr(arg)) for name, value in self._get_kwargs(): arg_strings.append('%s=%r' % (name, value)) return '%s(%s)' % (type_name, ', '.join(arg_strings)) def _get_kwargs(self): return _sorted(self.__dict__.items()) def _get_args(self): return [] def _ensure_value(namespace, name, value): if getattr(namespace, name, None) is None: setattr(namespace, name, value) return getattr(namespace, name) # =============== # Formatting Help # =============== class HelpFormatter(object): """Formatter for generating usage messages and argument help strings. Only the name of this class is considered a public API. All the methods provided by the class are considered an implementation detail. """ def __init__(self, prog, indent_increment=2, max_help_position=24, width=None): # default setting for width if width is None: try: width = int(_os.environ['COLUMNS']) except (KeyError, ValueError): width = 80 width -= 2 self._prog = prog self._indent_increment = indent_increment self._max_help_position = max_help_position self._width = width self._current_indent = 0 self._level = 0 self._action_max_length = 0 self._root_section = self._Section(self, None) self._current_section = self._root_section self._whitespace_matcher = _re.compile(r'\s+') self._long_break_matcher = _re.compile(r'\n\n\n+') # =============================== # Section and indentation methods # =============================== def _indent(self): self._current_indent += self._indent_increment self._level += 1 def _dedent(self): self._current_indent -= self._indent_increment assert self._current_indent >= 0, 'Indent decreased below 0.' 
self._level -= 1 class _Section(object): def __init__(self, formatter, parent, heading=None): self.formatter = formatter self.parent = parent self.heading = heading self.items = [] def format_help(self): # format the indented section if self.parent is not None: self.formatter._indent() join = self.formatter._join_parts for func, args in self.items: func(*args) item_help = join([func(*args) for func, args in self.items]) if self.parent is not None: self.formatter._dedent() # return nothing if the section was empty if not item_help: return '' # add the heading if the section was non-empty if self.heading is not SUPPRESS and self.heading is not None: current_indent = self.formatter._current_indent heading = '%*s%s:\n' % (current_indent, '', self.heading) else: heading = '' # join the section-initial newline, the heading and the help return join(['\n', heading, item_help, '\n']) def _add_item(self, func, args): self._current_section.items.append((func, args)) # ======================== # Message building methods # ======================== def start_section(self, heading): self._indent() section = self._Section(self, self._current_section, heading) self._add_item(section.format_help, []) self._current_section = section def end_section(self): self._current_section = self._current_section.parent self._dedent() def add_text(self, text): if text is not SUPPRESS and text is not None: self._add_item(self._format_text, [text]) def add_usage(self, usage, actions, groups, prefix=None): if usage is not SUPPRESS: args = usage, actions, groups, prefix self._add_item(self._format_usage, args) def add_argument(self, action): if action.help is not SUPPRESS: # find all invocations get_invocation = self._format_action_invocation invocations = [get_invocation(action)] for subaction in self._iter_indented_subactions(action): invocations.append(get_invocation(subaction)) # update the maximum item length invocation_length = max([len(s) for s in invocations]) action_length = invocation_length + self._current_indent self._action_max_length = max(self._action_max_length, action_length) # add the item to the list self._add_item(self._format_action, [action]) def add_arguments(self, actions): for action in actions: self.add_argument(action) # ======================= # Help-formatting methods # ======================= def format_help(self): help = self._root_section.format_help() if help: help = self._long_break_matcher.sub('\n\n', help) help = help.strip('\n') + '\n' return help def _join_parts(self, part_strings): return ''.join([part for part in part_strings if part and part is not SUPPRESS]) def _format_usage(self, usage, actions, groups, prefix): if prefix is None: prefix = _('usage: ') # if usage is specified, use that if usage is not None: usage = usage % dict(prog=self._prog) # if no optionals or positionals are available, usage is just prog elif usage is None and not actions: usage = '%(prog)s' % dict(prog=self._prog) # if optionals and positionals are available, calculate usage elif usage is None: prog = '%(prog)s' % dict(prog=self._prog) # split optionals from positionals optionals = [] positionals = [] for action in actions: if action.option_strings: optionals.append(action) else: positionals.append(action) # build full usage string format = self._format_actions_usage action_usage = format(optionals + positionals, groups) usage = ' '.join([s for s in [prog, action_usage] if s]) # wrap the usage parts if it's too long text_width = self._width - self._current_indent if len(prefix) + len(usage) > text_width: # 
break usage into wrappable parts part_regexp = r'\(.*?\)+|\[.*?\]+|\S+' opt_usage = format(optionals, groups) pos_usage = format(positionals, groups) opt_parts = _re.findall(part_regexp, opt_usage) pos_parts = _re.findall(part_regexp, pos_usage) assert ' '.join(opt_parts) == opt_usage assert ' '.join(pos_parts) == pos_usage # helper for wrapping lines def get_lines(parts, indent, prefix=None): lines = [] line = [] if prefix is not None: line_len = len(prefix) - 1 else: line_len = len(indent) - 1 for part in parts: if line_len + 1 + len(part) > text_width: lines.append(indent + ' '.join(line)) line = [] line_len = len(indent) - 1 line.append(part) line_len += len(part) + 1 if line: lines.append(indent + ' '.join(line)) if prefix is not None: lines[0] = lines[0][len(indent):] return lines # if prog is short, follow it with optionals or positionals if len(prefix) + len(prog) <= 0.75 * text_width: indent = ' ' * (len(prefix) + len(prog) + 1) if opt_parts: lines = get_lines([prog] + opt_parts, indent, prefix) lines.extend(get_lines(pos_parts, indent)) elif pos_parts: lines = get_lines([prog] + pos_parts, indent, prefix) else: lines = [prog] # if prog is long, put it on its own line else: indent = ' ' * len(prefix) parts = opt_parts + pos_parts lines = get_lines(parts, indent) if len(lines) > 1: lines = [] lines.extend(get_lines(opt_parts, indent)) lines.extend(get_lines(pos_parts, indent)) lines = [prog] + lines # join lines into usage usage = '\n'.join(lines) # prefix with 'usage:' return '%s%s\n\n' % (prefix, usage) def _format_actions_usage(self, actions, groups): # find group indices and identify actions in groups group_actions = _set() inserts = {} for group in groups: try: start = actions.index(group._group_actions[0]) except ValueError: continue else: end = start + len(group._group_actions) if actions[start:end] == group._group_actions: for action in group._group_actions: group_actions.add(action) if not group.required: inserts[start] = '[' inserts[end] = ']' else: inserts[start] = '(' inserts[end] = ')' for i in range(start + 1, end): inserts[i] = '|' # collect all actions format strings parts = [] for i, action in enumerate(actions): # suppressed arguments are marked with None # remove | separators for suppressed arguments if action.help is SUPPRESS: parts.append(None) if inserts.get(i) == '|': inserts.pop(i) elif inserts.get(i + 1) == '|': inserts.pop(i + 1) # produce all arg strings elif not action.option_strings: part = self._format_args(action, action.dest) # if it's in a group, strip the outer [] if action in group_actions: if part[0] == '[' and part[-1] == ']': part = part[1:-1] # add the action string to the list parts.append(part) # produce the first way to invoke the option in brackets else: option_string = action.option_strings[0] # if the Optional doesn't take a value, format is: # -s or --long if action.nargs == 0: part = '%s' % option_string # if the Optional takes a value, format is: # -s ARGS or --long ARGS else: default = action.dest.upper() args_string = self._format_args(action, default) part = '%s %s' % (option_string, args_string) # make it look optional if it's not required or in a group if not action.required and action not in group_actions: part = '[%s]' % part # add the action string to the list parts.append(part) # insert things at the necessary indices for i in _sorted(inserts, reverse=True): parts[i:i] = [inserts[i]] # join all the action items with spaces text = ' '.join([item for item in parts if item is not None]) # clean up separators for mutually 
exclusive groups open = r'[\[(]' close = r'[\])]' text = _re.sub(r'(%s) ' % open, r'\1', text) text = _re.sub(r' (%s)' % close, r'\1', text) text = _re.sub(r'%s *%s' % (open, close), r'', text) text = _re.sub(r'\(([^|]*)\)', r'\1', text) text = text.strip() # return the text return text def _format_text(self, text): text_width = self._width - self._current_indent indent = ' ' * self._current_indent return self._fill_text(text, text_width, indent) + '\n\n' def _format_action(self, action): # determine the required width and the entry label help_position = min(self._action_max_length + 2, self._max_help_position) help_width = self._width - help_position action_width = help_position - self._current_indent - 2 action_header = self._format_action_invocation(action) # ho nelp; start on same line and add a final newline if not action.help: tup = self._current_indent, '', action_header action_header = '%*s%s\n' % tup # short action name; start on the same line and pad two spaces elif len(action_header) <= action_width: tup = self._current_indent, '', action_width, action_header action_header = '%*s%-*s ' % tup indent_first = 0 # long action name; start on the next line else: tup = self._current_indent, '', action_header action_header = '%*s%s\n' % tup indent_first = help_position # collect the pieces of the action help parts = [action_header] # if there was help for the action, add lines of help text if action.help: help_text = self._expand_help(action) help_lines = self._split_lines(help_text, help_width) parts.append('%*s%s\n' % (indent_first, '', help_lines[0])) for line in help_lines[1:]: parts.append('%*s%s\n' % (help_position, '', line)) # or add a newline if the description doesn't end with one elif not action_header.endswith('\n'): parts.append('\n') # if there are any sub-actions, add their help as well for subaction in self._iter_indented_subactions(action): parts.append(self._format_action(subaction)) # return a single string return self._join_parts(parts) def _format_action_invocation(self, action): if not action.option_strings: metavar, = self._metavar_formatter(action, action.dest)(1) return metavar else: parts = [] # if the Optional doesn't take a value, format is: # -s, --long if action.nargs == 0: parts.extend(action.option_strings) # if the Optional takes a value, format is: # -s ARGS, --long ARGS else: default = action.dest.upper() args_string = self._format_args(action, default) for option_string in action.option_strings: parts.append('%s %s' % (option_string, args_string)) return ', '.join(parts) def _metavar_formatter(self, action, default_metavar): if action.metavar is not None: result = action.metavar elif action.choices is not None: choice_strs = [str(choice) for choice in action.choices] result = '{%s}' % ','.join(choice_strs) else: result = default_metavar def format(tuple_size): if isinstance(result, tuple): return result else: return (result, ) * tuple_size return format def _format_args(self, action, default_metavar): get_metavar = self._metavar_formatter(action, default_metavar) if action.nargs is None: result = '%s' % get_metavar(1) elif action.nargs == OPTIONAL: result = '[%s]' % get_metavar(1) elif action.nargs == ZERO_OR_MORE: result = '[%s [%s ...]]' % get_metavar(2) elif action.nargs == ONE_OR_MORE: result = '%s [%s ...]' % get_metavar(2) elif action.nargs is PARSER: result = '%s ...' 
% get_metavar(1) else: formats = ['%s' for _ in range(action.nargs)] result = ' '.join(formats) % get_metavar(action.nargs) return result def _expand_help(self, action): params = dict(vars(action), prog=self._prog) for name in list(params): if params[name] is SUPPRESS: del params[name] if params.get('choices') is not None: choices_str = ', '.join([str(c) for c in params['choices']]) params['choices'] = choices_str return self._get_help_string(action) % params def _iter_indented_subactions(self, action): try: get_subactions = action._get_subactions except AttributeError: pass else: self._indent() for subaction in get_subactions(): yield subaction self._dedent() def _split_lines(self, text, width): text = self._whitespace_matcher.sub(' ', text).strip() return _textwrap.wrap(text, width) def _fill_text(self, text, width, indent): text = self._whitespace_matcher.sub(' ', text).strip() return _textwrap.fill(text, width, initial_indent=indent, subsequent_indent=indent) def _get_help_string(self, action): return action.help class RawDescriptionHelpFormatter(HelpFormatter): """Help message formatter which retains any formatting in descriptions. Only the name of this class is considered a public API. All the methods provided by the class are considered an implementation detail. """ def _fill_text(self, text, width, indent): return ''.join([indent + line for line in text.splitlines(True)]) class RawTextHelpFormatter(RawDescriptionHelpFormatter): """Help message formatter which retains formatting of all help text. Only the name of this class is considered a public API. All the methods provided by the class are considered an implementation detail. """ def _split_lines(self, text, width): return text.splitlines() class ArgumentDefaultsHelpFormatter(HelpFormatter): """Help message formatter which adds default values to argument help. Only the name of this class is considered a public API. All the methods provided by the class are considered an implementation detail. """ def _get_help_string(self, action): help = action.help if '%(default)' not in action.help: if action.default is not SUPPRESS: defaulting_nargs = [OPTIONAL, ZERO_OR_MORE] if action.option_strings or action.nargs in defaulting_nargs: help += ' (default: %(default)s)' return help # ===================== # Options and Arguments # ===================== def _get_action_name(argument): if argument is None: return None elif argument.option_strings: return '/'.join(argument.option_strings) elif argument.metavar not in (None, SUPPRESS): return argument.metavar elif argument.dest not in (None, SUPPRESS): return argument.dest else: return None class ArgumentError(Exception): """An error from creating or using an argument (optional or positional). The string value of this exception is the message, augmented with information about the argument that caused it. """ def __init__(self, argument, message): self.argument_name = _get_action_name(argument) self.message = message def __str__(self): if self.argument_name is None: format = '%(message)s' else: format = 'argument %(argument_name)s: %(message)s' return format % dict(message=self.message, argument_name=self.argument_name) # ============== # Action classes # ============== class Action(_AttributeHolder): """Information about how to convert command line strings to Python objects. Action objects are used by an ArgumentParser to represent the information needed to parse a single argument from one or more strings from the command line. 
The keyword arguments to the Action constructor are also all attributes of Action instances. Keyword Arguments: - option_strings -- A list of command-line option strings which should be associated with this action. - dest -- The name of the attribute to hold the created object(s) - nargs -- The number of command-line arguments that should be consumed. By default, one argument will be consumed and a single value will be produced. Other values include: - N (an integer) consumes N arguments (and produces a list) - '?' consumes zero or one arguments - '*' consumes zero or more arguments (and produces a list) - '+' consumes one or more arguments (and produces a list) Note that the difference between the default and nargs=1 is that with the default, a single value will be produced, while with nargs=1, a list containing a single value will be produced. - const -- The value to be produced if the option is specified and the option uses an action that takes no values. - default -- The value to be produced if the option is not specified. - type -- The type which the command-line arguments should be converted to, should be one of 'string', 'int', 'float', 'complex' or a callable object that accepts a single string argument. If None, 'string' is assumed. - choices -- A container of values that should be allowed. If not None, after a command-line argument has been converted to the appropriate type, an exception will be raised if it is not a member of this collection. - required -- True if the action must always be specified at the command line. This is only meaningful for optional command-line arguments. - help -- The help string describing the argument. - metavar -- The name to be used for the option's argument with the help string. If None, the 'dest' value will be used as the name. 
""" def __init__(self, option_strings, dest, nargs=None, const=None, default=None, type=None, choices=None, required=False, help=None, metavar=None): self.option_strings = option_strings self.dest = dest self.nargs = nargs self.const = const self.default = default self.type = type self.choices = choices self.required = required self.help = help self.metavar = metavar def _get_kwargs(self): names = [ 'option_strings', 'dest', 'nargs', 'const', 'default', 'type', 'choices', 'help', 'metavar', ] return [(name, getattr(self, name)) for name in names] def __call__(self, parser, namespace, values, option_string=None): raise NotImplementedError(_('.__call__() not defined')) class _StoreAction(Action): def __init__(self, option_strings, dest, nargs=None, const=None, default=None, type=None, choices=None, required=False, help=None, metavar=None): if nargs == 0: raise ValueError('nargs for store actions must be > 0; if you ' 'have nothing to store, actions such as store ' 'true or store const may be more appropriate') if const is not None and nargs != OPTIONAL: raise ValueError('nargs must be %r to supply const' % OPTIONAL) super(_StoreAction, self).__init__( option_strings=option_strings, dest=dest, nargs=nargs, const=const, default=default, type=type, choices=choices, required=required, help=help, metavar=metavar) def __call__(self, parser, namespace, values, option_string=None): setattr(namespace, self.dest, values) class _StoreConstAction(Action): def __init__(self, option_strings, dest, const, default=None, required=False, help=None, metavar=None): super(_StoreConstAction, self).__init__( option_strings=option_strings, dest=dest, nargs=0, const=const, default=default, required=required, help=help) def __call__(self, parser, namespace, values, option_string=None): setattr(namespace, self.dest, self.const) class _StoreTrueAction(_StoreConstAction): def __init__(self, option_strings, dest, default=False, required=False, help=None): super(_StoreTrueAction, self).__init__( option_strings=option_strings, dest=dest, const=True, default=default, required=required, help=help) class _StoreFalseAction(_StoreConstAction): def __init__(self, option_strings, dest, default=True, required=False, help=None): super(_StoreFalseAction, self).__init__( option_strings=option_strings, dest=dest, const=False, default=default, required=required, help=help) class _AppendAction(Action): def __init__(self, option_strings, dest, nargs=None, const=None, default=None, type=None, choices=None, required=False, help=None, metavar=None): if nargs == 0: raise ValueError('nargs for append actions must be > 0; if arg ' 'strings are not supplying the value to append, ' 'the append const action may be more appropriate') if const is not None and nargs != OPTIONAL: raise ValueError('nargs must be %r to supply const' % OPTIONAL) super(_AppendAction, self).__init__( option_strings=option_strings, dest=dest, nargs=nargs, const=const, default=default, type=type, choices=choices, required=required, help=help, metavar=metavar) def __call__(self, parser, namespace, values, option_string=None): items = _copy.copy(_ensure_value(namespace, self.dest, [])) items.append(values) setattr(namespace, self.dest, items) class _AppendConstAction(Action): def __init__(self, option_strings, dest, const, default=None, required=False, help=None, metavar=None): super(_AppendConstAction, self).__init__( option_strings=option_strings, dest=dest, nargs=0, const=const, default=default, required=required, help=help, metavar=metavar) def __call__(self, parser, 
namespace, values, option_string=None): items = _copy.copy(_ensure_value(namespace, self.dest, [])) items.append(self.const) setattr(namespace, self.dest, items) class _CountAction(Action): def __init__(self, option_strings, dest, default=None, required=False, help=None): super(_CountAction, self).__init__( option_strings=option_strings, dest=dest, nargs=0, default=default, required=required, help=help) def __call__(self, parser, namespace, values, option_string=None): new_count = _ensure_value(namespace, self.dest, 0) + 1 setattr(namespace, self.dest, new_count) class _HelpAction(Action): def __init__(self, option_strings, dest=SUPPRESS, default=SUPPRESS, help=None): super(_HelpAction, self).__init__( option_strings=option_strings, dest=dest, default=default, nargs=0, help=help) def __call__(self, parser, namespace, values, option_string=None): parser.print_help() parser.exit() class _VersionAction(Action): def __init__(self, option_strings, dest=SUPPRESS, default=SUPPRESS, help=None): super(_VersionAction, self).__init__( option_strings=option_strings, dest=dest, default=default, nargs=0, help=help) def __call__(self, parser, namespace, values, option_string=None): parser.print_version() parser.exit() class _SubParsersAction(Action): class _ChoicesPseudoAction(Action): def __init__(self, name, help): sup = super(_SubParsersAction._ChoicesPseudoAction, self) sup.__init__(option_strings=[], dest=name, help=help) def __init__(self, option_strings, prog, parser_class, dest=SUPPRESS, help=None, metavar=None): self._prog_prefix = prog self._parser_class = parser_class self._name_parser_map = {} self._choices_actions = [] super(_SubParsersAction, self).__init__( option_strings=option_strings, dest=dest, nargs=PARSER, choices=self._name_parser_map, help=help, metavar=metavar) def add_parser(self, name, **kwargs): # set prog from the existing prefix if kwargs.get('prog') is None: kwargs['prog'] = '%s %s' % (self._prog_prefix, name) # create a pseudo-action to hold the choice help if 'help' in kwargs: help = kwargs.pop('help') choice_action = self._ChoicesPseudoAction(name, help) self._choices_actions.append(choice_action) # create the parser and add it to the map parser = self._parser_class(**kwargs) self._name_parser_map[name] = parser return parser def _get_subactions(self): return self._choices_actions def __call__(self, parser, namespace, values, option_string=None): parser_name = values[0] arg_strings = values[1:] # set the parser name if requested if self.dest is not SUPPRESS: setattr(namespace, self.dest, parser_name) # select the parser try: parser = self._name_parser_map[parser_name] except KeyError: tup = parser_name, ', '.join(self._name_parser_map) msg = _('unknown parser %r (choices: %s)' % tup) raise ArgumentError(self, msg) # parse all the remaining options into the namespace parser.parse_args(arg_strings, namespace) # ============== # Type classes # ============== class FileType(object): """Factory for creating file object types Instances of FileType are typically passed as type= arguments to the ArgumentParser add_argument() method. Keyword Arguments: - mode -- A string indicating how the file is to be opened. Accepts the same values as the builtin open() function. - bufsize -- The file's desired buffer size. Accepts the same values as the builtin open() function. 
""" def __init__(self, mode='r', bufsize=None): self._mode = mode self._bufsize = bufsize def __call__(self, string): # the special argument "-" means sys.std{in,out} if string == '-': if 'r' in self._mode: return _sys.stdin elif 'w' in self._mode: return _sys.stdout else: msg = _('argument "-" with mode %r' % self._mode) raise ValueError(msg) # all other arguments are used as file names if self._bufsize: return open(string, self._mode, self._bufsize) else: return open(string, self._mode) def __repr__(self): args = [self._mode, self._bufsize] args_str = ', '.join([repr(arg) for arg in args if arg is not None]) return '%s(%s)' % (type(self).__name__, args_str) # =========================== # Optional and Positional Parsing # =========================== class Namespace(_AttributeHolder): """Simple object for storing attributes. Implements equality by attribute names and values, and provides a simple string representation. """ def __init__(self, **kwargs): for name in kwargs: setattr(self, name, kwargs[name]) def __eq__(self, other): return vars(self) == vars(other) def __ne__(self, other): return not (self == other) class _ActionsContainer(object): def __init__(self, description, prefix_chars, argument_default, conflict_handler): super(_ActionsContainer, self).__init__() self.description = description self.argument_default = argument_default self.prefix_chars = prefix_chars self.conflict_handler = conflict_handler # set up registries self._registries = {} # register actions self.register('action', None, _StoreAction) self.register('action', 'store', _StoreAction) self.register('action', 'store_const', _StoreConstAction) self.register('action', 'store_true', _StoreTrueAction) self.register('action', 'store_false', _StoreFalseAction) self.register('action', 'append', _AppendAction) self.register('action', 'append_const', _AppendConstAction) self.register('action', 'count', _CountAction) self.register('action', 'help', _HelpAction) self.register('action', 'version', _VersionAction) self.register('action', 'parsers', _SubParsersAction) # raise an exception if the conflict handler is invalid self._get_handler() # action storage self._actions = [] self._option_string_actions = {} # groups self._action_groups = [] self._mutually_exclusive_groups = [] # defaults storage self._defaults = {} # determines whether an "option" looks like a negative number self._negative_number_matcher = _re.compile(r'^-\d+|-\d*.\d+$') # whether or not there are any optionals that look like negative # numbers -- uses a list so it can be shared and edited self._has_negative_number_optionals = [] # ==================== # Registration methods # ==================== def register(self, registry_name, value, object): registry = self._registries.setdefault(registry_name, {}) registry[value] = object def _registry_get(self, registry_name, value, default=None): return self._registries[registry_name].get(value, default) # ================================== # Namespace default settings methods # ================================== def set_defaults(self, **kwargs): self._defaults.update(kwargs) # if these defaults match any existing arguments, replace # the previous default on the object with the new one for action in self._actions: if action.dest in kwargs: action.default = kwargs[action.dest] # ======================= # Adding argument actions # ======================= def add_argument(self, *args, **kwargs): """ add_argument(dest, ..., name=value, ...) add_argument(option_string, option_string, ..., name=value, ...) 
""" # if no positional args are supplied or only one is supplied and # it doesn't look like an option string, parse a positional # argument chars = self.prefix_chars if not args or len(args) == 1 and args[0][0] not in chars: kwargs = self._get_positional_kwargs(*args, **kwargs) # otherwise, we're adding an optional argument else: kwargs = self._get_optional_kwargs(*args, **kwargs) # if no default was supplied, use the parser-level default if 'default' not in kwargs: dest = kwargs['dest'] if dest in self._defaults: kwargs['default'] = self._defaults[dest] elif self.argument_default is not None: kwargs['default'] = self.argument_default # create the action object, and add it to the parser action_class = self._pop_action_class(kwargs) action = action_class(**kwargs) return self._add_action(action) def add_argument_group(self, *args, **kwargs): group = _ArgumentGroup(self, *args, **kwargs) self._action_groups.append(group) return group def add_mutually_exclusive_group(self, **kwargs): group = _MutuallyExclusiveGroup(self, **kwargs) self._mutually_exclusive_groups.append(group) return group def _add_action(self, action): # resolve any conflicts self._check_conflict(action) # add to actions list self._actions.append(action) action.container = self # index the action by any option strings it has for option_string in action.option_strings: self._option_string_actions[option_string] = action # set the flag if any option strings look like negative numbers for option_string in action.option_strings: if self._negative_number_matcher.match(option_string): if not self._has_negative_number_optionals: self._has_negative_number_optionals.append(True) # return the created action return action def _remove_action(self, action): self._actions.remove(action) def _add_container_actions(self, container): # collect groups by titles title_group_map = {} for group in self._action_groups: if group.title in title_group_map: msg = _('cannot merge actions - two groups are named %r') raise ValueError(msg % (group.title)) title_group_map[group.title] = group # map each action to its group group_map = {} for group in container._action_groups: # if a group with the title exists, use that, otherwise # create a new group matching the container's group if group.title not in title_group_map: title_group_map[group.title] = self.add_argument_group( title=group.title, description=group.description, conflict_handler=group.conflict_handler) # map the actions to their new group for action in group._group_actions: group_map[action] = title_group_map[group.title] # add container's mutually exclusive groups # NOTE: if add_mutually_exclusive_group ever gains title= and # description= then this code will need to be expanded as above for group in container._mutually_exclusive_groups: mutex_group = self.add_mutually_exclusive_group( required=group.required) # map the actions to their new mutex group for action in group._group_actions: group_map[action] = mutex_group # add all actions to this container or their group for action in container._actions: group_map.get(action, self)._add_action(action) def _get_positional_kwargs(self, dest, **kwargs): # make sure required is not specified if 'required' in kwargs: msg = _("'required' is an invalid argument for positionals") raise TypeError(msg) # mark positional arguments as required if at least one is # always required if kwargs.get('nargs') not in [OPTIONAL, ZERO_OR_MORE]: kwargs['required'] = True if kwargs.get('nargs') == ZERO_OR_MORE and 'default' not in kwargs: kwargs['required'] = True # 
return the keyword arguments with no option strings return dict(kwargs, dest=dest, option_strings=[]) def _get_optional_kwargs(self, *args, **kwargs): # determine short and long option strings option_strings = [] long_option_strings = [] for option_string in args: # error on one-or-fewer-character option strings if len(option_string) < 2: msg = _('invalid option string %r: ' 'must be at least two characters long') raise ValueError(msg % option_string) # error on strings that don't start with an appropriate prefix if not option_string[0] in self.prefix_chars: msg = _('invalid option string %r: ' 'must start with a character %r') tup = option_string, self.prefix_chars raise ValueError(msg % tup) # error on strings that are all prefix characters if not (_set(option_string) - _set(self.prefix_chars)): msg = _('invalid option string %r: ' 'must contain characters other than %r') tup = option_string, self.prefix_chars raise ValueError(msg % tup) # strings starting with two prefix characters are long options option_strings.append(option_string) if option_string[0] in self.prefix_chars: if option_string[1] in self.prefix_chars: long_option_strings.append(option_string) # infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x' dest = kwargs.pop('dest', None) if dest is None: if long_option_strings: dest_option_string = long_option_strings[0] else: dest_option_string = option_strings[0] dest = dest_option_string.lstrip(self.prefix_chars) dest = dest.replace('-', '_') # return the updated keyword arguments return dict(kwargs, dest=dest, option_strings=option_strings) def _pop_action_class(self, kwargs, default=None): action = kwargs.pop('action', default) return self._registry_get('action', action, action) def _get_handler(self): # determine function from conflict handler string handler_func_name = '_handle_conflict_%s' % self.conflict_handler try: return getattr(self, handler_func_name) except AttributeError: msg = _('invalid conflict_resolution value: %r') raise ValueError(msg % self.conflict_handler) def _check_conflict(self, action): # find all options that conflict with this option confl_optionals = [] for option_string in action.option_strings: if option_string in self._option_string_actions: confl_optional = self._option_string_actions[option_string] confl_optionals.append((option_string, confl_optional)) # resolve any conflicts if confl_optionals: conflict_handler = self._get_handler() conflict_handler(action, confl_optionals) def _handle_conflict_error(self, action, conflicting_actions): message = _('conflicting option string(s): %s') conflict_string = ', '.join([option_string for option_string, action in conflicting_actions]) raise ArgumentError(action, message % conflict_string) def _handle_conflict_resolve(self, action, conflicting_actions): # remove all conflicting options for option_string, action in conflicting_actions: # remove the conflicting option action.option_strings.remove(option_string) self._option_string_actions.pop(option_string, None) # if the option now has no option string, remove it from the # container holding it if not action.option_strings: action.container._remove_action(action) class _ArgumentGroup(_ActionsContainer): def __init__(self, container, title=None, description=None, **kwargs): # add any missing keyword arguments by checking the container update = kwargs.setdefault update('conflict_handler', container.conflict_handler) update('prefix_chars', container.prefix_chars) update('argument_default', container.argument_default) super_init = 
super(_ArgumentGroup, self).__init__ super_init(description=description, **kwargs) # group attributes self.title = title self._group_actions = [] # share most attributes with the container self._registries = container._registries self._actions = container._actions self._option_string_actions = container._option_string_actions self._defaults = container._defaults self._has_negative_number_optionals = \ container._has_negative_number_optionals def _add_action(self, action): action = super(_ArgumentGroup, self)._add_action(action) self._group_actions.append(action) return action def _remove_action(self, action): super(_ArgumentGroup, self)._remove_action(action) self._group_actions.remove(action) class _MutuallyExclusiveGroup(_ArgumentGroup): def __init__(self, container, required=False): super(_MutuallyExclusiveGroup, self).__init__(container) self.required = required self._container = container def _add_action(self, action): if action.required: msg = _('mutually exclusive arguments must be optional') raise ValueError(msg) action = self._container._add_action(action) self._group_actions.append(action) return action def _remove_action(self, action): self._container._remove_action(action) self._group_actions.remove(action) class ArgumentParser(_AttributeHolder, _ActionsContainer): """Object for parsing command line strings into Python objects. Keyword Arguments: - prog -- The name of the program (default: sys.argv[0]) - usage -- A usage message (default: auto-generated from arguments) - description -- A description of what the program does - epilog -- Text following the argument descriptions - version -- Add a -v/--version option with the given version string - parents -- Parsers whose arguments should be copied into this one - formatter_class -- HelpFormatter class for printing help messages - prefix_chars -- Characters that prefix optional arguments - fromfile_prefix_chars -- Characters that prefix files containing additional arguments - argument_default -- The default value for all arguments - conflict_handler -- String indicating how to handle conflicts - add_help -- Add a -h/-help option """ def __init__(self, prog=None, usage=None, description=None, epilog=None, version=None, parents=[], formatter_class=HelpFormatter, prefix_chars='-', fromfile_prefix_chars=None, argument_default=None, conflict_handler='error', add_help=True): superinit = super(ArgumentParser, self).__init__ superinit(description=description, prefix_chars=prefix_chars, argument_default=argument_default, conflict_handler=conflict_handler) # default setting for prog if prog is None: prog = _os.path.basename(_sys.argv[0]) self.prog = prog self.usage = usage self.epilog = epilog self.version = version self.formatter_class = formatter_class self.fromfile_prefix_chars = fromfile_prefix_chars self.add_help = add_help add_group = self.add_argument_group self._positionals = add_group(_('positional arguments')) self._optionals = add_group(_('optional arguments')) self._subparsers = None # register types def identity(string): return string self.register('type', None, identity) # add help and version arguments if necessary # (using explicit default to override global argument_default) if self.add_help: self.add_argument( '-h', '--help', action='help', default=SUPPRESS, help=_('show this help message and exit')) if self.version: self.add_argument( '-v', '--version', action='version', default=SUPPRESS, help=_("show program's version number and exit")) # add parent arguments and defaults for parent in parents: 
self._add_container_actions(parent) try: defaults = parent._defaults except AttributeError: pass else: self._defaults.update(defaults) # ======================= # Pretty __repr__ methods # ======================= def _get_kwargs(self): names = [ 'prog', 'usage', 'description', 'version', 'formatter_class', 'conflict_handler', 'add_help', ] return [(name, getattr(self, name)) for name in names] # ================================== # Optional/Positional adding methods # ================================== def add_subparsers(self, **kwargs): if self._subparsers is not None: self.error(_('cannot have multiple subparser arguments')) # add the parser class to the arguments if it's not present kwargs.setdefault('parser_class', type(self)) if 'title' in kwargs or 'description' in kwargs: title = _(kwargs.pop('title', 'subcommands')) description = _(kwargs.pop('description', None)) self._subparsers = self.add_argument_group(title, description) else: self._subparsers = self._positionals # prog defaults to the usage message of this parser, skipping # optional arguments and with no "usage:" prefix if kwargs.get('prog') is None: formatter = self._get_formatter() positionals = self._get_positional_actions() groups = self._mutually_exclusive_groups formatter.add_usage(self.usage, positionals, groups, '') kwargs['prog'] = formatter.format_help().strip() # create the parsers action and add it to the positionals list parsers_class = self._pop_action_class(kwargs, 'parsers') action = parsers_class(option_strings=[], **kwargs) self._subparsers._add_action(action) # return the created parsers action return action def _add_action(self, action): if action.option_strings: self._optionals._add_action(action) else: self._positionals._add_action(action) return action def _get_optional_actions(self): return [action for action in self._actions if action.option_strings] def _get_positional_actions(self): return [action for action in self._actions if not action.option_strings] # ===================================== # Command line argument parsing methods # ===================================== def parse_args(self, args=None, namespace=None): args, argv = self.parse_known_args(args, namespace) if argv: msg = _('unrecognized arguments: %s') self.error(msg % ' '.join(argv)) return args def parse_known_args(self, args=None, namespace=None): # args default to the system args if args is None: args = _sys.argv[1:] # default Namespace built from parser defaults if namespace is None: namespace = Namespace() # add any action defaults that aren't present for action in self._actions: if action.dest is not SUPPRESS: if not hasattr(namespace, action.dest): if action.default is not SUPPRESS: default = action.default if isinstance(action.default, _basestring): default = self._get_value(action, default) setattr(namespace, action.dest, default) # add any parser defaults that aren't present for dest in self._defaults: if not hasattr(namespace, dest): setattr(namespace, dest, self._defaults[dest]) # parse the arguments and exit if there are any errors try: return self._parse_known_args(args, namespace) except ArgumentError: err = _sys.exc_info()[1] self.error(str(err)) def _parse_known_args(self, arg_strings, namespace): # replace arg strings that are file references if self.fromfile_prefix_chars is not None: arg_strings = self._read_args_from_files(arg_strings) # map all mutually exclusive arguments to the other arguments # they can't occur with action_conflicts = {} for mutex_group in self._mutually_exclusive_groups: group_actions = 
mutex_group._group_actions for i, mutex_action in enumerate(mutex_group._group_actions): conflicts = action_conflicts.setdefault(mutex_action, []) conflicts.extend(group_actions[:i]) conflicts.extend(group_actions[i + 1:]) # find all option indices, and determine the arg_string_pattern # which has an 'O' if there is an option at an index, # an 'A' if there is an argument, or a '-' if there is a '--' option_string_indices = {} arg_string_pattern_parts = [] arg_strings_iter = iter(arg_strings) for i, arg_string in enumerate(arg_strings_iter): # all args after -- are non-options if arg_string == '--': arg_string_pattern_parts.append('-') for arg_string in arg_strings_iter: arg_string_pattern_parts.append('A') # otherwise, add the arg to the arg strings # and note the index if it was an option else: option_tuple = self._parse_optional(arg_string) if option_tuple is None: pattern = 'A' else: option_string_indices[i] = option_tuple pattern = 'O' arg_string_pattern_parts.append(pattern) # join the pieces together to form the pattern arg_strings_pattern = ''.join(arg_string_pattern_parts) # converts arg strings to the appropriate and then takes the action seen_actions = _set() seen_non_default_actions = _set() def take_action(action, argument_strings, option_string=None): seen_actions.add(action) argument_values = self._get_values(action, argument_strings) # error if this argument is not allowed with other previously # seen arguments, assuming that actions that use the default # value don't really count as "present" if argument_values is not action.default: seen_non_default_actions.add(action) for conflict_action in action_conflicts.get(action, []): if conflict_action in seen_non_default_actions: msg = _('not allowed with argument %s') action_name = _get_action_name(conflict_action) raise ArgumentError(action, msg % action_name) # take the action if we didn't receive a SUPPRESS value # (e.g. from a default) if argument_values is not SUPPRESS: action(self, namespace, argument_values, option_string) # function to convert arg_strings into an optional action def consume_optional(start_index): # get the optional identified at this index option_tuple = option_string_indices[start_index] action, option_string, explicit_arg = option_tuple # identify additional optionals in the same arg string # (e.g. 
-xyz is the same as -x -y -z if no args are required) match_argument = self._match_argument action_tuples = [] while True: # if we found no optional action, skip it if action is None: extras.append(arg_strings[start_index]) return start_index + 1 # if there is an explicit argument, try to match the # optional's string arguments to only this if explicit_arg is not None: arg_count = match_argument(action, 'A') # if the action is a single-dash option and takes no # arguments, try to parse more single-dash options out # of the tail of the option string chars = self.prefix_chars if arg_count == 0 and option_string[1] not in chars: action_tuples.append((action, [], option_string)) for char in self.prefix_chars: option_string = char + explicit_arg[0] explicit_arg = explicit_arg[1:] or None optionals_map = self._option_string_actions if option_string in optionals_map: action = optionals_map[option_string] break else: msg = _('ignored explicit argument %r') raise ArgumentError(action, msg % explicit_arg) # if the action expect exactly one argument, we've # successfully matched the option; exit the loop elif arg_count == 1: stop = start_index + 1 args = [explicit_arg] action_tuples.append((action, args, option_string)) break # error if a double-dash option did not use the # explicit argument else: msg = _('ignored explicit argument %r') raise ArgumentError(action, msg % explicit_arg) # if there is no explicit argument, try to match the # optional's string arguments with the following strings # if successful, exit the loop else: start = start_index + 1 selected_patterns = arg_strings_pattern[start:] arg_count = match_argument(action, selected_patterns) stop = start + arg_count args = arg_strings[start:stop] action_tuples.append((action, args, option_string)) break # add the Optional to the list and return the index at which # the Optional's string args stopped assert action_tuples for action, args, option_string in action_tuples: take_action(action, args, option_string) return stop # the list of Positionals left to be parsed; this is modified # by consume_positionals() positionals = self._get_positional_actions() # function to convert arg_strings into positional actions def consume_positionals(start_index): # match as many Positionals as possible match_partial = self._match_arguments_partial selected_pattern = arg_strings_pattern[start_index:] arg_counts = match_partial(positionals, selected_pattern) # slice off the appropriate arg strings for each Positional # and add the Positional and its args to the list for action, arg_count in zip(positionals, arg_counts): args = arg_strings[start_index: start_index + arg_count] start_index += arg_count take_action(action, args) # slice off the Positionals that we just parsed and return the # index at which the Positionals' string args stopped positionals[:] = positionals[len(arg_counts):] return start_index # consume Positionals and Optionals alternately, until we have # passed the last option string extras = [] start_index = 0 if option_string_indices: max_option_string_index = max(option_string_indices) else: max_option_string_index = -1 while start_index <= max_option_string_index: # consume any Positionals preceding the next option next_option_string_index = min([ index for index in option_string_indices if index >= start_index]) if start_index != next_option_string_index: positionals_end_index = consume_positionals(start_index) # only try to parse the next optional if we didn't consume # the option string during the positionals parsing if 
positionals_end_index > start_index: start_index = positionals_end_index continue else: start_index = positionals_end_index # if we consumed all the positionals we could and we're not # at the index of an option string, there were extra arguments if start_index not in option_string_indices: strings = arg_strings[start_index:next_option_string_index] extras.extend(strings) start_index = next_option_string_index # consume the next optional and any arguments for it start_index = consume_optional(start_index) # consume any positionals following the last Optional stop_index = consume_positionals(start_index) # if we didn't consume all the argument strings, there were extras extras.extend(arg_strings[stop_index:]) # if we didn't use all the Positional objects, there were too few # arg strings supplied. if positionals: self.error(_('too few arguments')) # make sure all required actions were present for action in self._actions: if action.required: if action not in seen_actions: name = _get_action_name(action) self.error(_('argument %s is required') % name) # make sure all required groups had one option present for group in self._mutually_exclusive_groups: if group.required: for action in group._group_actions: if action in seen_non_default_actions: break # if no actions were used, report the error else: names = [_get_action_name(action) for action in group._group_actions if action.help is not SUPPRESS] msg = _('one of the arguments %s is required') self.error(msg % ' '.join(names)) # return the updated namespace and the extra arguments return namespace, extras def _read_args_from_files(self, arg_strings): # expand arguments referencing files new_arg_strings = [] for arg_string in arg_strings: # for regular arguments, just add them back into the list if arg_string[0] not in self.fromfile_prefix_chars: new_arg_strings.append(arg_string) # replace arguments referencing files with the file content else: try: args_file = open(arg_string[1:]) try: arg_strings = args_file.read().splitlines() arg_strings = self._read_args_from_files(arg_strings) new_arg_strings.extend(arg_strings) finally: args_file.close() except IOError: err = _sys.exc_info()[1] self.error(str(err)) # return the modified argument list return new_arg_strings def _match_argument(self, action, arg_strings_pattern): # match the pattern for this action to the arg strings nargs_pattern = self._get_nargs_pattern(action) match = _re.match(nargs_pattern, arg_strings_pattern) # raise an exception if we weren't able to find a match if match is None: nargs_errors = { None: _('expected one argument'), OPTIONAL: _('expected at most one argument'), ONE_OR_MORE: _('expected at least one argument'), } default = _('expected %s argument(s)') % action.nargs msg = nargs_errors.get(action.nargs, default) raise ArgumentError(action, msg) # return the number of arguments matched return len(match.group(1)) def _match_arguments_partial(self, actions, arg_strings_pattern): # progressively shorten the actions list by slicing off the # final actions until we find a match result = [] for i in range(len(actions), 0, -1): actions_slice = actions[:i] pattern = ''.join([self._get_nargs_pattern(action) for action in actions_slice]) match = _re.match(pattern, arg_strings_pattern) if match is not None: result.extend([len(string) for string in match.groups()]) break # return the list of arg string counts return result def _parse_optional(self, arg_string): # if it's an empty string, it was meant to be a positional if not arg_string: return None # if it doesn't start with a 
prefix, it was meant to be positional if not arg_string[0] in self.prefix_chars: return None # if it's just dashes, it was meant to be positional if not arg_string.strip('-'): return None # if the option string is present in the parser, return the action if arg_string in self._option_string_actions: action = self._option_string_actions[arg_string] return action, arg_string, None # search through all possible prefixes of the option string # and all actions in the parser for possible interpretations option_tuples = self._get_option_tuples(arg_string) # if multiple actions match, the option string was ambiguous if len(option_tuples) > 1: options = ', '.join([option_string for action, option_string, explicit_arg in option_tuples]) tup = arg_string, options self.error(_('ambiguous option: %s could match %s') % tup) # if exactly one action matched, this segmentation is good, # so return the parsed action elif len(option_tuples) == 1: option_tuple, = option_tuples return option_tuple # if it was not found as an option, but it looks like a negative # number, it was meant to be positional # unless there are negative-number-like options if self._negative_number_matcher.match(arg_string): if not self._has_negative_number_optionals: return None # if it contains a space, it was meant to be a positional if ' ' in arg_string: return None # it was meant to be an optional but there is no such option # in this parser (though it might be a valid option in a subparser) return None, arg_string, None def _get_option_tuples(self, option_string): result = [] # option strings starting with two prefix characters are only # split at the '=' chars = self.prefix_chars if option_string[0] in chars and option_string[1] in chars: if '=' in option_string: option_prefix, explicit_arg = option_string.split('=', 1) else: option_prefix = option_string explicit_arg = None for option_string in self._option_string_actions: if option_string.startswith(option_prefix): action = self._option_string_actions[option_string] tup = action, option_string, explicit_arg result.append(tup) # single character options can be concatenated with their arguments # but multiple character options always have to have their argument # separate elif option_string[0] in chars and option_string[1] not in chars: option_prefix = option_string explicit_arg = None short_option_prefix = option_string[:2] short_explicit_arg = option_string[2:] for option_string in self._option_string_actions: if option_string == short_option_prefix: action = self._option_string_actions[option_string] tup = action, option_string, short_explicit_arg result.append(tup) elif option_string.startswith(option_prefix): action = self._option_string_actions[option_string] tup = action, option_string, explicit_arg result.append(tup) # shouldn't ever get here else: self.error(_('unexpected option string: %s') % option_string) # return the collected option tuples return result def _get_nargs_pattern(self, action): # in all examples below, we have to allow for '--' args # which are represented as '-' in the pattern nargs = action.nargs # the default (None) is assumed to be a single argument if nargs is None: nargs_pattern = '(-*A-*)' # allow zero or one arguments elif nargs == OPTIONAL: nargs_pattern = '(-*A?-*)' # allow zero or more arguments elif nargs == ZERO_OR_MORE: nargs_pattern = '(-*[A-]*)' # allow one or more arguments elif nargs == ONE_OR_MORE: nargs_pattern = '(-*A[A-]*)' # allow one argument followed by any number of options or arguments elif nargs is PARSER: nargs_pattern = 
'(-*A[-AO]*)' # all others should be integers else: nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs) # if this is an optional action, -- is not allowed if action.option_strings: nargs_pattern = nargs_pattern.replace('-*', '') nargs_pattern = nargs_pattern.replace('-', '') # return the pattern return nargs_pattern # ======================== # Value conversion methods # ======================== def _get_values(self, action, arg_strings): # for everything but PARSER args, strip out '--' if action.nargs is not PARSER: arg_strings = [s for s in arg_strings if s != '--'] # optional argument produces a default when not present if not arg_strings and action.nargs == OPTIONAL: if action.option_strings: value = action.const else: value = action.default if isinstance(value, _basestring): value = self._get_value(action, value) self._check_value(action, value) # when nargs='*' on a positional, if there were no command-line # args, use the default if it is anything other than None elif (not arg_strings and action.nargs == ZERO_OR_MORE and not action.option_strings): if action.default is not None: value = action.default else: value = arg_strings self._check_value(action, value) # single argument or optional argument produces a single value elif len(arg_strings) == 1 and action.nargs in [None, OPTIONAL]: arg_string, = arg_strings value = self._get_value(action, arg_string) self._check_value(action, value) # PARSER arguments convert all values, but check only the first elif action.nargs is PARSER: value = [self._get_value(action, v) for v in arg_strings] self._check_value(action, value[0]) # all other types of nargs produce a list else: value = [self._get_value(action, v) for v in arg_strings] for v in value: self._check_value(action, v) # return the converted value return value def _get_value(self, action, arg_string): type_func = self._registry_get('type', action.type, action.type) if not hasattr(type_func, '__call__'): if not hasattr(type_func, '__bases__'): # classic classes msg = _('%r is not callable') raise ArgumentError(action, msg % type_func) # convert the value to the appropriate type try: result = type_func(arg_string) # TypeErrors or ValueErrors indicate errors except (TypeError, ValueError): name = getattr(action.type, '__name__', repr(action.type)) msg = _('invalid %s value: %r') raise ArgumentError(action, msg % (name, arg_string)) # return the converted value return result def _check_value(self, action, value): # converted value must be one of the choices (if specified) if action.choices is not None and value not in action.choices: tup = value, ', '.join(map(repr, action.choices)) msg = _('invalid choice: %r (choose from %s)') % tup raise ArgumentError(action, msg) # ======================= # Help-formatting methods # ======================= def format_usage(self): formatter = self._get_formatter() formatter.add_usage(self.usage, self._actions, self._mutually_exclusive_groups) return formatter.format_help() def format_help(self): formatter = self._get_formatter() # usage formatter.add_usage(self.usage, self._actions, self._mutually_exclusive_groups) # description formatter.add_text(self.description) # positionals, optionals and user-defined groups for action_group in self._action_groups: formatter.start_section(action_group.title) formatter.add_text(action_group.description) formatter.add_arguments(action_group._group_actions) formatter.end_section() # epilog formatter.add_text(self.epilog) # determine help from format above return formatter.format_help() def format_version(self): 
formatter = self._get_formatter() formatter.add_text(self.version) return formatter.format_help() def _get_formatter(self): return self.formatter_class(prog=self.prog) # ===================== # Help-printing methods # ===================== def print_usage(self, file=None): self._print_message(self.format_usage(), file) def print_help(self, file=None): self._print_message(self.format_help(), file) def print_version(self, file=None): self._print_message(self.format_version(), file) def _print_message(self, message, file=None): if message: if file is None: file = _sys.stderr file.write(message) # =============== # Exiting methods # =============== def exit(self, status=0, message=None): if message: _sys.stderr.write(message) _sys.exit(status) def error(self, message): """error(message: string) Prints a usage message incorporating the message to stderr and exits. If you override this in a subclass, it should not return -- it should either exit or raise an exception. """ self.print_usage(_sys.stderr) self.exit(2, _('%s: error: %s\n') % (self.prog, message)) dipy-0.13.0/dipy/fixes/scipy.py000066400000000000000000000072641317371701200163370ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import import numpy as np from scipy.linalg import svd __all__ = ['polar'] def polar(a, side="right"): """ Compute the polar decomposition. Returns the factors of the polar decomposition [1]_ `u` and `p` such that ``a = up`` (if `side` is "right") or ``a = pu`` (if `side` is "left"), where `p` is positive semidefinite. Depending on the shape of `a`, either the rows or columns of `u` are orthonormal. When `a` is a square array, `u` is a square unitary array. When `a` is not square, the "canonical polar decomposition" [2]_ is computed. Parameters ---------- a : array_like, shape (m, n). The array to be factored. side : {'left', 'right'}, optional Determines whether a right or left polar decomposition is computed. If `side` is "right", then ``a = up``. If `side` is "left", then ``a = pu``. The default is "right". Returns ------- u : ndarray, shape (m, n) If `a` is square, then `u` is unitary. If m > n, then the columns of `a` are orthonormal, and if m < n, then the rows of `u` are orthonormal. p : ndarray `p` is Hermitian positive semidefinite. If `a` is nonsingular, `p` is positive definite. The shape of `p` is (n, n) or (m, m), depending on whether `side` is "right" or "left", respectively. References ---------- .. [1] R. A. Horn and C. R. Johnson, "Matrix Analysis", Cambridge University Press, 1985. .. [2] N. J. Higham, "Functions of Matrices: Theory and Computation", SIAM, 2008. Notes ----- Copyright (c) 2001, 2002 Enthought, Inc. All rights reserved. Copyright (c) 2003-2012 SciPy Developers. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: a. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. b. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. c. Neither the name of Enthought nor the names of the SciPy Developers may be used to endorse or promote products derived from this software without specific prior written permission. 
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ if side not in ['right', 'left']: raise ValueError("`side` must be either 'right' or 'left'") a = np.asarray(a) if a.ndim != 2: raise ValueError("`a` must be a 2-D array.") w, s, vh = svd(a, full_matrices=False) u = w.dot(vh) if side == 'right': # a = up p = (vh.T.conj() * s).dot(vh) else: # a = pu p = (w * s).dot(w.T.conj()) return u, p dipy-0.13.0/dipy/info.py000066400000000000000000000071321317371701200150170ustar00rootroot00000000000000""" This file contains defines parameters for dipy that we use to fill settings in setup.py, the dipy top-level docstring, and for building the docs. In setup.py in particular, we exec this file, so it cannot import dipy """ # dipy version information. An empty _version_extra corresponds to a # full release. '.dev' as a _version_extra string means this is a development # version _version_major = 0 _version_minor = 13 _version_micro = 0 #_version_extra = 'dev' _version_extra = '' # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" __version__ = "%s.%s.%s%s" % (_version_major, _version_minor, _version_micro, _version_extra) CLASSIFIERS = ["Development Status :: 3 - Alpha", "Environment :: Console", "Intended Audience :: Science/Research", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Scientific/Engineering"] description = 'Diffusion MRI utilities in python' # Note: this long_description is actually a copy/paste from the top-level # README.rst, so that it shows up nicely on PyPI. So please remember to edit # it only in one place and sync it correctly. long_description = """ ====== DIPY ====== Dipy is a python toolbox for analysis of MR diffusion imaging. Dipy is for research only; please do not use results from dipy for clinical decisions. Website ======= Current information can always be found from the dipy website - http://dipy.org Mailing Lists ============= Please see the developer's list at http://mail.scipy.org/mailman/listinfo/nipy-devel Code ==== You can find our sources and single-click downloads: * `Main repository`_ on Github. * Documentation_ for all releases and current development tree. * Download as a tar/zip file the `current trunk`_. .. _main repository: http://github.com/nipy/dipy .. _Documentation: http://dipy.org .. _current trunk: https://github.com/nipy/dipy/archive/master.zip License ======= Dipy is licensed under the terms of the BSD license. Please see the LICENSE file in the dipy distribution. Dipy uses other libraries also licensed under the BSD or the MIT licenses, with the only exception of the SHORE module which optionally uses the cvxopt library. Cvxopt is licensed under the GPL license. 
""" # versions for dependencies # Check these versions against .travis.yml and requirements.txt CYTHON_MIN_VERSION='0.25.1' NUMPY_MIN_VERSION='1.7.1' SCIPY_MIN_VERSION='0.9' NIBABEL_MIN_VERSION='2.1.0' H5PY_MIN_VERSION='2.4.0' # Main setup parameters NAME = 'dipy' MAINTAINER = "Eleftherios Garyfallidis" MAINTAINER_EMAIL = "neuroimaging@python.org" DESCRIPTION = description LONG_DESCRIPTION = long_description URL = "http://dipy.org" DOWNLOAD_URL = "http://github.com/nipy/dipy/archives/master" LICENSE = "BSD license" CLASSIFIERS = CLASSIFIERS AUTHOR = "dipy developers" AUTHOR_EMAIL = "neuroimaging@python.org" PLATFORMS = "OS Independent" MAJOR = _version_major MINOR = _version_minor MICRO = _version_micro ISRELEASE = _version_extra == '' VERSION = __version__ PROVIDES = ["dipy"] REQUIRES = ["numpy (>=%s)" % NUMPY_MIN_VERSION, "scipy (>=%s)" % SCIPY_MIN_VERSION, "nibabel (>=%s)" % NIBABEL_MIN_VERSION, "h5py (>=%s)" % H5PY_MIN_VERSION] dipy-0.13.0/dipy/io/000077500000000000000000000000001317371701200141165ustar00rootroot00000000000000dipy-0.13.0/dipy/io/__init__.py000066400000000000000000000006141317371701200162300ustar00rootroot00000000000000# init for io routines from __future__ import division, print_function, absolute_import from .gradients import read_bvals_bvecs from .dpy import Dpy from .bvectxt import (read_bvec_file, ornt_mapping, reorient_vectors, reorient_on_axis, orientation_from_string, orientation_to_string) from .pickles import save_pickle, load_pickle from . import utils dipy-0.13.0/dipy/io/bvectxt.py000066400000000000000000000130251317371701200161500ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import import numpy as np from os.path import splitext def read_bvec_file(filename, atol=.001): """ Read gradient table information from a pair of files with extentions .bvec and .bval. The bval file should have one row of values representing the bvalues of each volume in the dwi data set. The bvec file should have three rows, where the rows are the x, y, and z components of the normalized gradient direction for each of the volumes. Parameters ------------ filename : The path to the either the bvec or bval file atol : float, optional The tolorance used to check all the gradient directions are normalized. 
Defult is .001 """ base, ext = splitext(filename) if ext == '': bvec = base+'.bvec' bval = base+'.bval' elif ext == '.bvec': bvec = filename bval = base+'.bval' elif ext == '.bval': bvec = base+'.bvec' bval = filename else: raise ValueError('filename must have .bvec or .bval extension') b_values = np.loadtxt(bval) grad_table = np.loadtxt(bvec) if grad_table.shape[0] != 3: raise IOError('bvec file should have three rows') if b_values.ndim != 1: raise IOError('bval file should have one row') if b_values.shape[0] != grad_table.shape[1]: raise IOError('the gradient file and b value fileshould' 'have the same number of columns') grad_norms = np.sqrt((grad_table**2).sum(0)) if not np.allclose(grad_norms[b_values > 0], 1, atol=atol): raise IOError('the magnitudes of the gradient directions' + 'are not within ' + str(atol) + ' of 1') grad_table[:, b_values > 0] = (grad_table[:, b_values > 0] / grad_norms[b_values > 0]) return (grad_table, b_values) def ornt_mapping(ornt1, ornt2): """Calculates the mapping needing to get from orn1 to orn2""" mapping = np.empty((len(ornt1), 2), 'int') mapping[:, 0] = -1 A = ornt1[:, 0].argsort() B = ornt2[:, 0].argsort() mapping[B, 0] = A assert (mapping[:, 0] != -1).all() sign = ornt2[:, 1] * ornt1[mapping[:, 0], 1] mapping[:, 1] = sign return mapping def reorient_vectors(input, current_ornt, new_ornt, axis=0): """Changes the orientation of a gradients or other vectors Moves vectors, storted along axis, from current_ornt to new_ornt. For example the vector [x, y, z] in "RAS" will be [-x, -y, z] in "LPS". R: Right A: Anterior S: Superior L: Left P: Posterior I: Inferior Examples -------- >>> gtab = np.array([[1, 1, 1], [1, 2, 3]]) >>> reorient_vectors(gtab, 'ras', 'asr', axis=1) array([[1, 1, 1], [2, 3, 1]]) >>> reorient_vectors(gtab, 'ras', 'lps', axis=1) array([[-1, -1, 1], [-1, -2, 3]]) >>> bvec = gtab.T >>> reorient_vectors(bvec, 'ras', 'lps', axis=0) array([[-1, -1], [-1, -2], [ 1, 3]]) >>> reorient_vectors(bvec, 'ras', 'lsp') array([[-1, -1], [ 1, 3], [-1, -2]]) """ if isinstance(current_ornt, str): current_ornt = orientation_from_string(current_ornt) if isinstance(new_ornt, str): new_ornt = orientation_from_string(new_ornt) n = input.shape[axis] if current_ornt.shape != (n, 2) or new_ornt.shape != (n, 2): raise ValueError("orientations do not match") input = np.asarray(input) mapping = ornt_mapping(current_ornt, new_ornt) output = input.take(mapping[:, 0], axis) out_view = np.rollaxis(output, axis, output.ndim) out_view *= mapping[:, 1] return output def reorient_on_axis(input, current_ornt, new_ornt, axis=0): if isinstance(current_ornt, str): current_ornt = orientation_from_string(current_ornt) if isinstance(new_ornt, str): new_ornt = orientation_from_string(new_ornt) n = input.shape[axis] if current_ornt.shape != (n, 2) or new_ornt.shape != (n, 2): raise ValueError("orientations do not match") mapping = ornt_mapping(current_ornt, new_ornt) order = [slice(None)] * input.ndim order[axis] = mapping[:, 0] shape = [1] * input.ndim shape[axis] = -1 sign = mapping[:, 1] sign.shape = shape output = input[order] output *= sign return output def orientation_from_string(string_ornt): """Returns an array representation of an ornt string""" orientation_dict = dict(r=(0, 1), l=(0, -1), a=(1, 1), p=(1, -1), s=(2, 1), i=(2, -1)) ornt = tuple(orientation_dict[ii] for ii in string_ornt.lower()) ornt = np.array(ornt) if _check_ornt(ornt): msg = string_ornt + " does not seem to be a valid orientation string" raise ValueError(msg) return ornt def orientation_to_string(ornt): 
"""Returns a string representation of a 3d ornt""" if _check_ornt(ornt): msg = repr(ornt) + " does not seem to be a valid orientation" raise ValueError(msg) orientation_dict = {(0, 1): 'r', (0, -1): 'l', (1, 1): 'a', (1, -1): 'p', (2, 1): 's', (2, -1): 'i'} ornt_string = '' for ii in ornt: ornt_string += orientation_dict[(ii[0], ii[1])] return ornt_string def _check_ornt(ornt): uniq = np.unique(ornt[:, 0]) if len(uniq) != len(ornt): print(len(uniq)) return True uniq = np.unique(ornt[:, 1]) if tuple(uniq) not in set([(-1, 1), (-1,), (1,)]): print(tuple(uniq)) return True dipy-0.13.0/dipy/io/dpy.py000066400000000000000000000105721317371701200152710ustar00rootroot00000000000000""" A class for handling large tractography datasets. It is built using the h5py which in turn implement key features of the HDF5 (hierachical data format) API [1]_. References ---------- .. [1] http://www.hdfgroup.org/HDF5/doc/H5.intro.html """ import numpy as np import h5py from nibabel.streamlines import ArraySequence as Streamlines # Make sure not to carry across setup module from * import __all__ = ['Dpy'] class Dpy(object): def __init__(self, fname, mode='r', compression=0): """ Advanced storage system for tractography based on HDF5 Parameters ------------ fname : str, full filename mode : 'r' read 'w' write 'r+' read and write only if file already exists compression : 0 no compression to 9 maximum compression Examples ---------- >>> import os >>> from tempfile import mkstemp #temp file >>> from dipy.io.dpy import Dpy >>> def dpy_example(): ... fd,fname = mkstemp() ... fname += '.dpy'#add correct extension ... dpw = Dpy(fname,'w') ... A=np.ones((5,3)) ... B=2*A.copy() ... C=3*A.copy() ... dpw.write_track(A) ... dpw.write_track(B) ... dpw.write_track(C) ... dpw.close() ... dpr = Dpy(fname,'r') ... A=dpr.read_track() ... B=dpr.read_track() ... T=dpr.read_tracksi([0,1,2,0,0,2]) ... dpr.close() ... 
os.remove(fname) #delete file from disk >>> dpy_example() """ self.mode = mode self.f = h5py.File(fname, mode=self.mode) self.compression = compression if self.mode == 'w': self.f.attrs['version'] = u'0.0.1' self.streamlines = self.f.create_group('streamlines') self.tracks = self.streamlines.create_dataset( 'tracks', shape=(0, 3), dtype='f4', maxshape=(None, 3), chunks=True) self.offsets = self.streamlines.create_dataset( 'offsets', shape=(1,), dtype='i8', maxshape=(None,), chunks=True) self.curr_pos = 0 self.offsets[:] = np.array([self.curr_pos]).astype(np.int64) if self.mode == 'r': self.tracks = self.f['streamlines']['tracks'] self.offsets = self.f['streamlines']['offsets'] self.track_no = len(self.offsets) - 1 self.offs_pos = 0 def version(self): return self.f.attrs['version'] def write_track(self, track): """ write on track each time """ self.tracks.resize(self.tracks.shape[0] + track.shape[0], axis=0) self.tracks[-track.shape[0]:] = track.astype(np.float32) self.curr_pos += track.shape[0] self.offsets.resize(self.offsets.shape[0] + 1, axis=0) self.offsets[-1] = self.curr_pos def write_tracks(self, tracks): """ write many tracks together """ self.tracks.resize(self.tracks.shape[0] + tracks._data.shape[0], axis=0) self.tracks[-tracks._data.shape[0]:] = tracks._data self.offsets.resize(self.offsets.shape[0] + tracks._offsets.shape[0], axis=0) self.offsets[-tracks._offsets.shape[0]:] = \ self.offsets[-tracks._offsets.shape[0] - 1] + \ tracks._offsets + tracks._lengths def read_track(self): """ read one track each time """ off0, off1 = self.offsets[self.offs_pos:self.offs_pos + 2] self.offs_pos += 1 return self.tracks[off0:off1] def read_tracksi(self, indices): """ read tracks with specific indices """ tracks = Streamlines() for i in indices: off0, off1 = self.offsets[i:i + 2] tracks.append(self.tracks[off0:off1]) return tracks def read_tracks(self): """ read the entire tractography """ I = self.offsets[:] TR = self.tracks[:] tracks = Streamlines() for i in range(len(I) - 1): off0, off1 = I[i:i + 2] tracks.append(TR[off0:off1]) return tracks def close(self): self.f.close() if __name__ == '__main__': pass dipy-0.13.0/dipy/io/gradients.py000066400000000000000000000045021317371701200164510ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import from os.path import splitext from dipy.utils.six import string_types import numpy as np def read_bvals_bvecs(fbvals, fbvecs): """ Read b-values and b-vectors from disk Parameters ---------- fbvals : str Full path to file with b-values. None to not read bvals. fbvecs : str Full path of file with b-vectors. None to not read bvecs. Returns ------- bvals : array, (N,) or None bvecs : array, (N, 3) or None Notes ----- Files can be either '.bvals'/'.bvecs' or '.txt' or '.npy' (containing arrays stored with the appropriate values). 
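Examples
--------
A self-contained sketch: write a tiny FSL-style pair of files to a
temporary directory and read them back (the file names and values below
are only for illustration)::

    import os
    import tempfile
    import numpy as np
    from dipy.io.gradients import read_bvals_bvecs

    d = tempfile.mkdtemp()
    fbvals = os.path.join(d, 'dwi.bval')
    fbvecs = os.path.join(d, 'dwi.bvec')
    # One b=0 volume followed by three diffusion-weighted volumes; the
    # bvec file holds the x, y and z components in its three rows.
    np.savetxt(fbvals, np.array([0., 1000., 1000., 1000.]))
    np.savetxt(fbvecs, np.array([[0., 1., 0., 0.],
                                 [0., 0., 1., 0.],
                                 [0., 0., 0., 1.]]))
    bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs)
    # bvals has shape (4,); bvecs comes back as (4, 3), one unit
    # vector per volume.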
""" # Loop over the provided inputs, reading each one in turn and adding them # to this list: vals = [] for this_fname in [fbvals, fbvecs]: # If the input was None, we don't read anything and move on: if this_fname is None: vals.append(None) else: if isinstance(this_fname, string_types): base, ext = splitext(this_fname) if ext in ['.bvals', '.bval', '.bvecs', '.bvec', '.txt', '']: vals.append(np.squeeze(np.loadtxt(this_fname))) elif ext == '.npy': vals.append(np.squeeze(np.load(this_fname))) else: e_s = "File type %s is not recognized" % ext raise ValueError(e_s) else: raise ValueError('String with full path to file is required') # Once out of the loop, unpack them: bvals, bvecs = vals[0], vals[1] # If bvecs is None, you can just return now w/o making more checks: if bvecs is None: return bvals, bvecs if bvecs.shape[1] > bvecs.shape[0]: bvecs = bvecs.T if min(bvecs.shape) != 3: raise IOError('bvec file should have three rows') if bvecs.ndim != 2: raise IOError('bvec file should be saved as a two dimensional array') # If bvals is None, you don't need to check that they have the same shape: if bvals is None: return bvals, bvecs if len(bvals.shape) > 1: raise IOError('bval file should have one row') if max(bvals.shape) != max(bvecs.shape): raise IOError('b-values and b-vectors shapes do not correspond') return bvals, bvecs dipy-0.13.0/dipy/io/image.py000066400000000000000000000010671317371701200155560ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import import nibabel as nib def load_nifti(fname, return_img=False, return_voxsize=False): img = nib.load(fname) hdr = img.header data = img.get_data() vox_size = hdr.get_zooms()[:3] ret_val = [data, img.affine] if return_voxsize: ret_val.append(vox_size) if return_img: ret_val.append(img) return tuple(ret_val) def save_nifti(fname, data, affine, hdr=None): result_img = nib.Nifti1Image(data, affine, header=hdr) result_img.to_filename(fname) dipy-0.13.0/dipy/io/peaks.py000066400000000000000000000141031317371701200155720ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import import os import numpy as np from dipy.direction.peaks import (PeaksAndMetrics, reshape_peaks_for_visualization) from dipy.core.sphere import Sphere from dipy.io.image import save_nifti import h5py def _safe_save(group, array, name): """ Safe saving of arrays with specific names Parameters ---------- group : HDF5 group array : array name : string """ if array is not None: ds = group.create_dataset(name, shape=array.shape, dtype=array.dtype, chunks=True) ds[:] = array def load_peaks(fname, verbose=False): """ Load a PeaksAndMetrics HDF5 file (PAM5) Parameters ---------- fname : string Filename of PAM5 file. verbose : bool Print summary information about the loaded file. 
Returns ------- pam : PeaksAndMetrics object """ if os.path.splitext(fname)[1].lower() != '.pam5': raise IOError('This function supports only PAM5 (HDF5) files') f = h5py.File(fname, 'r') pam = PeaksAndMetrics() pamh = f['pam'] version = f.attrs['version'] if version != '0.0.1': raise IOError('Incorrect PAM5 file version {0}'.format(version,)) try: affine = pamh['affine'][:] except KeyError: affine = None peak_dirs = pamh['peak_dirs'][:] peak_values = pamh['peak_values'][:] peak_indices = pamh['peak_indices'][:] try: shm_coeff = pamh['shm_coeff'][:] except KeyError: shm_coeff = None sphere_vertices = pamh['sphere_vertices'][:] try: odf = pamh['odf'][:] except KeyError: odf = None pam.affine = affine pam.peak_dirs = peak_dirs pam.peak_values = peak_values pam.peak_indices = peak_indices pam.shm_coeff = shm_coeff pam.sphere = Sphere(xyz=sphere_vertices) pam.B = pamh['B'][:] pam.total_weight = pamh['total_weight'][:][0] pam.ang_thr = pamh['ang_thr'][:][0] pam.gfa = pamh['gfa'][:] pam.qa = pamh['qa'][:] pam.odf = odf f.close() if verbose: print('PAM5 version') print(version) print('Affine') print(pam.affine) print('Dirs shape') print(pam.peak_dirs.shape) print('SH shape') if pam.shm_coeff is not None: print(pam.shm_coeff.shape) else: print('None') print('ODF shape') if pam.odf is not None: print(pam.odf.shape) else: print('None') print('Total weight') print(pam.total_weight) print('Angular threshold') print(pam.ang_thr) print('Sphere vertices shape') print(pam.sphere.vertices.shape) return pam def save_peaks(fname, pam, affine=None, verbose=False): """ Save all important attributes of object PeaksAndMetrics in a PAM5 file (HDF5). Parameters ---------- fname : string Filename of PAM5 file pam : PeaksAndMetrics Object holding peak_dirs, shm_coeffs and other attributes affine : array The 4x4 matrix transforming the date from native to world coordinates. PeaksAndMetrics should have that attribute but if not it can be provided here. Default None. verbose : bool Print summary information about the saved file. 
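Examples
--------
A round trip closely modeled on the accompanying tests (the array
shapes are arbitrary and the file name is only for illustration)::

    import numpy as np
    from nibabel.tmpdirs import InTemporaryDirectory
    from dipy.data import get_sphere
    from dipy.direction.peaks import PeaksAndMetrics
    from dipy.io.peaks import load_peaks, save_peaks

    sphere = get_sphere('repulsion724')
    pam = PeaksAndMetrics()
    pam.affine = np.eye(4)
    pam.peak_dirs = np.zeros((10, 10, 10, 5, 3))
    pam.peak_values = np.zeros((10, 10, 10, 5))
    pam.peak_indices = np.zeros((10, 10, 10, 5))
    pam.shm_coeff = np.zeros((10, 10, 10, 45))
    pam.sphere = sphere
    pam.B = np.zeros((45, sphere.vertices.shape[0]))
    pam.total_weight = 0.5
    pam.ang_thr = 60
    pam.gfa = np.zeros((10, 10, 10))
    pam.qa = np.zeros((10, 10, 10, 5))
    pam.odf = np.zeros((10, 10, 10, sphere.vertices.shape[0]))

    with InTemporaryDirectory():
        save_peaks('peaks.pam5', pam)
        pam2 = load_peaks('peaks.pam5')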
""" if os.path.splitext(fname)[1] != '.pam5': raise IOError('This function saves only PAM5 (HDF5) files') if not (hasattr(pam, 'peak_dirs') and hasattr(pam, 'peak_values') and hasattr(pam, 'peak_indices')): msg = 'Cannot save object without peak_dirs, peak_values' msg += ' and peak_indices' raise ValueError(msg) f = h5py.File(fname, 'w') group = f.create_group('pam') f.attrs['version'] = u'0.0.1' version_string = f.attrs['version'] affine = pam.affine if hasattr(pam, 'affine') else affine shm_coeff = pam.shm_coeff if hasattr(pam, 'shm_coeff') else None odf = pam.odf if hasattr(pam, 'odf') else None _safe_save(group, affine, 'affine') _safe_save(group, pam.peak_dirs, 'peak_dirs') _safe_save(group, pam.peak_values, 'peak_values') _safe_save(group, pam.peak_indices, 'peak_indices') _safe_save(group, shm_coeff, 'shm_coeff') _safe_save(group, pam.sphere.vertices, 'sphere_vertices') _safe_save(group, pam.B, 'B') _safe_save(group, np.array([pam.total_weight]), 'total_weight') _safe_save(group, np.array([pam.ang_thr]), 'ang_thr') _safe_save(group, pam.gfa, 'gfa') _safe_save(group, pam.qa, 'qa') _safe_save(group, odf, 'odf') f.close() if verbose: print('PAM5 version') print(version_string) print('Affine') print(affine) print('Dirs shape') print(pam.peak_dirs.shape) print('SH shape') if shm_coeff is not None: print(shm_coeff.shape) else: print('None') print('ODF shape') if odf is not None: print(pam.odf.shape) else: print('None') print('Total weight') print(pam.total_weight) print('Angular threshold') print(pam.ang_thr) print('Sphere vertices shape') print(pam.sphere.vertices.shape) return pam def peaks_to_niftis(pam, fname_shm, fname_dirs, fname_values, fname_indices, fname_gfa, reshape_dirs=False): """ Save SH, directions, indices and values of peaks to Nifti. """ save_nifti(fname_shm, pam.shm_coeff.astype(np.float32), pam.affine) if reshape_dirs: pam_dirs = reshape_peaks_for_visualization(pam) else: pam_dirs = pam.peak_dirs.astype(np.float32) save_nifti(fname_dirs, pam_dirs, pam.affine) save_nifti(fname_values, pam.peak_values.astype(np.float32), pam.affine) save_nifti(fname_indices, pam.peak_indices, pam.affine) save_nifti(fname_gfa, pam.gfa, pam.affine) dipy-0.13.0/dipy/io/pickles.py000066400000000000000000000024321317371701200161230ustar00rootroot00000000000000""" Load and save pickles """ # Python 3 merged cPickle into pickle from dipy.utils.six.moves import cPickle def save_pickle(fname, dix): ''' Save `dix` to `fname` as pickle Parameters ------------ fname : str filename to save object e.g. 
a dictionary dix : str dictionary or other object Examples ---------- >>> import os >>> from tempfile import mkstemp >>> fd, fname = mkstemp() # make temporary file (opened, attached to fh) >>> d={0:{'d':1}} >>> save_pickle(fname, d) >>> d2=load_pickle(fname) We remove the temporary file we created for neatness >>> os.close(fd) # the file is still open, we need to close the fh >>> os.remove(fname) See also ---------- dipy.io.pickles.load_pickle ''' out = open(fname, 'wb') cPickle.dump(dix, out, protocol=cPickle.HIGHEST_PROTOCOL) out.close() def load_pickle(fname): ''' Load object from pickle file `fname` Parameters ------------ fname : str filename to load dict or other python object Returns --------- dix : object dictionary or other object Examples ---------- dipy.io.pickles.save_pickle ''' inp = open(fname, 'rb') dix = cPickle.load(inp) inp.close() return dix dipy-0.13.0/dipy/io/tests/000077500000000000000000000000001317371701200152605ustar00rootroot00000000000000dipy-0.13.0/dipy/io/tests/__init__.py000066400000000000000000000001731317371701200173720ustar00rootroot00000000000000# init to allow relative imports in tests # Test callable from numpy.testing import Tester test = Tester().test del Tester dipy-0.13.0/dipy/io/tests/test_bvectxt.py000066400000000000000000000037061317371701200203560ustar00rootroot00000000000000import numpy as np from numpy.testing import assert_array_equal from nose.tools import assert_raises from dipy.io.bvectxt import orientation_from_string, reorient_vectors, \ orientation_to_string, reorient_vectors def test_orientation_from_to_string(): ras = np.array(((0, 1), (1, 1), (2, 1))) lps = np.array(((0, -1), (1, -1), (2, 1))) asl = np.array(((1, 1), (2, 1), (0, -1))) assert_array_equal(orientation_from_string('ras'), ras) assert_array_equal(orientation_from_string('lps'), lps) assert_array_equal(orientation_from_string('asl'), asl) assert_raises(ValueError, orientation_from_string, 'aasl') assert orientation_to_string(ras) == 'ras' assert orientation_to_string(lps) == 'lps' assert orientation_to_string(asl) == 'asl' def test_reorient_vectors(): bvec = np.arange(12).reshape((3, 4)) assert_array_equal(reorient_vectors(bvec, 'ras', 'ras'), bvec) assert_array_equal(reorient_vectors(bvec, 'ras', 'lpi'), -bvec) result = bvec[[1, 2, 0]] assert_array_equal(reorient_vectors(bvec, 'ras', 'asr'), result) bvec = result result = bvec[[1, 0, 2]] * [[-1], [1], [-1]] assert_array_equal(reorient_vectors(bvec, 'asr', 'ial'), result) result = bvec[[1, 0, 2]] * [[-1], [1], [1]] assert_array_equal(reorient_vectors(bvec, 'asr', 'iar'), result) assert_raises(ValueError, reorient_vectors, bvec, 'ras', 'ra') bvec = np.arange(12).reshape((3, 4)) bvec = bvec.T assert_array_equal(reorient_vectors(bvec, 'ras', 'ras', axis=1), bvec) assert_array_equal(reorient_vectors(bvec, 'ras', 'lpi', axis=1), -bvec) result = bvec[:, [1, 2, 0]] assert_array_equal(reorient_vectors(bvec, 'ras', 'asr', axis=1), result) bvec = result result = bvec[:, [1, 0, 2]] * [-1, 1, -1] assert_array_equal(reorient_vectors(bvec, 'asr', 'ial', axis=1), result) result = bvec[:, [1, 0, 2]] * [-1, 1, 1] assert_array_equal(reorient_vectors(bvec, 'asr', 'iar', axis=1), result) dipy-0.13.0/dipy/io/tests/test_dpy.py000066400000000000000000000017641317371701200174750ustar00rootroot00000000000000import os import numpy as np from nibabel.tmpdirs import InTemporaryDirectory from dipy.io.dpy import Dpy import numpy.testing as npt from dipy.tracking.streamline import Streamlines def test_dpy(): fname = 'test.bin' with 
InTemporaryDirectory(): dpw = Dpy(fname, 'w') A = np.ones((5, 3)) B = 2 * A.copy() C = 3 * A.copy() dpw.write_track(A) dpw.write_track(B) dpw.write_track(C) dpw.write_tracks(Streamlines([C, B, A])) all_tracks = np.ascontiguousarray(np.vstack([A, B, C, C, B, A])) npt.assert_array_equal(all_tracks, dpw.tracks[:]) dpw.close() dpr = Dpy(fname, 'r') npt.assert_equal(dpr.version() == u'0.0.1', True) T = dpr.read_tracksi([0, 1, 2, 0, 0, 2]) T2 = dpr.read_tracks() npt.assert_equal(len(T2), 6) dpr.close() npt.assert_array_equal(A, T[0]) npt.assert_array_equal(C, T[5]) if __name__ == '__main__': npt.run_module_suite()dipy-0.13.0/dipy/io/tests/test_io.py000066400000000000000000000003421317371701200172770ustar00rootroot00000000000000""" Tests for overall io sub-package """ from dipy import io from nose.tools import assert_false def test_imports(): # Make sure io has not pulled in setup_module from dpy assert_false(hasattr(io, 'setup_module')) dipy-0.13.0/dipy/io/tests/test_io_gradients.py000066400000000000000000000042401317371701200213400ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import import os.path as osp import tempfile import numpy as np import numpy.testing as npt from dipy.data import get_data from dipy.io.gradients import read_bvals_bvecs from dipy.core.gradients import gradient_table def test_read_bvals_bvecs(): fimg, fbvals, fbvecs = get_data('small_101D') bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs) gt = gradient_table(bvals, bvecs) npt.assert_array_equal(bvals, gt.bvals) npt.assert_array_equal(bvecs, gt.bvecs) # None should also work as an input: bvals_none, bvecs_none = read_bvals_bvecs(None, fbvecs) npt.assert_array_equal(bvecs_none, gt.bvecs) bvals_none, bvecs_none = read_bvals_bvecs(fbvals, None) npt.assert_array_equal(bvals_none, gt.bvals) # Test for error raising with unknown file formats: nan_fbvecs = osp.splitext(fbvecs)[0] + '.nan' # Nonsense extension npt.assert_raises(ValueError, read_bvals_bvecs, fbvals, nan_fbvecs) # Test for error raising with incorrect file-contents: # These bvecs only have two rows/columns: new_bvecs1 = bvecs[:, :2] # Make a temporary file bv_file1 = tempfile.NamedTemporaryFile(mode='wt') # And fill it with these 2-columned bvecs: for x in range(new_bvecs1.shape[0]): bv_file1.file.write('%s %s\n' % (new_bvecs1[x][0], new_bvecs1[x][1])) bv_file1.close() npt.assert_raises(IOError, read_bvals_bvecs, fbvals, bv_file1.name) # These bvecs are saved as one long array: new_bvecs2 = np.ravel(bvecs) bv_file2 = tempfile.NamedTemporaryFile() np.save(bv_file2, new_bvecs2) bv_file2.close() npt.assert_raises(IOError, read_bvals_bvecs, fbvals, bv_file2.name) # There are less bvecs than bvals: new_bvecs3 = bvecs[:-1, :] bv_file3 = tempfile.NamedTemporaryFile() np.save(bv_file3, new_bvecs3) bv_file3.close() npt.assert_raises(IOError, read_bvals_bvecs, fbvals, bv_file3.name) # You entered the bvecs on both sides: npt.assert_raises(IOError, read_bvals_bvecs, fbvecs, fbvecs) if __name__ == '__main__': from numpy.testing import run_module_suite run_module_suite() dipy-0.13.0/dipy/io/tests/test_io_peaks.py000066400000000000000000000102211317371701200204570ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import import os import numpy as np import numpy.testing as npt from nibabel.tmpdirs import InTemporaryDirectory from dipy.reconst.peaks import PeaksAndMetrics from dipy.data import get_sphere from dipy.io.peaks import load_peaks, save_peaks, peaks_to_niftis def test_io_peaks(): with 
InTemporaryDirectory(): fname = 'test.pam5' sphere = get_sphere('repulsion724') pam = PeaksAndMetrics() pam.affine = np.eye(4) pam.peak_dirs = np.random.rand(10, 10, 10, 5, 3) pam.peak_values = np.zeros((10, 10, 10, 5)) pam.peak_indices = np.zeros((10, 10, 10, 5)) pam.shm_coeff = np.zeros((10, 10, 10, 45)) pam.sphere = sphere pam.B = np.zeros((45, sphere.vertices.shape[0])) pam.total_weight = 0.5 pam.ang_thr = 60 pam.gfa = np.zeros((10, 10, 10)) pam.qa = np.zeros((10, 10, 10, 5)) pam.odf = np.zeros((10, 10, 10, sphere.vertices.shape[0])) save_peaks(fname, pam) pam2 = load_peaks(fname, verbose=True) npt.assert_array_equal(pam.peak_dirs, pam2.peak_dirs) pam2.affine = None fname2 = 'test2.pam5' save_peaks(fname2, pam2, np.eye(4)) pam2_res = load_peaks(fname2, verbose=True) npt.assert_array_equal(pam.peak_dirs, pam2_res.peak_dirs) pam3 = load_peaks(fname2, verbose=False) for attr in ['peak_dirs', 'peak_values', 'peak_indices', 'gfa', 'qa', 'shm_coeff', 'B', 'odf']: npt.assert_array_equal(getattr(pam3, attr), getattr(pam, attr)) npt.assert_equal(pam3.total_weight, pam.total_weight) npt.assert_equal(pam3.ang_thr, pam.ang_thr) npt.assert_array_almost_equal(pam3.sphere.vertices, pam.sphere.vertices) fname3 = 'test3.pam5' pam4 = PeaksAndMetrics() npt.assert_raises(ValueError, save_peaks, fname3, pam4) fname4 = 'test4.pam5' del pam.affine save_peaks(fname4, pam, affine=None) fname5 = 'test5.pkm' npt.assert_raises(IOError, save_peaks, fname5, pam) pam.affine = np.eye(4) fname6 = 'test6.pam5' save_peaks(fname6, pam, verbose=True) del pam.shm_coeff save_peaks(fname6, pam, verbose=False) pam.shm_coeff = np.zeros((10, 10, 10, 45)) del pam.odf save_peaks(fname6, pam) pam_tmp = load_peaks(fname6, True) npt.assert_equal(pam_tmp.odf, None) fname7 = 'test7.paw' npt.assert_raises(IOError, load_peaks, fname7) del pam.shm_coeff save_peaks(fname6, pam, verbose=True) fname_shm = 'shm.nii.gz' fname_dirs = 'dirs.nii.gz' fname_values = 'values.nii.gz' fname_indices = 'indices.nii.gz' fname_gfa = 'gfa.nii.gz' pam.shm_coeff = np.ones((10, 10, 10, 45)) peaks_to_niftis(pam, fname_shm, fname_dirs, fname_values, fname_indices, fname_gfa, reshape_dirs=False) os.path.isfile(fname_shm) os.path.isfile(fname_dirs) os.path.isfile(fname_values) os.path.isfile(fname_indices) os.path.isfile(fname_gfa) def test_io_save_peaks_error(): with InTemporaryDirectory(): fname = 'test.pam5' pam = PeaksAndMetrics() npt.assert_raises(IOError, save_peaks, 'test.pam', pam) npt.assert_raises(ValueError, save_peaks, fname, pam) sphere = get_sphere('repulsion724') pam.affine = np.eye(4) pam.peak_dirs = np.random.rand(10, 10, 10, 5, 3) pam.peak_values = np.zeros((10, 10, 10, 5)) pam.peak_indices = np.zeros((10, 10, 10, 5)) pam.shm_coeff = np.zeros((10, 10, 10, 45)) pam.sphere = sphere pam.B = np.zeros((45, sphere.vertices.shape[0])) pam.total_weight = 0.5 pam.ang_thr = 60 pam.gfa = np.zeros((10, 10, 10)) pam.qa = np.zeros((10, 10, 10, 5)) pam.odf = np.zeros((10, 10, 10, sphere.vertices.shape[0])) if __name__ == '__main__': #npt.run_module_suite() test_io_peaks() test_io_save_peaks_error()dipy-0.13.0/dipy/io/trackvis.py000066400000000000000000000017001317371701200163140ustar00rootroot00000000000000import nibabel as nib import numpy as np from dipy.tracking import utils def save_trk(filename, points, vox_to_ras, shape): """A temporary helper function for saving trk files. This function will soon be replaced by better trk file support in nibabel. 
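Examples
--------
A minimal sketch with two short synthetic streamlines, an identity
voxel-to-RAS affine (so voxel and world coordinates coincide) and a
(10, 10, 10) volume; the output file name is only for illustration::

    import numpy as np
    from dipy.io.trackvis import save_trk

    streamlines = [np.array([[0., 0., 0.],
                             [1., 1., 1.],
                             [2., 2., 2.]]),
                   np.array([[0., 1., 0.],
                             [1., 2., 1.]])]
    save_trk('bundle.trk', streamlines, np.eye(4), (10, 10, 10))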
""" voxel_order = nib.orientations.aff2axcodes(vox_to_ras) voxel_order = "".join(voxel_order) # Compute the vox_to_ras of "trackvis space" zooms = np.sqrt((vox_to_ras * vox_to_ras).sum(0)) vox_to_trk = np.diag(zooms) vox_to_trk[3, 3] = 1 vox_to_trk[:3, 3] = zooms[:3] / 2. points = utils.move_streamlines(points, input_space=vox_to_ras, output_space=vox_to_trk) data = ((p, None, None) for p in points) hdr = nib.trackvis.empty_header() hdr['dim'] = shape hdr['voxel_order'] = voxel_order hdr['voxel_size'] = zooms[:3] nib.trackvis.write(filename, data, hdr) dipy-0.13.0/dipy/io/utils.py000066400000000000000000000025121317371701200156300ustar00rootroot00000000000000''' Utility functions for file formats ''' from __future__ import division, print_function, absolute_import import numpy as np from nibabel import Nifti1Image def nifti1_symmat(image_data, *args, **kwargs): """Returns a Nifti1Image with a symmetric matrix intent Parameters: ----------- image_data : array-like should have lower triangular elements of a symmetric matrix along the last dimension all other arguments and keywords are passed to Nifti1Image Returns: -------- image : Nifti1Image 5d, extra dimensions addes before the last. Has symmetric matrix intent code """ image_data = make5d(image_data) last_dim = image_data.shape[-1] n = (np.sqrt(1+8*last_dim) - 1)/2 if (n % 1) != 0: raise ValueError("input_data does not seem to have matrix elements") image = Nifti1Image(image_data, *args, **kwargs) hdr = image.header hdr.set_intent('symmetric matrix', (n,)) return image def make5d(input): """reshapes the input to have 5 dimensions, adds extra dimensions just before the last dimession """ input = np.asarray(input) if input.ndim > 5: raise ValueError("input is already more than 5d") shape = input.shape shape = shape[:-1] + (1,)*(5-len(shape)) + shape[-1:] return input.reshape(shape) dipy-0.13.0/dipy/io/vtk.py000066400000000000000000000054531317371701200153030ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import from dipy.viz.utils import set_input # Conditional import machinery for vtk from dipy.utils.optpkg import optional_package # Allow import, but disable doctests if we don't have vtk vtk, have_vtk, setup_module = optional_package('vtk') colors, have_vtk_colors, _ = optional_package('vtk.util.colors') ns, have_numpy_support, _ = optional_package('vtk.util.numpy_support') if have_vtk: version = vtk.vtkVersion.GetVTKSourceVersion().split(' ')[-1] major_version = vtk.vtkVersion.GetVTKMajorVersion() def load_polydata(file_name): """ Load a vtk polydata to a supported format file Supported file formats are OBJ, VTK, FIB, PLY, STL and XML Parameters ---------- file_name : string Returns ------- output : vtkPolyData """ # get file extension (type) lower case file_extension = file_name.split(".")[-1].lower() if file_extension == "vtk": reader = vtk.vtkPolyDataReader() elif file_extension == "fib": reader = vtk.vtkPolyDataReader() elif file_extension == "ply": reader = vtk.vtkPLYReader() elif file_extension == "stl": reader = vtk.vtkSTLReader() elif file_extension == "xml": reader = vtk.vtkXMLPolyDataReader() elif file_extension == "obj": try: # try to read as a normal obj reader = vtk.vtkOBJReader() except: # than try load a MNI obj format reader = vtk.vtkMNIObjectReader() else: raise "polydata " + file_extension + " is not suported" reader.SetFileName(file_name) reader.Update() # print(file_name + " Mesh " + file_extension + " Loaded") return reader.GetOutput() def save_polydata(polydata, file_name, 
binary=False, color_array_name=None): """ Save a vtk polydata to a supported format file Save formats can be VTK, FIB, PLY, STL and XML. Parameters ---------- polydata : vtkPolyData file_name : string """ # get file extension (type) file_extension = file_name.split(".")[-1].lower() if file_extension == "vtk": writer = vtk.vtkPolyDataWriter() elif file_extension == "fib": writer = vtk.vtkPolyDataWriter() elif file_extension == "ply": writer = vtk.vtkPLYWriter() elif file_extension == "stl": writer = vtk.vtkSTLWriter() elif file_extension == "xml": writer = vtk.vtkXMLPolyDataWriter() elif file_extension == "obj": raise "mni obj or Wavefront obj ?" # writer = set_input(vtk.vtkMNIObjectWriter(), polydata) writer.SetFileName(file_name) writer = set_input(writer, polydata) if color_array_name is not None: writer.SetArrayName(color_array_name) if binary: writer.SetFileTypeToBinary() writer.Update() writer.Write() dipy-0.13.0/dipy/pkg_info.py000066400000000000000000000055511317371701200156630ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import import os import sys import subprocess from dipy.utils.six.moves import configparser COMMIT_INFO_FNAME = 'COMMIT_INFO.txt' def pkg_commit_hash(pkg_path): ''' Get short form of commit hash given directory `pkg_path` There should be a file called 'COMMIT_INFO.txt' in `pkg_path`. This is a file in INI file format, with at least one section: ``commit hash``, and two variables ``archive_subst_hash`` and ``install_hash``. The first has a substitution pattern in it which may have been filled by the execution of ``git archive`` if this is an archive generated that way. The second is filled in by the installation, if the installation is from a git archive. We get the commit hash from (in order of preference): * A substituted value in ``archive_subst_hash`` * A written commit hash value in ``install_hash` * git's output, if we are in a git repository If all these fail, we return a not-found placeholder tuple Parameters ------------- pkg_path : str directory containing package Returns --------- hash_from : str Where we got the hash from - description hash_str : str short form of hash ''' # Try and get commit from written commit text file pth = os.path.join(pkg_path, COMMIT_INFO_FNAME) if not os.path.isfile(pth): raise IOError('Missing commit info file %s' % pth) cfg_parser = configparser.RawConfigParser() cfg_parser.read(pth) archive_subst = cfg_parser.get('commit hash', 'archive_subst_hash') if not archive_subst.startswith('$Format'): # it has been substituted return 'archive substitution', archive_subst install_subst = cfg_parser.get('commit hash', 'install_hash') if install_subst != '': return 'installation', install_subst # maybe we are in a repository proc = subprocess.Popen('git rev-parse --short HEAD', stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=pkg_path, shell=True) repo_commit, _ = proc.communicate() if repo_commit: return 'repository', repo_commit.strip() return '(none found)', '' def get_pkg_info(pkg_path): ''' Return dict describing the context of this package Parameters ------------ pkg_path : str path containing __init__.py for package Returns ---------- context : dict with named parameters of interest ''' src, hsh = pkg_commit_hash(pkg_path) import numpy import dipy return dict( pkg_path=pkg_path, commit_source=src, commit_hash=hsh, sys_version=sys.version, sys_executable=sys.executable, sys_platform=sys.platform, np_version=numpy.__version__, dipy_version=dipy.__version__) 
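# A minimal usage sketch (assumes an importable dipy with its
# COMMIT_INFO.txt in place): report the package version and where the
# commit hash information came from.
if __name__ == '__main__':
    import os
    import dipy
    info = get_pkg_info(os.path.dirname(dipy.__file__))
    print('dipy %s (commit %s, from %s)' % (info['dipy_version'],
                                            info['commit_hash'],
                                            info['commit_source']))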
dipy-0.13.0/dipy/reconst/000077500000000000000000000000001317371701200151645ustar00rootroot00000000000000dipy-0.13.0/dipy/reconst/__init__.py000066400000000000000000000002331317371701200172730ustar00rootroot00000000000000# init for reconst aka the reconstruction module # Test callable from numpy.testing import Tester test = Tester().test bench = Tester().bench del Tester dipy-0.13.0/dipy/reconst/base.py000066400000000000000000000020011317371701200164410ustar00rootroot00000000000000""" Base-classes for reconstruction models and reconstruction fits. All the models in the reconst module follow the same template: a Model object is used to represent the abstract properties of the model, that are independent of the specifics of the data . These properties are reused whenver fitting a particular set of data (different voxels, for example). """ class ReconstModel(object): """ Abstract class for signal reconstruction models """ def __init__(self, gtab): """Initialization of the abstract class for signal reconstruction models Parameters ---------- gtab : GradientTable class instance """ self.gtab = gtab def fit(self, data, mask=None, **kwargs): return ReconstFit(self, data) class ReconstFit(object): """ Abstract class which holds the fit result of ReconstModel For example that could be holding FA or GFA etc. """ def __init__(self, model, data): self.model = model self.data = data dipy-0.13.0/dipy/reconst/benchmarks/000077500000000000000000000000001317371701200173015ustar00rootroot00000000000000dipy-0.13.0/dipy/reconst/benchmarks/__init__.py000066400000000000000000000000311317371701200214040ustar00rootroot00000000000000# Init for reconst bench dipy-0.13.0/dipy/reconst/benchmarks/bench_bounding_box.py000066400000000000000000000017431317371701200234740ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import """ Benchmarks for bounding_box Run all benchmarks with:: import dipy.reconst as dire dire.bench() If you have doctests enabled by default in nose (with a noserc file or environment variable), and you have a numpy version <= 1.6.1, this will also run the doctests, let's hope they pass. 
Run this benchmark with: nosetests -s --match '(?:^|[\\b_\\.//-])[Bb]ench' /path/to/bench_bounding_box.py """ import numpy as np from numpy.testing import measure from dipy.segment.mask import bounding_box def bench_bounding_box(): vol = np.zeros((100, 100, 100)) vol[0, 0, 0] = 1 times = 100 time = measure("bounding_box(vol)", times) / times print("Bounding_box on a sparse volume: {}".format(time)) vol[:] = 10 times = 1 time = measure("bounding_box(vol)", times) / times print("Bounding_box on a dense volume: {}".format(time)) if __name__ == "__main__": bench_bounding_box() dipy-0.13.0/dipy/reconst/benchmarks/bench_csd.py000066400000000000000000000031251317371701200215640ustar00rootroot00000000000000#!/usr/bin/env python import numpy as np import numpy.testing as npt from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel from dipy.core.gradients import GradientTable from dipy.data import read_stanford_labels def num_grad(gtab): return (~gtab.b0s_mask).sum() def bench_csdeconv(center=(50, 40, 40), width=12): img, gtab, labels_img = read_stanford_labels() data = img.get_data() labels = labels_img.get_data() shape = labels.shape mask = np.in1d(labels, [1, 2]) mask.shape = shape a, b, c = center hw = width // 2 idx = (slice(a - hw, a + hw), slice(b - hw, b + hw), slice(c - hw, c + hw)) data_small = data[idx].copy() mask_small = mask[idx].copy() voxels = mask_small.sum() cmd = "model.fit(data_small, mask_small)" print("== Benchmarking CSD fit on %d voxels ==" % voxels) msg = "SH order - %d, gradient directons - %d :: %g sec" # Basic case sh_order = 8 model = ConstrainedSphericalDeconvModel(gtab, None, sh_order=sh_order) time = npt.measure(cmd) print(msg % (sh_order, num_grad(gtab), time)) # Smaller data set data_small = data_small[..., :75].copy() gtab = GradientTable(gtab.gradients[:75]) model = ConstrainedSphericalDeconvModel(gtab, None, sh_order=sh_order) time = npt.measure(cmd) print(msg % (sh_order, num_grad(gtab), time)) # Super resolution sh_order = 12 model = ConstrainedSphericalDeconvModel(gtab, None, sh_order=sh_order) time = npt.measure(cmd) print(msg % (sh_order, num_grad(gtab), time)) if __name__ == "__main__": bench_csdeconv() dipy-0.13.0/dipy/reconst/benchmarks/bench_peaks.py000066400000000000000000000022101317371701200221100ustar00rootroot00000000000000""" Benchmarks for peak finding Run all benchmarks with:: import dipy.reconst as dire dire.bench() If you have doctests enabled by default in nose (with a noserc file or environment variable), and you have a numpy version <= 1.6.1, this will also run the doctests, let's hope they pass. Run this benchmark with: nosetests -s --match '(?:^|[\\b_\\.//-])[Bb]ench' /path/to/bench_peaks.py """ import numpy as np from dipy.reconst.recspeed import local_maxima from dipy.data import get_sphere from dipy.core.sphere import unique_edges from numpy.testing import measure def bench_local_maxima(): repeat = 10000 sphere = get_sphere('symmetric724') vertices, faces = sphere.vertices, sphere.faces odf = abs(vertices.sum(-1)) edges = unique_edges(faces) print('Timing peak finding') timed0 = measure("local_maxima(odf, edges)", repeat) print('Actual sphere: %0.2f' % timed0) # Create an artificial odf with a few peaks odf = np.zeros(len(vertices)) odf[1] = 1. odf[143] = 143. odf[505] = 505. 
timed1 = measure("local_maxima(odf, edges)", repeat) print('Few-peak sphere: %0.2f' % timed1) dipy-0.13.0/dipy/reconst/benchmarks/bench_squash.py000066400000000000000000000121371317371701200223220ustar00rootroot00000000000000""" Benchmarks for fast squashing Run all benchmarks with:: import dipy.reconst as dire dire.bench() If you have doctests enabled by default in nose (with a noserc file or environment variable), and you have a numpy version <= 1.6.1, this will also run the doctests, let's hope they pass. Run this benchmark with: nosetests -s --match '(?:^|[\\b_\\.//-])[Bb]ench' /path/to/bench_squash.py """ from __future__ import division, print_function, absolute_import from functools import reduce import numpy as np from dipy.core.ndindex import ndindex from dipy.reconst.quick_squash import quick_squash from numpy.testing import measure, dec def old_squash(arr, mask=None, fill=0): """Try and make a standard array from an object array This function takes an object array and attempts to convert it to a more useful dtype. If array can be converted to a better dtype, Nones are replaced by `fill`. To make the behaviour of this function more clear, here are the most common cases: 1. `arr` is an array of scalars of type `T`. Returns an array like `arr.astype(T)` 2. `arr` is an array of arrays. All items in `arr` have the same shape `S`. Returns an array with shape `arr.shape + S`. 3. `arr` is an array of arrays of different shapes. Returns `arr`. 4. Items in `arr` are not ndarrys or scalars. Returns `arr`. Parameters ---------- arr : array, dtype=object The array to be converted. mask : array, dtype=bool, optional Where arr has Nones. fill : number, optional Nones are replaced by fill. Returns ------- result : array Examples -------- >>> arr = np.empty(3, dtype=object) >>> arr.fill(2) >>> old_squash(arr) array([2, 2, 2]) >>> arr[0] = None >>> old_squash(arr) array([0, 2, 2]) >>> arr.fill(np.ones(2)) >>> r = old_squash(arr) >>> r.shape == (3, 2) True >>> r.dtype dtype('float64') """ if mask is None: mask = np.vectorize(lambda x : x is not None)(arr) not_none = arr[mask] # all None, just return arr if not_none.size == 0: return arr first = not_none[0] # If the first item is an ndarray if type(first) is np.ndarray: shape = first.shape try: # Check the shapes of all items all_same_shape = all(item.shape == shape for item in not_none) except AttributeError: return arr # If items have different shapes just return arr if not all_same_shape: return arr # Find common dtype. np.result_type can do this more simply, but it is # only available for numpy 1.6.0 dtypes = set(a.dtype for a in not_none) tiny_arrs = [np.zeros((1,), dtype=dt) for dt in dtypes] dtype = reduce(np.add, tiny_arrs).dtype # Create output array and fill result = np.empty(arr.shape + shape, dtype=dtype) result.fill(fill) for ijk in ndindex(arr.shape): if mask[ijk]: result[ijk] = arr[ijk] return result # If the first item is a scalar elif np.isscalar(first): "first is not an ndarray" all_scalars = all(np.isscalar(item) for item in not_none) if not all_scalars: return arr # See comment about np.result_type above. We sum against the smallest # possible type, bool, and let numpy type promotion find the best # common type. The values might all be Python scalars so we need to # cast to numpy type at the end to be sure of having a dtype. 
dtype = np.asarray(sum(not_none, False)).dtype temp = arr.copy() temp[~mask] = fill return temp.astype(dtype) else: return arr def bench_quick_squash(): # nosetests -s --match '(?:^|[\\b_\\.//-])[Bb]ench' repeat = 10 shape = (300, 200) arrs = np.zeros(shape, dtype=object) scalars = np.zeros(shape, dtype=object) for ijk in ndindex(arrs.shape): arrs[ijk] = np.ones((3, 5)) scalars[ijk] = np.float32(0) print('\nSquashing benchmarks') for name, objs in ( ('floats', np.zeros(shape, float).astype(object)), ('ints', np.zeros(shape, int).astype(object)), ('arrays', arrs), ('scalars', scalars), ): print(name) timed0 = measure("quick_squash(objs)", repeat) timed1 = measure("old_squash(objs)", repeat) print("fast %4.2f; slow %4.2f" % (timed0, timed1)) objs[50, 50] = None timed0 = measure("quick_squash(objs)", repeat) timed1 = measure("old_squash(objs)", repeat) print("With None: fast %4.2f; slow %4.2f" % (timed0, timed1)) msk = objs != np.array(None) timed0 = measure("quick_squash(objs, msk)", repeat) timed1 = measure("old_squash(objs, msk)", repeat) print("With mask: fast %4.2f; slow %4.2f" % (timed0, timed1)) objs[50, 50] = np.float32(0) timed0 = measure("quick_squash(objs, msk)", repeat) timed1 = measure("old_squash(objs, msk)", repeat) print("Other dtype: fast %4.2f; slow %4.2f" % (timed0, timed1)) dipy-0.13.0/dipy/reconst/benchmarks/bench_vec_val_sum.py000066400000000000000000000020541317371701200233160ustar00rootroot00000000000000""" Benchmarks for vec / val summation routine Run benchmarks with:: import dipy.reconst as dire dire.bench() If you have doctests enabled by default in nose (with a noserc file or environment variable), and you have a numpy version <= 1.6.1, this will also run the doctests, let's hope they pass. """ import numpy as np from numpy.random import randn from dipy.reconst.vec_val_sum import vec_val_vect from numpy.testing import measure, dec try: np.einsum except AttributeError: with_einsum = dec.skipif(True, "Need einsum for benchmark") else: def with_einsum(f): return f @with_einsum def bench_vec_val_vect(): # nosetests -s --match '(?:^|[\\b_\\.//-])[Bb]ench' repeat = 100 shape = (100, 100) evecs, evals = randn(*(shape + (3, 3))), randn(*(shape + (3,))) etime = measure("np.einsum('...ij,...j,...kj->...ik', evecs, evals, \ evecs)", repeat) vtime = measure("vec_val_vect(evecs, evals)", repeat) print("einsum %4.2f; vec_val_vect %4.2f" % (etime, vtime)) dipy-0.13.0/dipy/reconst/cache.py000066400000000000000000000047011317371701200166030ustar00rootroot00000000000000from dipy.core.onetime import auto_attr class Cache(object): """Cache values based on a key object (such as a sphere or gradient table). Notes ----- This class is meant to be used as a mix-in:: class MyModel(Model, Cache): pass class MyModelFit(Fit): pass Inside a method on the fit, typical usage would be:: def odf(sphere): M = self.model.cache_get('odf_basis_matrix', key=sphere) if M is None: M = self._compute_basis_matrix(sphere) self.model.cache_set('odf_basis_matrix', key=sphere, value=M) """ # We use this method instead of __init__ to construct the cache, so # that the class can be used as a mixin, without having to worry about # calling the super-class constructor @auto_attr def _cache(self): return {} def cache_set(self, tag, key, value): """Store a value in the cache. Parameters ---------- tag : str Description of the cached value. key : object Key object used to look up the cached value. value : object Value stored in the cache for each unique combination of ``(tag, key)``. 
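        Notes
        -----
        The cache is a plain dictionary keyed on the ``(tag, key)`` tuple, so
        `key` must be hashable, and setting a value for an existing
        combination silently overwrites the previously stored value.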
Examples -------- >>> def compute_expensive_matrix(parameters): ... # Imagine the following computation is very expensive ... return (p**2 for p in parameters) >>> c = Cache() >>> parameters = (1, 2, 3) >>> X1 = compute_expensive_matrix(parameters) >>> c.cache_set('expensive_matrix', parameters, X1) >>> X2 = c.cache_get('expensive_matrix', parameters) >>> X1 is X2 True """ self._cache[(tag, key)] = value def cache_get(self, tag, key, default=None): """Retrieve a value from the cache. Parameters ---------- tag : str Description of the cached value. key : object Key object used to look up the cached value. default : object Value to be returned if no cached entry is found. Returns ------- v : object Value from the cache associated with ``(tag, key)``. Returns `default` if no cached entry is found. """ return self._cache.get((tag, key), default) def cache_clear(self): """Clear the cache. """ self._cache = {} dipy-0.13.0/dipy/reconst/cross_validation.py000066400000000000000000000135421317371701200211060ustar00rootroot00000000000000""" Cross-validation analysis of diffusion models """ from __future__ import division, print_function, absolute_import from dipy.utils.six.moves import range import numpy as np import dipy.core.gradients as gt def coeff_of_determination(data, model, axis=-1): """ Calculate the coefficient of determination for a model prediction, relative to data. Parameters ---------- data : ndarray The data model : ndarray The predictions of a model for this data. Same shape as the data. axis: int, optional The axis along which different samples are laid out (default: -1). Returns ------- COD : ndarray The coefficient of determination. This has shape `data.shape[:-1]` Notes ----- See: http://en.wikipedia.org/wiki/Coefficient_of_determination The coefficient of determination is calculated as: .. math:: R^2 = 100 * (1 - \frac{SSE}{SSD}) where SSE is the sum of the squared error between the model and the data (sum of the squared residuals) and SSD is the sum of the squares of the deviations of the data from the mean of the data (variance * N). """ residuals = data - model ss_err = np.sum(residuals ** 2, axis=axis) demeaned_data = data - np.mean(data, axis=axis)[..., np.newaxis] ss_tot = np.sum(demeaned_data ** 2, axis=axis) # Don't divide by 0: if np.all(ss_tot == 0.0): return np.nan return 100 * (1 - (ss_err/ss_tot)) def kfold_xval(model, data, folds, *model_args, **model_kwargs): """ Perform k-fold cross-validation to generate out-of-sample predictions for each measurement. Parameters ---------- model : Model class instance The type of the model to use for prediction. The corresponding Fit object must have a `predict` function implementd One of the following: `reconst.dti.TensorModel` or `reconst.csdeconv.ConstrainedSphericalDeconvModel`. data : ndarray Diffusion MRI data acquired with the GradientTable of the model. Shape will typically be `(x, y, z, b)` where `xyz` are spatial dimensions and b is the number of bvals/bvecs in the GradientTable. folds : int The number of divisions to apply to the data model_args : list Additional arguments to the model initialization model_kwargs : dict Additional key-word arguments to the model initialization. If contains the kwarg `mask`, this will be used as a key-word argument to the `fit` method of the model object, rather than being used in the initialization of the model object Notes ----- This function assumes that a prediction API is implemented in the Model class for which prediction is conducted. 
That is, the Fit object that gets generated upon fitting the model needs to have a `predict` method, which receives a GradientTable class instance as input and produces a predicted signal as output. It also assumes that the model object has `bval` and `bvec` attributes holding b-values and corresponding unit vectors. References ---------- .. [1] Rokem, A., Chan, K.L. Yeatman, J.D., Pestilli, F., Mezer, A., Wandell, B.A., 2014. Evaluating the accuracy of diffusion models at multiple b-values with cross-validation. ISMRM 2014. """ # This should always be there, if the model inherits from # dipy.reconst.base.ReconstModel: gtab = model.gtab data_b = data[..., ~gtab.b0s_mask] div_by_folds = np.mod(data_b.shape[-1], folds) # Make sure that an equal number of samples get left out in each fold: if div_by_folds != 0: msg = "The number of folds must divide the diffusion-weighted " msg += "data equally, but " msg = "np.mod(%s, %s) is %s" % (data_b.shape[-1], folds, div_by_folds) raise ValueError(msg) data_0 = data[..., gtab.b0s_mask] S0 = np.mean(data_0, -1) n_in_fold = data_b.shape[-1] / folds prediction = np.zeros(data.shape) # We are going to leave out some randomly chosen samples in each iteration: order = np.random.permutation(data_b.shape[-1]) nz_bval = gtab.bvals[~gtab.b0s_mask] nz_bvec = gtab.bvecs[~gtab.b0s_mask] # Pop the mask, if there is one, out here for use in every fold: mask = model_kwargs.pop('mask', None) gtgt = gt.gradient_table # Shorthand for k in range(folds): fold_mask = np.ones(data_b.shape[-1], dtype=bool) fold_idx = order[int(k * n_in_fold): int((k + 1) * n_in_fold)] fold_mask[fold_idx] = False this_data = np.concatenate([data_0, data_b[..., fold_mask]], -1) this_gtab = gtgt(np.hstack([gtab.bvals[gtab.b0s_mask], nz_bval[fold_mask]]), np.concatenate([gtab.bvecs[gtab.b0s_mask], nz_bvec[fold_mask]])) left_out_gtab = gtgt(np.hstack([gtab.bvals[gtab.b0s_mask], nz_bval[~fold_mask]]), np.concatenate([gtab.bvecs[gtab.b0s_mask], nz_bvec[~fold_mask]])) this_model = model.__class__(this_gtab, *model_args, **model_kwargs) this_fit = this_model.fit(this_data, mask=mask) if not hasattr(this_fit, 'predict'): err_str = "Models of type: %s " % this_model.__class__ err_str += "do not have an implementation of model prediction" err_str += " and do not support cross-validation" raise ValueError(err_str) this_predict = S0[..., None] * this_fit.predict(left_out_gtab, S0=1) idx_to_assign = np.where(~gtab.b0s_mask)[0][~fold_mask] prediction[..., idx_to_assign] =\ this_predict[..., np.sum(gtab.b0s_mask):] # For the b0 measurements prediction[..., gtab.b0s_mask] = S0[..., None] return prediction dipy-0.13.0/dipy/reconst/csdeconv.py000066400000000000000000001166151317371701200173540ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import import warnings import numpy as np from scipy.integrate import quad from scipy.special import lpn, gamma import scipy.linalg as la import scipy.linalg.lapack as ll from dipy.data import small_sphere, get_sphere, default_sphere from dipy.core.geometry import cart2sphere from dipy.core.ndindex import ndindex from dipy.sims.voxel import single_tensor from dipy.utils.six.moves import range from dipy.reconst.multi_voxel import multi_voxel_fit from dipy.reconst.dti import TensorModel, fractional_anisotropy from dipy.reconst.shm import (sph_harm_ind_list, real_sph_harm, sph_harm_lookup, lazy_index, SphHarmFit, real_sym_sh_basis, sh_to_rh, forward_sdeconv_mat, SphHarmModel) from dipy.direction.peaks import peaks_from_model from 
dipy.core.geometry import vec2vec_rotmat class AxSymShResponse(object): """A simple wrapper for response functions represented using only axially symmetric, even spherical harmonic functions (ie, m == 0 and n even). Parameters: ----------- S0 : float Signal with no diffusion weighting. dwi_response : array Response function signal as coefficients to axially symmetric, even spherical harmonic. """ def __init__(self, S0, dwi_response, bvalue=None): self.S0 = S0 self.dwi_response = dwi_response self.bvalue = bvalue self.m = np.zeros(len(dwi_response)) self.sh_order = 2 * (len(dwi_response) - 1) self.n = np.arange(0, self.sh_order + 1, 2) def basis(self, sphere): """A basis that maps the response coefficients onto a sphere.""" theta = sphere.theta[:, None] phi = sphere.phi[:, None] return real_sph_harm(self.m, self.n, theta, phi) def on_sphere(self, sphere): """Evaluates the response function on sphere.""" B = self.basis(sphere) return np.dot(self.dwi_response, B.T) class ConstrainedSphericalDeconvModel(SphHarmModel): def __init__(self, gtab, response, reg_sphere=None, sh_order=8, lambda_=1, tau=0.1): r""" Constrained Spherical Deconvolution (CSD) [1]_. Spherical deconvolution computes a fiber orientation distribution (FOD), also called fiber ODF (fODF) [2]_, as opposed to a diffusion ODF as the QballModel or the CsaOdfModel. This results in a sharper angular profile with better angular resolution that is the best object to be used for later deterministic and probabilistic tractography [3]_. A sharp fODF is obtained because a single fiber *response* function is injected as *a priori* knowledge. The response function is often data-driven and is thus provided as input to the ConstrainedSphericalDeconvModel. It will be used as deconvolution kernel, as described in [1]_. Parameters ---------- gtab : GradientTable response : tuple or AxSymShResponse object A tuple with two elements. The first is the eigen-values as an (3,) ndarray and the second is the signal value for the response function without diffusion weighting. This is to be able to generate a single fiber synthetic signal. The response function will be used as deconvolution kernel ([1]_) reg_sphere : Sphere (optional) sphere used to build the regularization B matrix. Default: 'symmetric362'. sh_order : int (optional) maximal spherical harmonics order. Default: 8 lambda_ : float (optional) weight given to the constrained-positivity regularization part of the deconvolution equation (see [1]_). Default: 1 tau : float (optional) threshold controlling the amplitude below which the corresponding fODF is assumed to be zero. Ideally, tau should be set to zero. However, to improve the stability of the algorithm, tau is set to tau*100 % of the mean fODF amplitude (here, 10% by default) (see [1]_). Default: 0.1 References ---------- .. [1] Tournier, J.D., et al. NeuroImage 2007. Robust determination of the fibre orientation distribution in diffusion MRI: Non-negativity constrained super-resolved spherical deconvolution .. [2] Descoteaux, M., et al. IEEE TMI 2009. Deterministic and Probabilistic Tractography Based on Complex Fibre Orientation Distributions .. [3] C\^ot\'e, M-A., et al. Medical Image Analysis 2013. Tractometer: Towards validation of tractography pipelines .. [4] Tournier, J.D, et al. Imaging Systems and Technology 2012. 
MRtrix: Diffusion Tractography in Crossing Fiber Regions """ # Initialize the parent class: SphHarmModel.__init__(self, gtab) m, n = sph_harm_ind_list(sh_order) self.m, self.n = m, n self._where_b0s = lazy_index(gtab.b0s_mask) self._where_dwi = lazy_index(~gtab.b0s_mask) no_params = ((sh_order + 1) * (sh_order + 2)) / 2 if no_params > np.sum(~gtab.b0s_mask): msg = "Number of parameters required for the fit are more " msg += "than the actual data points" warnings.warn(msg, UserWarning) x, y, z = gtab.gradients[self._where_dwi].T r, theta, phi = cart2sphere(x, y, z) # for the gradient sphere self.B_dwi = real_sph_harm(m, n, theta[:, None], phi[:, None]) # for the sphere used in the regularization positivity constraint if reg_sphere is None: self.sphere = small_sphere else: self.sphere = reg_sphere r, theta, phi = cart2sphere( self.sphere.x, self.sphere.y, self.sphere.z ) self.B_reg = real_sph_harm(m, n, theta[:, None], phi[:, None]) if response is None: response = (np.array([0.0015, 0.0003, 0.0003]), 1) self.response = response if isinstance(response, AxSymShResponse): r_sh = response.dwi_response self.response_scaling = response.S0 n_response = response.n m_response = response.m else: self.S_r = estimate_response(gtab, self.response[0], self.response[1]) r_sh = np.linalg.lstsq(self.B_dwi, self.S_r[self._where_dwi])[0] n_response = n m_response = m self.response_scaling = response[1] r_rh = sh_to_rh(r_sh, m_response, n_response) self.R = forward_sdeconv_mat(r_rh, n) # scale lambda_ to account for differences in the number of # SH coefficients and number of mapped directions # This is exactly what is done in [4]_ lambda_ = (lambda_ * self.R.shape[0] * r_rh[0] / (np.sqrt(self.B_reg.shape[0]) * np.sqrt(362.))) self.B_reg *= lambda_ self.sh_order = sh_order self.tau = tau self._X = X = self.R.diagonal() * self.B_dwi self._P = np.dot(X.T, X) @multi_voxel_fit def fit(self, data): dwi_data = data[self._where_dwi] shm_coeff, _ = csdeconv(dwi_data, self._X, self.B_reg, self.tau, P=self._P) return SphHarmFit(self, shm_coeff, None) def predict(self, sh_coeff, gtab=None, S0=1.): """Compute a signal prediction given spherical harmonic coefficients for the provided GradientTable class instance. Parameters ---------- sh_coeff : ndarray The spherical harmonic representation of the FOD from which to make the signal prediction. gtab : GradientTable The gradients for which the signal will be predicted. Use the model's gradient table by default. S0 : ndarray or float The non diffusion-weighted signal value. Returns ------- pred_sig : ndarray The predicted signal. """ if gtab is None or gtab is self.gtab: SH_basis = self.B_dwi gtab = self.gtab else: x, y, z = gtab.gradients[~gtab.b0s_mask].T r, theta, phi = cart2sphere(x, y, z) SH_basis, m, n = real_sym_sh_basis(self.sh_order, theta, phi) # Because R is diagonal, the matrix multiply is written as a multiply predict_matrix = SH_basis * self.R.diagonal() S0 = np.asarray(S0)[..., None] scaling = S0 / self.response_scaling # This is the key operation: convolve and multiply by S0: pre_pred_sig = scaling * np.dot(sh_coeff, predict_matrix.T) # Now put everything in its right place: pred_sig = np.zeros(pre_pred_sig.shape[:-1] + (gtab.bvals.shape[0],)) pred_sig[..., ~gtab.b0s_mask] = pre_pred_sig pred_sig[..., gtab.b0s_mask] = S0 return pred_sig class ConstrainedSDTModel(SphHarmModel): def __init__(self, gtab, ratio, reg_sphere=None, sh_order=8, lambda_=1., tau=0.1): r""" Spherical Deconvolution Transform (SDT) [1]_. 
The SDT computes a fiber orientation distribution (FOD) as opposed to a diffusion ODF as the QballModel or the CsaOdfModel. This results in a sharper angular profile with better angular resolution. The Constrained SDTModel is similar to the Constrained CSDModel but mathematically it deconvolves the q-ball ODF as oppposed to the HARDI signal (see [1]_ for a comparison and a through discussion). A sharp fODF is obtained because a single fiber *response* function is injected as *a priori* knowledge. In the SDTModel, this response is a single fiber q-ball ODF as opposed to a single fiber signal function for the CSDModel. The response function will be used as deconvolution kernel. Parameters ---------- gtab : GradientTable ratio : float ratio of the smallest vs the largest eigenvalue of the single prolate tensor response function reg_sphere : Sphere sphere used to build the regularization B matrix sh_order : int maximal spherical harmonics order lambda_ : float weight given to the constrained-positivity regularization part of the deconvolution equation tau : float threshold (tau *mean(fODF)) controlling the amplitude below which the corresponding fODF is assumed to be zero. References ---------- .. [1] Descoteaux, M., et al. IEEE TMI 2009. Deterministic and Probabilistic Tractography Based on Complex Fibre Orientation Distributions. """ SphHarmModel.__init__(self, gtab) m, n = sph_harm_ind_list(sh_order) self.m, self.n = m, n self._where_b0s = lazy_index(gtab.b0s_mask) self._where_dwi = lazy_index(~gtab.b0s_mask) no_params = ((sh_order + 1) * (sh_order + 2)) / 2 if no_params > np.sum(~gtab.b0s_mask): msg = "Number of parameters required for the fit are more " msg += "than the actual data points" warnings.warn(msg, UserWarning) x, y, z = gtab.gradients[self._where_dwi].T r, theta, phi = cart2sphere(x, y, z) # for the gradient sphere self.B_dwi = real_sph_harm(m, n, theta[:, None], phi[:, None]) # for the odf sphere if reg_sphere is None: self.sphere = get_sphere('symmetric362') else: self.sphere = reg_sphere r, theta, phi = cart2sphere( self.sphere.x, self.sphere.y, self.sphere.z ) self.B_reg = real_sph_harm(m, n, theta[:, None], phi[:, None]) self.R, self.P = forward_sdt_deconv_mat(ratio, n) # scale lambda_ to account for differences in the number of # SH coefficients and number of mapped directions self.lambda_ = (lambda_ * self.R.shape[0] * self.R[0, 0] / self.B_reg.shape[0]) self.tau = tau self.sh_order = sh_order @multi_voxel_fit def fit(self, data): s_sh = np.linalg.lstsq(self.B_dwi, data[self._where_dwi])[0] # initial ODF estimation odf_sh = np.dot(self.P, s_sh) qball_odf = np.dot(self.B_reg, odf_sh) Z = np.linalg.norm(qball_odf) # normalize ODF odf_sh /= Z shm_coeff, num_it = odf_deconv(odf_sh, self.R, self.B_reg, self.lambda_, self.tau) # print 'SDT CSD converged after %d iterations' % num_it return SphHarmFit(self, shm_coeff, None) def estimate_response(gtab, evals, S0): """ Estimate single fiber response function Parameters ---------- gtab : GradientTable evals : ndarray S0 : float non diffusion weighted Returns ------- S : estimated signal """ evecs = np.array([[0, 0, 1], [0, 1, 0], [1, 0, 0]]) return single_tensor(gtab, S0, evals, evecs, snr=None) def forward_sdt_deconv_mat(ratio, n, r2_term=False): """ Build forward sharpening deconvolution transform (SDT) matrix Parameters ---------- ratio : float ratio = $\frac{\lambda_2}{\lambda_1}$ of the single fiber response function n : ndarray (N,) The degree of spherical harmonic function associated with each row of the deconvolution 
matrix. Only even degrees are allowed. r2_term : bool True if ODF comes from an ODF computed from a model using the $r^2$ term in the integral. For example, DSI, GQI, SHORE, CSA, Tensor, Multi-tensor ODFs. This results in using the proper analytical response function solution solving from the single-fiber ODF with the r^2 term. This derivation is not published anywhere but is very similar to [1]_. Returns ------- R : ndarray (N, N) SDT deconvolution matrix P : ndarray (N, N) Funk-Radon Transform (FRT) matrix References ---------- .. [1] Descoteaux, M. PhD Thesis. INRIA Sophia-Antipolis. 2008. """ if np.any(n % 2): raise ValueError("n has odd degrees, expecting only even degrees") n_degrees = n.max() // 2 + 1 sdt = np.zeros(n_degrees) # SDT matrix frt = np.zeros(n_degrees) # FRT (Funk-Radon transform) q-ball matrix for l in np.arange(0, n_degrees * 2, 2): if r2_term: sharp = quad(lambda z: lpn(l, z)[0][-1] * gamma(1.5) * np.sqrt(ratio / (4 * np.pi ** 3)) / np.power((1 - (1 - ratio) * z ** 2), 1.5), -1., 1.) else: sharp = quad(lambda z: lpn(l, z)[0][-1] * np.sqrt(1 / (1 - (1 - ratio) * z * z)), -1., 1.) sdt[l // 2] = sharp[0] frt[l // 2] = 2 * np.pi * lpn(l, 0)[0][-1] idx = n // 2 b = sdt[idx] bb = frt[idx] return np.diag(b), np.diag(bb) potrf, potrs = ll.get_lapack_funcs(('potrf', 'potrs')) def _solve_cholesky(Q, z): L, info = potrf(Q, lower=False, overwrite_a=False, clean=False) if info > 0: msg = "%d-th leading minor not positive definite" % info raise la.LinAlgError(msg) if info < 0: msg = 'illegal value in %d-th argument of internal potrf' % -info raise ValueError(msg) f, info = potrs(L, z, lower=False, overwrite_b=False) if info != 0: msg = 'illegal value in %d-th argument of internal potrs' % -info raise ValueError(msg) return f def csdeconv(dwsignal, X, B_reg, tau=0.1, convergence=50, P=None): r""" Constrained-regularized spherical deconvolution (CSD) [1]_ Deconvolves the axially symmetric single fiber response function `r_rh` in rotational harmonics coefficients from the diffusion weighted signal in `dwsignal`. Parameters ---------- dwsignal : array Diffusion weighted signals to be deconvolved. X : array Prediction matrix which estimates diffusion weighted signals from FOD coefficients. B_reg : array (N, B) SH basis matrix which maps FOD coefficients to FOD values on the surface of the sphere. B_reg should be scaled to account for lambda. tau : float Threshold controlling the amplitude below which the corresponding fODF is assumed to be zero. Ideally, tau should be set to zero. However, to improve the stability of the algorithm, tau is set to tau*100 % of the max fODF amplitude (here, 10% by default). This is similar to peak detection where peaks below 0.1 amplitude are usually considered noise peaks. Because SDT is based on a q-ball ODF deconvolution, and not signal deconvolution, using the max instead of mean (as in CSD), is more stable. convergence : int Maximum number of iterations to allow the deconvolution to converge. P : ndarray This is an optimization to avoid computing ``dot(X.T, X)`` many times. If the same ``X`` is used many times, ``P`` can be precomputed and passed to this function. Returns ------- fodf_sh : ndarray (``(sh_order + 1)*(sh_order + 2)/2``,) Spherical harmonics coefficients of the constrained-regularized fiber ODF. num_it : int Number of iterations in the constrained-regularization used for convergence. Notes ----- This section describes how the fitting of the SH coefficients is done. 
Problem is to minimise per iteration: $F(f_n) = ||Xf_n - S||^2 + \lambda^2 ||H_{n-1} f_n||^2$ Where $X$ maps current FOD SH coefficients $f_n$ to DW signals $s$ and $H_{n-1}$ maps FOD SH coefficients $f_n$ to amplitudes along set of negative directions identified in previous iteration, i.e. the matrix formed by the rows of $B_{reg}$ for which $Hf_{n-1}<0$ where $B_{reg}$ maps $f_n$ to FOD amplitude on a sphere. Solve by differentiating and setting to zero: $\Rightarrow \frac{\delta F}{\delta f_n} = 2X^T(Xf_n - S) + 2 \lambda^2 H_{n-1}^TH_{n-1}f_n=0$ Or: $(X^TX + \lambda^2 H_{n-1}^TH_{n-1})f_n = X^Ts$ Define $Q = X^TX + \lambda^2 H_{n-1}^TH_{n-1}$ , which by construction is a square positive definite symmetric matrix of size $n_{SH} by n_{SH}$. If needed, positive definiteness can be enforced with a small minimum norm regulariser (helps a lot with poorly conditioned direction sets and/or superresolution): $Q = X^TX + (\lambda H_{n-1}^T) (\lambda H_{n-1}) + \mu I$ Solve $Qf_n = X^Ts$ using Cholesky decomposition: $Q = LL^T$ where $L$ is lower triangular. Then problem can be solved by back-substitution: $L_y = X^Ts$ $L^Tf_n = y$ To speeds things up further, form $P = X^TX + \mu I$, and update to form $Q$ by rankn update with $H_{n-1}$. The dipy implementation looks like: form initially $P = X^T X + \mu I$ and $\lambda B_{reg}$ for each voxel: form $z = X^Ts$ estimate $f_0$ by solving $Pf_0=z$. We use a simplified $l_{max}=4$ solution here, but it might not make a big difference. Then iterate until no change in rows of $H$ used in $H_n$ form $H_{n}$ given $f_{n-1}$ form $Q = P + (\lambda H_{n-1}^T) (\lambda H_{n-1}$) (this can be done by rankn update, but we currently do not use rankn update). solve $Qf_n = z$ using Cholesky decomposition We'd like to thanks Donald Tournier for his help with describing and implementing this algorithm. References ---------- .. [1] Tournier, J.D., et al. NeuroImage 2007. Robust determination of the fibre orientation distribution in diffusion MRI: Non-negativity constrained super-resolved spherical deconvolution. """ mu = 1e-5 if P is None: P = np.dot(X.T, X) z = np.dot(X.T, dwsignal) try: fodf_sh = _solve_cholesky(P, z) except la.LinAlgError: P = P + mu * np.eye(P.shape[0]) fodf_sh = _solve_cholesky(P, z) # For the first iteration we use a smooth FOD that only uses SH orders up # to 4 (the first 15 coefficients). fodf = np.dot(B_reg[:, :15], fodf_sh[:15]) # The mean of an fodf can be computed by taking $Y_{0,0} * coeff_{0,0}$ threshold = B_reg[0, 0] * fodf_sh[0] * tau where_fodf_small = (fodf < threshold).nonzero()[0] # If the low-order fodf does not have any values less than threshold, the # full-order fodf is used. if len(where_fodf_small) == 0: fodf = np.dot(B_reg, fodf_sh) where_fodf_small = (fodf < threshold).nonzero()[0] # If the fodf still has no values less than threshold, return the fodf. if len(where_fodf_small) == 0: return fodf_sh, 0 for num_it in range(1, convergence + 1): # This is the super-resolved trick. Wherever there is a negative # amplitude value on the fODF, it concatenates a value to the S vector # so that the estimation can focus on trying to eliminate it. In a # sense, this "adds" a measurement, which can help to better estimate # the fodf_sh, even if you have more SH coefficients to estimate than # actual S measurements. H = B_reg.take(where_fodf_small, axis=0) # We use the Cholesky decomposition to solve for the SH coefficients. 
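            # In the notation of the Notes section this forms Q = P + H^T H,
            # i.e. X^T X (with the small mu*I ridge added above only if the
            # initial Cholesky factorization failed) plus the squared
            # constraint matrix. The lambda_ weight is already folded into
            # B_reg, and hence into H, by the model constructor, so it does
            # not appear explicitly here.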
Q = P + np.dot(H.T, H) fodf_sh = _solve_cholesky(Q, z) # Sample the FOD using the regularization sphere and compute k. fodf = np.dot(B_reg, fodf_sh) where_fodf_small_last = where_fodf_small where_fodf_small = (fodf < threshold).nonzero()[0] if (len(where_fodf_small) == len(where_fodf_small_last) and (where_fodf_small == where_fodf_small_last).all()): break else: msg = 'maximum number of iterations exceeded - failed to converge' warnings.warn(msg) return fodf_sh, num_it def odf_deconv(odf_sh, R, B_reg, lambda_=1., tau=0.1, r2_term=False): r""" ODF constrained-regularized spherical deconvolution using the Sharpening Deconvolution Transform (SDT) [1]_, [2]_. Parameters ---------- odf_sh : ndarray (``(sh_order + 1)*(sh_order + 2)/2``,) ndarray of SH coefficients for the ODF spherical function to be deconvolved R : ndarray (``(sh_order + 1)(sh_order + 2)/2``, ``(sh_order + 1)(sh_order + 2)/2``) SDT matrix in SH basis B_reg : ndarray (``(sh_order + 1)(sh_order + 2)/2``, ``(sh_order + 1)(sh_order + 2)/2``) SH basis matrix used for deconvolution lambda_ : float lambda parameter in minimization equation (default 1.0) tau : float threshold (tau *max(fODF)) controlling the amplitude below which the corresponding fODF is assumed to be zero. r2_term : bool True if ODF is computed from model that uses the $r^2$ term in the integral. Recall that Tuch's ODF (used in Q-ball Imaging [1]_) and the true normalized ODF definition differ from a $r^2$ term in the ODF integral. The original Sharpening Deconvolution Transform (SDT) technique [2]_ is expecting Tuch's ODF without the $r^2$ (see [3]_ for the mathematical details). Now, this function supports ODF that have been computed using the $r^2$ term because the proper analytical response function has be derived. For example, models such as DSI, GQI, SHORE, CSA, Tensor, Multi-tensor ODFs, should now be deconvolved with the r2_term=True. Returns ------- fodf_sh : ndarray (``(sh_order + 1)(sh_order + 2)/2``,) Spherical harmonics coefficients of the constrained-regularized fiber ODF num_it : int Number of iterations in the constrained-regularization used for convergence References ---------- .. [1] Tuch, D. MRM 2004. Q-Ball Imaging. .. [2] Descoteaux, M., et al. IEEE TMI 2009. Deterministic and Probabilistic Tractography Based on Complex Fibre Orientation Distributions .. [3] Descoteaux, M, PhD thesis, INRIA Sophia-Antipolis, 2008. """ # In ConstrainedSDTModel.fit, odf_sh is divided by its norm (Z) and # sometimes the norm is 0 which creates NaNs. 
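    # Returning a zero fODF (with zero iterations) in that case keeps
    # per-voxel callers such as odf_sh_to_sharp from propagating NaNs
    # any further.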
if np.any(np.isnan(odf_sh)): return np.zeros_like(odf_sh), 0 # Generate initial fODF estimate, which is the ODF truncated at SH order 4 fodf_sh = np.linalg.lstsq(R, odf_sh)[0] fodf_sh[15:] = 0 fodf = np.dot(B_reg, fodf_sh) # if sharpening a q-ball odf (it is NOT properly normalized), we need to # force normalization otherwise, for DSI, CSA, SHORE, Tensor odfs, they are # normalized by construction if ~r2_term: Z = np.linalg.norm(fodf) fodf_sh /= Z fodf = np.dot(B_reg, fodf_sh) threshold = tau * np.max(np.dot(B_reg, fodf_sh)) # print(np.min(fodf), np.max(fodf), np.mean(fodf), threshold, tau) k = [] convergence = 50 for num_it in range(1, convergence + 1): A = np.dot(B_reg, fodf_sh) k2 = np.nonzero(A < threshold)[0] if (k2.shape[0] + R.shape[0]) < B_reg.shape[1]: warnings.warn( 'too few negative directions identified - failed to converge') return fodf_sh, num_it if num_it > 1 and k.shape[0] == k2.shape[0]: if (k == k2).all(): return fodf_sh, num_it k = k2 M = np.concatenate((R, lambda_ * B_reg[k, :])) ODF = np.concatenate((odf_sh, np.zeros(k.shape))) try: fodf_sh = np.linalg.lstsq(M, ODF)[0] except np.linalg.LinAlgError as lae: # SVD did not converge in Linear Least Squares in current # voxel. Proceeding with initial SH estimate for this voxel. pass warnings.warn('maximum number of iterations exceeded - failed to converge') return fodf_sh, num_it def odf_sh_to_sharp(odfs_sh, sphere, basis=None, ratio=3 / 15., sh_order=8, lambda_=1., tau=0.1, r2_term=False): r""" Sharpen odfs using the sharpening deconvolution transform [2]_ This function can be used to sharpen any smooth ODF spherical function. In theory, this should only be used to sharpen QballModel ODFs, but in practice, one can play with the deconvolution ratio and sharpen almost any ODF-like spherical function. The constrained-regularization is stable and will not only sharpen the ODF peaks but also regularize the noisy peaks. Parameters ---------- odfs_sh : ndarray (``(sh_order + 1)*(sh_order + 2)/2``, ) array of odfs expressed as spherical harmonics coefficients sphere : Sphere sphere used to build the regularization matrix basis : {None, 'mrtrix', 'fibernav'} different spherical harmonic basis. None is the fibernav basis as well. ratio : float, ratio of the smallest vs the largest eigenvalue of the single prolate tensor response function (:math:`\frac{\lambda_2}{\lambda_1}`) sh_order : int maximal SH order of the SH representation lambda_ : float lambda parameter (see odfdeconv) (default 1.0) tau : float tau parameter in the L matrix construction (see odfdeconv) (default 0.1) r2_term : bool True if ODF is computed from model that uses the $r^2$ term in the integral. Recall that Tuch's ODF (used in Q-ball Imaging [1]_) and the true normalized ODF definition differ from a $r^2$ term in the ODF integral. The original Sharpening Deconvolution Transform (SDT) technique [2]_ is expecting Tuch's ODF without the $r^2$ (see [3]_ for the mathematical details). Now, this function supports ODF that have been computed using the $r^2$ term because the proper analytical response function has be derived. For example, models such as DSI, GQI, SHORE, CSA, Tensor, Multi-tensor ODFs, should now be deconvolved with the r2_term=True. Returns ------- fodf_sh : ndarray sharpened odf expressed as spherical harmonics coefficients References ---------- .. [1] Tuch, D. MRM 2004. Q-Ball Imaging. .. [2] Descoteaux, M., et al. IEEE TMI 2009. Deterministic and Probabilistic Tractography Based on Complex Fibre Orientation Distributions .. [3] Descoteaux, M, et al. 
MRM 2007. Fast, Regularized and Analytical Q-Ball Imaging """ r, theta, phi = cart2sphere(sphere.x, sphere.y, sphere.z) real_sym_sh = sph_harm_lookup[basis] B_reg, m, n = real_sym_sh(sh_order, theta, phi) R, P = forward_sdt_deconv_mat(ratio, n, r2_term=r2_term) # scale lambda to account for differences in the number of # SH coefficients and number of mapped directions lambda_ = lambda_ * R.shape[0] * R[0, 0] / B_reg.shape[0] fodf_sh = np.zeros(odfs_sh.shape) for index in ndindex(odfs_sh.shape[:-1]): fodf_sh[index], num_it = odf_deconv(odfs_sh[index], R, B_reg, lambda_=lambda_, tau=tau, r2_term=r2_term) return fodf_sh def fa_superior(FA, fa_thr): """ Check that the FA is greater than the FA threshold Parameters ---------- FA : array Fractional Anisotropy fa_thr : int FA threshold Returns ------- True when the FA value is greater than the FA threshold, otherwise False. """ return FA > fa_thr def fa_inferior(FA, fa_thr): """ Check that the FA is lower than the FA threshold Parameters ---------- FA : array Fractional Anisotropy fa_thr : int FA threshold Returns ------- True when the FA value is lower than the FA threshold, otherwise False. """ return FA < fa_thr def auto_response(gtab, data, roi_center=None, roi_radius=10, fa_thr=0.7, fa_callable=fa_superior, return_number_of_voxels=False): """ Automatic estimation of response function using FA. Parameters ---------- gtab : GradientTable data : ndarray diffusion data roi_center : tuple, (3,) Center of ROI in data. If center is None, it is assumed that it is the center of the volume with shape `data.shape[:3]`. roi_radius : int radius of cubic ROI fa_thr : float FA threshold fa_callable : callable A callable that defines an operation that compares FA with the fa_thr. The operator should have two positional arguments (e.g., `fa_operator(FA, fa_thr)`) and it should return a bool array. return_number_of_voxels : bool If True, returns the number of voxels used for estimating the response function. Returns ------- response : tuple, (2,) (`evals`, `S0`) ratio : float The ratio between smallest versus largest eigenvalue of the response. number of voxels : int (optional) The number of voxels used for estimating the response function. Notes ----- In CSD there is an important pre-processing step: the estimation of the fiber response function. In order to do this we look for voxels with very anisotropic configurations. For example we can use an ROI (20x20x20) at the center of the volume and store the signal values for the voxels with FA values higher than 0.7. Of course, if we haven't precalculated FA we need to fit a Tensor model to the datasets. Which is what we do in this function. For the response we also need to find the average S0 in the ROI. This is possible using `gtab.b0s_mask()` we can find all the S0 volumes (which correspond to b-values equal 0) in the dataset. The `response` consists always of a prolate tensor created by averaging the highest and second highest eigenvalues in the ROI with FA higher than threshold. We also include the average S0s. We also return the `ratio` which is used for the SDT models. If requested, the number of voxels used for estimating the response function is also returned, which can be used to judge the fidelity of the response function. As a rule of thumb, at least 300 voxels should be used to estimate a good response function (see [1]_). References ---------- .. [1] Tournier, J.D., et al. NeuroImage 2004. 
Direct estimation of the fiber orientation density function from diffusion-weighted MRI data using spherical deconvolution """ ten = TensorModel(gtab) if roi_center is None: ci, cj, ck = np.array(data.shape[:3]) // 2 else: ci, cj, ck = roi_center w = roi_radius roi = data[int(ci - w): int(ci + w), int(cj - w): int(cj + w), int(ck - w): int(ck + w)] tenfit = ten.fit(roi) FA = fractional_anisotropy(tenfit.evals) FA[np.isnan(FA)] = 0 indices = np.where(fa_callable(FA, fa_thr)) if indices[0].size == 0: msg = "No voxel with a FA higher than " + str(fa_thr) + " were found." msg += " Try a larger roi or a lower threshold." warnings.warn(msg, UserWarning) lambdas = tenfit.evals[indices][:, :2] S0s = roi[indices][:, np.nonzero(gtab.b0s_mask)[0]] response, ratio = _get_response(S0s, lambdas) if return_number_of_voxels: return response, ratio, indices[0].size return response, ratio def response_from_mask(gtab, data, mask): """ Estimate the response function from a given mask. Parameters ---------- gtab : GradientTable data : ndarray Diffusion data mask : ndarray Mask to use for the estimation of the response function. For example a mask of the white matter voxels with FA values higher than 0.7 (see [1]_). Returns ------- response : tuple, (2,) (`evals`, `S0`) ratio : float The ratio between smallest versus largest eigenvalue of the response. Notes ----- See csdeconv.auto_response() or csdeconv.recursive_response() if you don't have a computed mask for the response function estimation. References ---------- .. [1] Tournier, J.D., et al. NeuroImage 2004. Direct estimation of the fiber orientation density function from diffusion-weighted MRI data using spherical deconvolution """ ten = TensorModel(gtab) indices = np.where(mask > 0) if indices[0].size == 0: msg = "No voxel in mask with value > 0 were found." warnings.warn(msg, UserWarning) return (np.nan, np.nan), np.nan tenfit = ten.fit(data[indices]) lambdas = tenfit.evals[:, :2] S0s = data[indices][:, np.nonzero(gtab.b0s_mask)[0]] return _get_response(S0s, lambdas) def _get_response(S0s, lambdas): S0 = np.mean(S0s) l01 = np.mean(lambdas, axis=0) evals = np.array([l01[0], l01[1], l01[1]]) response = (evals, S0) ratio = evals[1] / evals[0] return response, ratio def recursive_response(gtab, data, mask=None, sh_order=8, peak_thr=0.01, init_fa=0.08, init_trace=0.0021, iter=8, convergence=0.001, parallel=True, nbr_processes=None, sphere=default_sphere): """ Recursive calibration of response function using peak threshold Parameters ---------- gtab : GradientTable data : ndarray diffusion data mask : ndarray, optional mask for recursive calibration, for example a white matter mask. It has shape `data.shape[0:3]` and dtype=bool. Default: use the entire data array. sh_order : int, optional maximal spherical harmonics order. Default: 8 peak_thr : float, optional peak threshold, how large the second peak can be relative to the first peak in order to call it a single fiber population [1]. Default: 0.01 init_fa : float, optional FA of the initial 'fat' response function (tensor). Default: 0.08 init_trace : float, optional trace of the initial 'fat' response function (tensor). Default: 0.0021 iter : int, optional maximum number of iterations for calibration. Default: 8. convergence : float, optional convergence criterion, maximum relative change of SH coefficients. Default: 0.001. parallel : bool, optional Whether to use parallelization in peak-finding during the calibration procedure. 
Default: True nbr_processes: int If `parallel` is True, the number of subprocesses to use (default multiprocessing.cpu_count()). sphere : Sphere, optional. The sphere used for peak finding. Default: default_sphere. Returns ------- response : ndarray response function in SH coefficients Notes ----- In CSD there is an important pre-processing step: the estimation of the fiber response function. Using an FA threshold is not a very robust method. It is dependent on the dataset (non-informed used subjectivity), and still depends on the diffusion tensor (FA and first eigenvector), which has low accuracy at high b-value. This function recursively calibrates the response function, for more information see [1]. References ---------- .. [1] Tax, C.M.W., et al. NeuroImage 2014. Recursive calibration of the fiber response function for spherical deconvolution of diffusion MRI data. """ S0 = 1. evals = fa_trace_to_lambdas(init_fa, init_trace) res_obj = (evals, S0) if mask is None: data = data.reshape(-1, data.shape[-1]) else: data = data[mask] n = np.arange(0, sh_order + 1, 2) where_dwi = lazy_index(~gtab.b0s_mask) response_p = np.ones(len(n)) for num_it in range(iter): r_sh_all = np.zeros(len(n)) csd_model = ConstrainedSphericalDeconvModel(gtab, res_obj, sh_order=sh_order) csd_peaks = peaks_from_model(model=csd_model, data=data, sphere=sphere, relative_peak_threshold=peak_thr, min_separation_angle=25, parallel=parallel, nbr_processes=nbr_processes) dirs = csd_peaks.peak_dirs vals = csd_peaks.peak_values single_peak_mask = (vals[:, 1] / vals[:, 0]) < peak_thr data = data[single_peak_mask] dirs = dirs[single_peak_mask] for num_vox in range(data.shape[0]): rotmat = vec2vec_rotmat(dirs[num_vox, 0], np.array([0, 0, 1])) rot_gradients = np.dot(rotmat, gtab.gradients.T).T x, y, z = rot_gradients[where_dwi].T r, theta, phi = cart2sphere(x, y, z) # for the gradient sphere B_dwi = real_sph_harm(0, n, theta[:, None], phi[:, None]) r_sh_all += np.linalg.lstsq(B_dwi, data[num_vox, where_dwi])[0] response = r_sh_all / data.shape[0] res_obj = AxSymShResponse(data[:, gtab.b0s_mask].mean(), response) change = abs((response_p - response) / response_p) if all(change < convergence): break response_p = response return res_obj def fa_trace_to_lambdas(fa=0.08, trace=0.0021): lambda1 = (trace / 3.) * (1 + 2 * fa / (3 - 2 * fa ** 2) ** (1 / 2.)) lambda2 = (trace / 3.) 
* (1 - fa / (3 - 2 * fa ** 2) ** (1 / 2.)) evals = np.array([lambda1, lambda2, lambda2]) return evals dipy-0.13.0/dipy/reconst/dki.py000066400000000000000000002264331317371701200163170ustar00rootroot00000000000000#!/usr/bin/python """ Classes and functions for fitting the diffusion kurtosis model """ from __future__ import division, print_function, absolute_import import numpy as np import scipy.optimize as opt import dipy.core.sphere as dps from dipy.reconst.dti import (TensorFit, mean_diffusivity, from_lower_triangular, lower_triangular, decompose_tensor, MIN_POSITIVE_SIGNAL) from dipy.reconst.utils import dki_design_matrix as design_matrix from dipy.reconst.recspeed import local_maxima from dipy.utils.six.moves import range from dipy.reconst.base import ReconstModel from dipy.core.ndindex import ndindex from dipy.core.geometry import (sphere2cart, cart2sphere) from dipy.data import get_sphere from dipy.reconst.vec_val_sum import vec_val_vect from dipy.core.gradients import check_multi_b def _positive_evals(L1, L2, L3, er=2e-7): """ Helper function that indentifies which voxels in a array have all eigenvalues significantly larger than zero Parameters ---------- L1 : ndarray First independent variable of the integral. L2 : ndarray Second independent variable of the integral. L3 : ndarray Third independent variable of the integral. er : float, optional A eigenvalues is classified as larger than zero if it is larger than er Returns ------- ind : boolean (n,) Array that marks the voxels that have all eigenvalues are larger than zero. """ ind = np.logical_and(L1 > er, np.logical_and(L2 > er, L3 > er)) return ind def carlson_rf(x, y, z, errtol=3e-4): r""" Computes the Carlson's incomplete elliptic integral of the first kind defined as: .. math:: R_F = \frac{1}{2} \int_{0}^{\infty} \left [(t+x)(t+y)(t+z) \right ] ^{-\frac{1}{2}}dt Parameters ---------- x : ndarray First independent variable of the integral. y : ndarray Second independent variable of the integral. z : ndarray Third independent variable of the integral. errtol : float Error tolerance. Integral is computed with relative error less in magnitude than the defined value Returns ------- RF : ndarray Value of the incomplete first order elliptic integral Note ----- x, y, and z have to be nonnegative and at most one of them is zero. References ---------- .. [1] Carlson, B.C., 1994. Numerical computation of real or complex elliptic integrals. arXiv:math/9409227 [math.CA] """ xn = x.copy() yn = y.copy() zn = z.copy() An = (xn + yn + zn) / 3.0 Q = (3. * errtol) ** (-1 / 6.) * \ np.max(np.abs([An - xn, An - yn, An - zn]), axis=0) # Convergence has to be done voxel by voxel index = ndindex(x.shape) for v in index: n = 0 # Convergence condition while 4.**(-n) * Q[v] > abs(An[v]): xnroot = np.sqrt(xn[v]) ynroot = np.sqrt(yn[v]) znroot = np.sqrt(zn[v]) lamda = xnroot * (ynroot + znroot) + ynroot * znroot n = n + 1 xn[v] = (xn[v] + lamda) * 0.250 yn[v] = (yn[v] + lamda) * 0.250 zn[v] = (zn[v] + lamda) * 0.250 An[v] = (An[v] + lamda) * 0.250 # post convergence calculation X = 1. - xn / An Y = 1. - yn / An Z = - X - Y E2 = X * Y - Z * Z E3 = X * Y * Z RF = An**(-1 / 2.) * \ (1 - E2 / 10. + E3 / 14. + (E2**2) / 24. - 3 / 44. * E2 * E3) return RF def carlson_rd(x, y, z, errtol=1e-4): r""" Computes the Carlson's incomplete elliptic integral of the second kind defined as: .. 
math:: R_D = \frac{3}{2} \int_{0}^{\infty} (t+x)^{-\frac{1}{2}} (t+y)^{-\frac{1}{2}}(t+z) ^{-\frac{3}{2}} Parameters ---------- x : ndarray First independent variable of the integral. y : ndarray Second independent variable of the integral. z : ndarray Third independent variable of the integral. errtol : float Error tolerance. Integral is computed with relative error less in magnitude than the defined value Returns ------- RD : ndarray Value of the incomplete second order elliptic integral Note ----- x, y, and z have to be nonnegative and at most x or y is zero. """ xn = x.copy() yn = y.copy() zn = z.copy() A0 = (xn + yn + 3. * zn) / 5.0 An = A0.copy() Q = (errtol / 4.) ** (-1 / 6.) * \ np.max(np.abs([An - xn, An - yn, An - zn]), axis=0) sum_term = np.zeros(x.shape, dtype=x.dtype) n = np.zeros(x.shape) # Convergence has to be done voxel by voxel index = ndindex(x.shape) for v in index: # Convergence condition while 4.**(-n[v]) * Q[v] > abs(An[v]): xnroot = np.sqrt(xn[v]) ynroot = np.sqrt(yn[v]) znroot = np.sqrt(zn[v]) lamda = xnroot * (ynroot + znroot) + ynroot * znroot sum_term[v] = sum_term[v] + \ 4.**(-n[v]) / (znroot * (zn[v] + lamda)) n[v] = n[v] + 1 xn[v] = (xn[v] + lamda) * 0.250 yn[v] = (yn[v] + lamda) * 0.250 zn[v] = (zn[v] + lamda) * 0.250 An[v] = (An[v] + lamda) * 0.250 # post convergence calculation X = (A0 - x) / (4.**(n) * An) Y = (A0 - y) / (4.**(n) * An) Z = - (X + Y) / 3. E2 = X * Y - 6. * Z * Z E3 = (3. * X * Y - 8. * Z * Z) * Z E4 = 3. * (X * Y - Z * Z) * Z**2. E5 = X * Y * Z**3. RD = \ 4**(-n) * An**(-3 / 2.) * \ (1 - 3 / 14. * E2 + 1 / 6. * E3 + 9 / 88. * (E2**2) - 3 / 22. * E4 - 9 / 52. * E2 * E3 + 3 / 26. * E5) + 3 * sum_term return RD def _F1m(a, b, c): """ Helper function that computes function $F_1$ which is required to compute the analytical solution of the Mean kurtosis. Parameters ---------- a : ndarray Array containing the values of parameter $\lambda_1$ of function $F_1$ b : ndarray Array containing the values of parameter $\lambda_2$ of function $F_1$ c : ndarray Array containing the values of parameter $\lambda_3$ of function $F_1$ Returns ------- F1 : ndarray Value of the function $F_1$ for all elements of the arrays a, b, and c Notes -------- Function $F_1$ is defined as [1]_: .. math:: F_1(\lambda_1,\lambda_2,\lambda_3)= \frac{(\lambda_1+\lambda_2+\lambda_3)^2} {18(\lambda_1-\lambda_2)(\lambda_1-\lambda_3)} [\frac{\sqrt{\lambda_2\lambda_3}}{\lambda_1} R_F(\frac{\lambda_1}{\lambda_2},\frac{\lambda_1}{\lambda_3},1)+\\ \frac{3\lambda_1^2-\lambda_1\lambda_2-\lambda_2\lambda_3- \lambda_1\lambda_3} {3\lambda_1 \sqrt{\lambda_2 \lambda_3}} R_D(\frac{\lambda_1}{\lambda_2},\frac{\lambda_1}{\lambda_3},1)-1 ] References ---------- .. [1] Tabesh, A., Jensen, J.H., Ardekani, B.A., Helpern, J.A., 2011. Estimation of tensors and tensor-derived measures in diffusional kurtosis imaging. Magn Reson Med. 65(3), 823-836 """ # Eigenvalues are considered equal if they are not 2.5% different to each # other. This value is adjusted according to the analysis reported in: # http://gsoc2015dipydki.blogspot.co.uk/2015/08/rnh-post-13-start-wrapping-up-test.html er = 2.5e-2 # Initialize F1 F1 = np.zeros(a.shape) # Only computes F1 in voxels that have all eigenvalues larger than zero cond0 = _positive_evals(a, b, c) # Apply formula for non problematic plaussible cases, i.e. 
a!=b and a!=c cond1 = np.logical_and(cond0, np.logical_and(abs(a - b) >= a * er, abs(a - c) >= a * er)) if np.sum(cond1) != 0: L1 = a[cond1] L2 = b[cond1] L3 = c[cond1] RFm = carlson_rf(L1 / L2, L1 / L3, np.ones(len(L1))) RDm = carlson_rd(L1 / L2, L1 / L3, np.ones(len(L1))) F1[cond1] = ((L1 + L2 + L3) ** 2) / (18 * (L1 - L2) * (L1 - L3)) * \ (np.sqrt(L2 * L3) / L1 * RFm + (3 * L1**2 - L1 * L2 - L1 * L3 - L2 * L3) / (3 * L1 * np.sqrt(L2 * L3)) * RDm - 1) # Resolve possible sigularity a==b cond2 = np.logical_and(cond0, np.logical_and(abs(a - b) < a * er, abs(a - c) > a * er)) if np.sum(cond2) != 0: L1 = (a[cond2] + b[cond2]) / 2. L3 = c[cond2] F1[cond2] = _F2m(L3, L1, L1) / 2. # Resolve possible sigularity a==c cond3 = np.logical_and(cond0, np.logical_and(abs(a - c) < a * er, abs(a - b) > a * er)) if np.sum(cond3) != 0: L1 = (a[cond3] + c[cond3]) / 2. L2 = b[cond3] F1[cond3] = _F2m(L2, L1, L1) / 2 # Resolve possible sigularity a==b and a==c cond4 = np.logical_and(cond0, np.logical_and(abs(a - c) < a * er, abs(a - b) < a * er)) if np.sum(cond4) != 0: F1[cond4] = 1 / 5. return F1 def _F2m(a, b, c): """ Helper function that computes function $F_2$ which is required to compute the analytical solution of the Mean kurtosis. Parameters ---------- a : ndarray Array containing the values of parameter $\lambda_1$ of function $F_2$ b : ndarray Array containing the values of parameter $\lambda_2$ of function $F_2$ c : ndarray Array containing the values of parameter $\lambda_3$ of function $F_2$ Returns ------- F2 : ndarray Value of the function $F_2$ for all elements of the arrays a, b, and c Notes -------- Function $F_2$ is defined as [1]_: .. math:: F_2(\lambda_1,\lambda_2,\lambda_3)= \frac{(\lambda_1+\lambda_2+\lambda_3)^2} {3(\lambda_2-\lambda_3)^2} [\frac{\lambda_2+\lambda_3}{\sqrt{\lambda_2\lambda_3}} R_F(\frac{\lambda_1}{\lambda_2},\frac{\lambda_1}{\lambda_3},1)+\\ \frac{2\lambda_1-\lambda_2-\lambda_3}{3\sqrt{\lambda_2 \lambda_3}} R_D(\frac{\lambda_1}{\lambda_2},\frac{\lambda_1}{\lambda_3},1)-2] References ---------- .. [1] Tabesh, A., Jensen, J.H., Ardekani, B.A., Helpern, J.A., 2011. Estimation of tensors and tensor-derived measures in diffusional kurtosis imaging. Magn Reson Med. 65(3), 823-836 """ # Eigenvalues are considered equal if they are not 2.5% different to each # other. This value is adjusted according to the analysis reported in: # http://gsoc2015dipydki.blogspot.co.uk/2015/08/rnh-post-13-start-wrapping-up-test.html er = 2.5e-2 # Initialize F2 F2 = np.zeros(a.shape) # Only computes F2 in voxels that have all eigenvalues larger than zero cond0 = _positive_evals(a, b, c) # Apply formula for non problematic plaussible cases, i.e. b!=c cond1 = np.logical_and(cond0, (abs(b - c) > b * er)) if np.sum(cond1) != 0: L1 = a[cond1] L2 = b[cond1] L3 = c[cond1] RF = carlson_rf(L1 / L2, L1 / L3, np.ones(len(L1))) RD = carlson_rd(L1 / L2, L1 / L3, np.ones(len(L1))) F2[cond1] = (((L1 + L2 + L3) ** 2) / (3. * (L2 - L3) ** 2)) * \ (((L2 + L3) / (np.sqrt(L2 * L3))) * RF + ((2. * L1 - L2 - L3) / (3. * np.sqrt(L2 * L3))) * RD - 2.) # Resolve possible sigularity b==c cond2 = np.logical_and(cond0, np.logical_and(abs(b - c) < b * er, abs(a - b) > b * er)) if np.sum(cond2) != 0: L1 = a[cond2] L3 = (c[cond2] + b[cond2]) / 2. # Cumpute alfa [1]_ x = 1. - (L1 / L3) alpha = np.zeros(len(L1)) for i in range(len(x)): if x[i] > 0: alpha[i] = 1. / np.sqrt(x[i]) * np.arctanh(np.sqrt(x[i])) else: alpha[i] = 1. / np.sqrt(-x[i]) * np.arctan(np.sqrt(-x[i])) F2[cond2] = \ 6. * ((L1 + 2. * L3)**2) / (144. 
* L3**2 * (L1 - L3)**2) * \ (L3 * (L1 + 2. * L3) + L1 * (L1 - 4. * L3) * alpha) # Resolve possible sigularity a==b and a==c cond3 = np.logical_and(cond0, np.logical_and(abs(b - c) < b * er, abs(a - b) < b * er)) if np.sum(cond3) != 0: F2[cond3] = 6 / 15. return F2 def directional_diffusion(dt, V, min_diffusivity=0): r""" Calculates the apparent diffusion coefficient (adc) in each direction of a sphere for a single voxel [1]_. Parameters ---------- dt : array (6,) elements of the diffusion tensor of the voxel. V : array (g, 3) g directions of a Sphere in Cartesian coordinates min_diffusivity : float (optional) Because negative eigenvalues are not physical and small eigenvalues cause quite a lot of noise in diffusion-based metrics, diffusivity values smaller than `min_diffusivity` are replaced with `min_diffusivity`. Default = 0 Returns -------- adc : ndarray (g,) Apparent diffusion coefficient (adc) in all g directions of a sphere for a single voxel. References ---------- .. [1] Neto Henriques R, Correia MM, Nunes RG, Ferreira HA (2015). Exploring the 3D geometry of the diffusion kurtosis tensor - Impact on the development of robust tractography procedures and novel biomarkers, NeuroImage 111: 85-99 """ adc = \ V[:, 0] * V[:, 0] * dt[0] + \ 2 * V[:, 0] * V[:, 1] * dt[1] + \ V[:, 1] * V[:, 1] * dt[2] + \ 2 * V[:, 0] * V[:, 2] * dt[3] + \ 2 * V[:, 1] * V[:, 2] * dt[4] + \ V[:, 2] * V[:, 2] * dt[5] if min_diffusivity is not None: adc = adc.clip(min=min_diffusivity) return adc def directional_diffusion_variance(kt, V, min_kurtosis=-3/7): r""" Calculates the apparent diffusion variance (adv) in each direction of a sphere for a single voxel [1]_. Parameters ---------- dt : array (6,) elements of the diffusion tensor of the voxel. kt : array (15,) elements of the kurtosis tensor of the voxel. V : array (g, 3) g directions of a Sphere in Cartesian coordinates min_kurtosis : float (optional) Because high-amplitude negative values of kurtosis are not physicaly and biologicaly pluasible, and these cause artefacts in kurtosis-based measures, directional kurtosis values smaller than `min_kurtosis` are replaced with `min_kurtosis`. Default = -3./7 (theoretical kurtosis limit for regions that consist of water confined to spherical pores [2]_) adc : ndarray(g,) (optional) Apparent diffusion coefficient (adc) in all g directions of a sphere for a single voxel. adv : ndarray(g,) (optional) Apparent diffusion variance coefficient (advc) in all g directions of a sphere for a single voxel. Returns -------- adv : ndarray (g,) Apparent diffusion variance (adv) in all g directions of a sphere for a single voxel. References ---------- .. [1] Neto Henriques R, Correia MM, Nunes RG, Ferreira HA (2015). 
Exploring the 3D geometry of the diffusion kurtosis tensor - Impact on the development of robust tractography procedures and novel biomarkers, NeuroImage 111: 85-99 """ adv = \ V[:, 0] * V[:, 0] * V[:, 0] * V[:, 0] * kt[0] + \ V[:, 1] * V[:, 1] * V[:, 1] * V[:, 1] * kt[1] + \ V[:, 2] * V[:, 2] * V[:, 2] * V[:, 2] * kt[2] + \ 4 * V[:, 0] * V[:, 0] * V[:, 0] * V[:, 1] * kt[3] + \ 4 * V[:, 0] * V[:, 0] * V[:, 0] * V[:, 2] * kt[4] + \ 4 * V[:, 0] * V[:, 1] * V[:, 1] * V[:, 1] * kt[5] + \ 4 * V[:, 1] * V[:, 1] * V[:, 1] * V[:, 2] * kt[6] + \ 4 * V[:, 0] * V[:, 2] * V[:, 2] * V[:, 2] * kt[7] + \ 4 * V[:, 1] * V[:, 2] * V[:, 2] * V[:, 2] * kt[8] + \ 6 * V[:, 0] * V[:, 0] * V[:, 1] * V[:, 1] * kt[9] + \ 6 * V[:, 0] * V[:, 0] * V[:, 2] * V[:, 2] * kt[10] + \ 6 * V[:, 1] * V[:, 1] * V[:, 2] * V[:, 2] * kt[11] + \ 12 * V[:, 0] * V[:, 0] * V[:, 1] * V[:, 2] * kt[12] + \ 12 * V[:, 0] * V[:, 1] * V[:, 1] * V[:, 2] * kt[13] + \ 12 * V[:, 0] * V[:, 1] * V[:, 2] * V[:, 2] * kt[14] return adv def directional_kurtosis(dt, md, kt, V, min_diffusivity=0, min_kurtosis=-3/7, adc=None, adv=None): r""" Calculates the apparent kurtosis coefficient (akc) in each direction of a sphere for a single voxel [1]_. Parameters ---------- dt : array (6,) elements of the diffusion tensor of the voxel. md : float mean diffusivity of the voxel kt : array (15,) elements of the kurtosis tensor of the voxel. V : array (g, 3) g directions of a Sphere in Cartesian coordinates min_diffusivity : float (optional) Because negative eigenvalues are not physical and small eigenvalues cause quite a lot of noise in diffusion-based metrics, diffusivity values smaller than `min_diffusivity` are replaced with `min_diffusivity`. Default = 0 min_kurtosis : float (optional) Because high-amplitude negative values of kurtosis are not physicaly and biologicaly pluasible, and these cause artefacts in kurtosis-based measures, directional kurtosis values smaller than `min_kurtosis` are replaced with `min_kurtosis`. Default = -3./7 (theoretical kurtosis limit for regions that consist of water confined to spherical pores [2]_) adc : ndarray(g,) (optional) Apparent diffusion coefficient (adc) in all g directions of a sphere for a single voxel. adv : ndarray(g,) (optional) Apparent diffusion variance (advc) in all g directions of a sphere for a single voxel. Returns -------- akc : ndarray (g,) Apparent kurtosis coefficient (AKC) in all g directions of a sphere for a single voxel. References ---------- .. [1] Neto Henriques R, Correia MM, Nunes RG, Ferreira HA (2015). Exploring the 3D geometry of the diffusion kurtosis tensor - Impact on the development of robust tractography procedures and novel biomarkers, NeuroImage 111: 85-99 .. [2] Barmpoutis, A., & Zhuo, J., 2011. Diffusion kurtosis imaging: Robust estimation from DW-MRI using homogeneous polynomials. Proceedings of the 8th {IEEE} International Symposium on Biomedical Imaging: From Nano to Macro, ISBI 2011, 262-265. doi: 10.1109/ISBI.2011.5872402 """ if adc is None: adc = directional_diffusion(dt, V, min_diffusivity=min_diffusivity) if adv is None: adv = directional_diffusion_variance(kt, V) akc = adv * (md / adc) ** 2 if min_kurtosis is not None: akc = akc.clip(min=min_kurtosis) return akc def apparent_kurtosis_coef(dki_params, sphere, min_diffusivity=0, min_kurtosis=-3./7): r""" Calculates the apparent kurtosis coefficient (AKC) in each direction of a sphere [1]_. Parameters ---------- dki_params : ndarray (x, y, z, 27) or (n, 27) All parameters estimated from the diffusion kurtosis model. 
Parameters are ordered as follows: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first, second and third coordinates of the eigenvectors respectively 3) Fifteen elements of the kurtosis tensor sphere : a Sphere class instance The AKC will be calculated for each of the vertices in the sphere min_diffusivity : float (optional) Because negative eigenvalues are not physical and small eigenvalues cause quite a lot of noise in diffusion-based metrics, diffusivity values smaller than `min_diffusivity` are replaced with `min_diffusivity`. Default = 0 min_kurtosis : float (optional) Because high-amplitude negative values of kurtosis are not physicaly and biologicaly pluasible, and these cause artefacts in kurtosis-based measures, directional kurtosis values smaller than `min_kurtosis` are replaced with `min_kurtosis`. Default = -3./7 (theoretical kurtosis limit for regions that consist of water confined to spherical pores [2]_) Returns -------- akc : ndarray (x, y, z, g) or (n, g) Apparent kurtosis coefficient (AKC) for all g directions of a sphere. Notes ----- For each sphere direction with coordinates $(n_{1}, n_{2}, n_{3})$, the calculation of AKC is done using formula [1]_: .. math :: AKC(n)=\frac{MD^{2}}{ADC(n)^{2}}\sum_{i=1}^{3}\sum_{j=1}^{3} \sum_{k=1}^{3}\sum_{l=1}^{3}n_{i}n_{j}n_{k}n_{l}W_{ijkl} where $W_{ijkl}$ are the elements of the kurtosis tensor, MD the mean diffusivity and ADC the apparent diffusion coefficent computed as: .. math :: ADC(n)=\sum_{i=1}^{3}\sum_{j=1}^{3}n_{i}n_{j}D_{ij} where $D_{ij}$ are the elements of the diffusion tensor. References ---------- .. [1] Neto Henriques R, Correia MM, Nunes RG, Ferreira HA (2015). Exploring the 3D geometry of the diffusion kurtosis tensor - Impact on the development of robust tractography procedures and novel biomarkers, NeuroImage 111: 85-99 .. [2] Barmpoutis, A., & Zhuo, J., 2011. Diffusion kurtosis imaging: Robust estimation from DW-MRI using homogeneous polynomials. Proceedings of the 8th {IEEE} International Symposium on Biomedical Imaging: From Nano to Macro, ISBI 2011, 262-265. doi: 10.1109/ISBI.2011.5872402 """ # Flat parameters outshape = dki_params.shape[:-1] dki_params = dki_params.reshape((-1, dki_params.shape[-1])) # Split data evals, evecs, kt = split_dki_param(dki_params) # Initialize AKC matrix V = sphere.vertices akc = np.zeros((len(kt), len(V))) # select relevant voxels to process rel_i = _positive_evals(evals[..., 0], evals[..., 1], evals[..., 2]) kt = kt[rel_i] evecs = evecs[rel_i] evals = evals[rel_i] akci = akc[rel_i] # Compute MD and DT md = mean_diffusivity(evals) dt = lower_triangular(vec_val_vect(evecs, evals)) # loop over all relevant voxels for vox in range(len(kt)): akci[vox] = directional_kurtosis(dt[vox], md[vox], kt[vox], V, min_diffusivity=min_diffusivity, min_kurtosis=min_kurtosis) # reshape data according to input data akc[rel_i] = akci return akc.reshape((outshape + (len(V),))) def mean_kurtosis(dki_params, min_kurtosis=-3./7, max_kurtosis=3): r""" Computes mean Kurtosis (MK) from the kurtosis tensor [1]_. Parameters ---------- dki_params : ndarray (x, y, z, 27) or (n, 27) All parameters estimated from the diffusion kurtosis model. 
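# --- Usage sketch (illustrative; not part of the original dipy module) ------
# apparent_kurtosis_coef works on the packed 27-parameter DKI representation.
# Below such a vector is assembled by hand from illustrative values; in
# practice it is the output of a DKI fit (ordering as documented above).
import numpy as np
from dipy.data import get_sphere
from dipy.reconst.dki import apparent_kurtosis_coef

evals = np.array([1.7e-3, 4e-4, 4e-4])
evecs = np.eye(3)
kt = np.zeros(15)
kt[[0, 1, 2]] = [2.5, 0.5, 0.5]
kt[[9, 10, 11]] = [0.3, 0.3, 0.5]
# evals, then the eigenvector matrix row by row, then the 15 kurtosis elements
dki_params = np.concatenate((evals, evecs.ravel(), kt))

akc = apparent_kurtosis_coef(dki_params, get_sphere('repulsion100'))  # (100,)
# --- end of usage sketch -----------------------------------------------------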
Parameters are ordered as follows: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first, second and third coordinates of the eigenvector 3) Fifteen elements of the kurtosis tensor min_kurtosis : float (optional) To keep kurtosis values within a plausible biophysical range, mean kurtosis values that are smaller than `min_kurtosis` are replaced with `min_kurtosis`. Default = -3./7 (theoretical kurtosis limit for regions that consist of water confined to spherical pores [2]_) max_kurtosis : float (optional) To keep kurtosis values within a plausible biophysical range, mean kurtosis values that are larger than `max_kurtosis` are replaced with `max_kurtosis`. Default = 10 Returns ------- mk : array Calculated MK. Notes -------- The MK analytical solution is calculated using the following equation [1]_: .. math:: MK=F_1(\lambda_1,\lambda_2,\lambda_3)\hat{W}_{1111}+ F_1(\lambda_2,\lambda_1,\lambda_3)\hat{W}_{2222}+ F_1(\lambda_3,\lambda_2,\lambda_1)\hat{W}_{3333}+ \\ F_2(\lambda_1,\lambda_2,\lambda_3)\hat{W}_{2233}+ F_2(\lambda_2,\lambda_1,\lambda_3)\hat{W}_{1133}+ F_2(\lambda_3,\lambda_2,\lambda_1)\hat{W}_{1122} where $\hat{W}_{ijkl}$ are the components of the $W$ tensor in the coordinates system defined by the eigenvectors of the diffusion tensor $\mathbf{D}$ and F_1(\lambda_1,\lambda_2,\lambda_3)= \frac{(\lambda_1+\lambda_2+\lambda_3)^2} {18(\lambda_1-\lambda_2)(\lambda_1-\lambda_3)} [\frac{\sqrt{\lambda_2\lambda_3}}{\lambda_1} R_F(\frac{\lambda_1}{\lambda_2},\frac{\lambda_1}{\lambda_3},1)+\\ \frac{3\lambda_1^2-\lambda_1\lambda_2-\lambda_2\lambda_3- \lambda_1\lambda_3} {3\lambda_1 \sqrt{\lambda_2 \lambda_3}} R_D(\frac{\lambda_1}{\lambda_2},\frac{\lambda_1}{\lambda_3},1)-1 ] F_2(\lambda_1,\lambda_2,\lambda_3)= \frac{(\lambda_1+\lambda_2+\lambda_3)^2} {3(\lambda_2-\lambda_3)^2} [\frac{\lambda_2+\lambda_3}{\sqrt{\lambda_2\lambda_3}} R_F(\frac{\lambda_1}{\lambda_2},\frac{\lambda_1}{\lambda_3},1)+\\ \frac{2\lambda_1-\lambda_2-\lambda_3}{3\sqrt{\lambda_2 \lambda_3}} R_D(\frac{\lambda_1}{\lambda_2},\frac{\lambda_1}{\lambda_3},1)-2] where $R_f$ and $R_d$ are the Carlson's elliptic integrals. References ---------- .. [1] Tabesh, A., Jensen, J.H., Ardekani, B.A., Helpern, J.A., 2011. Estimation of tensors and tensor-derived measures in diffusional kurtosis imaging. Magn Reson Med. 65(3), 823-836 .. [2] Barmpoutis, A., & Zhuo, J., 2011. Diffusion kurtosis imaging: Robust estimation from DW-MRI using homogeneous polynomials. Proceedings of the 8th {IEEE} International Symposium on Biomedical Imaging: From Nano to Macro, ISBI 2011, 262-265. doi: 10.1109/ISBI.2011.5872402 """ # Flat parameters. 
For numpy versions more recent than 1.6.0, this step # isn't required outshape = dki_params.shape[:-1] dki_params = dki_params.reshape((-1, dki_params.shape[-1])) # Split the model parameters to three variable containing the evals, evecs, # and kurtosis elements evals, evecs, kt = split_dki_param(dki_params) # Rotate the kurtosis tensor from the standard Cartesian coordinate system # to another coordinate system in which the 3 orthonormal eigenvectors of # DT are the base coordinate Wxxxx = Wrotate_element(kt, 0, 0, 0, 0, evecs) Wyyyy = Wrotate_element(kt, 1, 1, 1, 1, evecs) Wzzzz = Wrotate_element(kt, 2, 2, 2, 2, evecs) Wxxyy = Wrotate_element(kt, 0, 0, 1, 1, evecs) Wxxzz = Wrotate_element(kt, 0, 0, 2, 2, evecs) Wyyzz = Wrotate_element(kt, 1, 1, 2, 2, evecs) # Compute MK MK = \ _F1m(evals[..., 0], evals[..., 1], evals[..., 2]) * Wxxxx + \ _F1m(evals[..., 1], evals[..., 0], evals[..., 2]) * Wyyyy + \ _F1m(evals[..., 2], evals[..., 1], evals[..., 0]) * Wzzzz + \ _F2m(evals[..., 0], evals[..., 1], evals[..., 2]) * Wyyzz + \ _F2m(evals[..., 1], evals[..., 0], evals[..., 2]) * Wxxzz + \ _F2m(evals[..., 2], evals[..., 1], evals[..., 0]) * Wxxyy if min_kurtosis is not None: MK = MK.clip(min=min_kurtosis) if max_kurtosis is not None: MK = MK.clip(max=max_kurtosis) return MK.reshape(outshape) def _G1m(a, b, c): """ Helper function that computes function $G_1$ which is required to compute the analytical solution of the Radial kurtosis. Parameters ---------- a : ndarray Array containing the values of parameter $\lambda_1$ of function $G_1$ b : ndarray Array containing the values of parameter $\lambda_2$ of function $G_1$ c : ndarray Array containing the values of parameter $\lambda_3$ of function $G_1$ Returns ------- G1 : ndarray Value of the function $G_1$ for all elements of the arrays a, b, and c Notes -------- Function $G_1$ is defined as [1]_: .. math:: G_1(\lambda_1,\lambda_2,\lambda_3)= \frac{(\lambda_1+\lambda_2+\lambda_3)^2}{18\lambda_2(\lambda_2- \lambda_3)} \left (2\lambda_2 + \frac{\lambda_3^2-3\lambda_2\lambda_3}{\sqrt{\lambda_2\lambda_3}} \right) References ---------- .. [1] Tabesh, A., Jensen, J.H., Ardekani, B.A., Helpern, J.A., 2011. Estimation of tensors and tensor-derived measures in diffusional kurtosis imaging. Magn Reson Med. 65(3), 823-836 """ # Float error used to compare two floats, abs(l1 - l2) < er for l1 = l2 # Error is defined as five orders of magnitude larger than system's epslon er = np.finfo(a.ravel()[0]).eps * 1e5 # Initialize G1 G1 = np.zeros(a.shape) # Only computes G1 in voxels that have all eigenvalues larger than zero cond0 = _positive_evals(a, b, c) # Apply formula for non problematic plaussible cases, i.e. b!=c cond1 = np.logical_and(cond0, (abs(b - c) > er)) if np.sum(cond1) != 0: L1 = a[cond1] L2 = b[cond1] L3 = c[cond1] G1[cond1] = \ (L1 + L2 + L3)**2 / (18 * L2 * (L2 - L3)**2) * \ (2. * L2 + (L3**2 - 3 * L2 * L3) / np.sqrt(L2 * L3)) # Resolve possible sigularity b==c cond2 = np.logical_and(cond0, abs(b - c) < er) if np.sum(cond2) != 0: L1 = a[cond2] L2 = b[cond2] G1[cond2] = (L1 + 2. * L2)**2 / (24. * L2**2) return G1 def _G2m(a, b, c): """ Helper function that computes function $G_2$ which is required to compute the analytical solution of the Radial kurtosis. 
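# --- Usage sketch (illustrative; not part of the original dipy module) ------
# mean_kurtosis above accepts the same packed 27-parameter array; the clipping
# bounds can be disabled by passing None. Values below are illustrative only.
import numpy as np
from dipy.reconst.dki import mean_kurtosis

evals = np.array([1.7e-3, 4e-4, 4e-4])
evecs = np.eye(3)
kt = np.zeros(15)
kt[[0, 1, 2]] = [2.5, 0.5, 0.5]
kt[[9, 10, 11]] = [0.3, 0.3, 0.5]
dki_params = np.concatenate((evals, evecs.ravel(), kt))

mk = mean_kurtosis(dki_params)                        # clipped analytical MK
mk_raw = mean_kurtosis(dki_params, min_kurtosis=None, max_kurtosis=None)
# --- end of usage sketch -----------------------------------------------------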
Parameters ---------- a : ndarray Array containing the values of parameter $\lambda_1$ of function $G_2$ b : ndarray Array containing the values of parameter $\lambda_2$ of function $G_2$ c : ndarray (n,) Array containing the values of parameter $\lambda_3$ of function $G_2$ Returns ------- G2 : ndarray Value of the function $G_2$ for all elements of the arrays a, b, and c Notes -------- Function $G_2$ is defined as [1]_: .. math:: G_2(\lambda_1,\lambda_2,\lambda_3)= \frac{(\lambda_1+\lambda_2+\lambda_3)^2}{(\lambda_2-\lambda_3)^2} \left ( \frac{\lambda_2+\lambda_3}{\sqrt{\lambda_2\lambda_3}}-2\right ) References ---------- .. [1] Tabesh, A., Jensen, J.H., Ardekani, B.A., Helpern, J.A., 2011. Estimation of tensors and tensor-derived measures in diffusional kurtosis imaging. Magn Reson Med. 65(3), 823-836 """ # Float error used to compare two floats, abs(l1 - l2) < er for l1 = l2 # Error is defined as five order of magnitude larger than system's epslon er = np.finfo(a.ravel()[0]).eps * 1e5 # Initialize G2 G2 = np.zeros(a.shape) # Only computes G2 in voxels that have all eigenvalues larger than zero cond0 = _positive_evals(a, b, c) # Apply formula for non problematic plaussible cases, i.e. b!=c cond1 = np.logical_and(cond0, (abs(b - c) > er)) if np.sum(cond1) != 0: L1 = a[cond1] L2 = b[cond1] L3 = c[cond1] G2[cond1] = \ (L1 + L2 + L3)**2 / (3 * (L2 - L3)**2) * \ ((L2 + L3) / np.sqrt(L2 * L3) - 2) # Resolve possible sigularity b==c cond2 = np.logical_and(cond0, abs(b - c) < er) if np.sum(cond2) != 0: L1 = a[cond2] L2 = b[cond2] G2[cond2] = (L1 + 2. * L2)**2 / (12. * L2**2) return G2 def radial_kurtosis(dki_params, min_kurtosis=-3./7, max_kurtosis=10): r""" Radial Kurtosis (RK) of a diffusion kurtosis tensor [1]_. Parameters ---------- dki_params : ndarray (x, y, z, 27) or (n, 27) All parameters estimated from the diffusion kurtosis model. Parameters are ordered as follows: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first, second and third coordinates of the eigenvector 3) Fifteen elements of the kurtosis tensor min_kurtosis : float (optional) To keep kurtosis values within a plausible biophysical range, radial kurtosis values that are smaller than `min_kurtosis` are replaced with `min_kurtosis`. Default = -3./7 (theoretical kurtosis limit for regions that consist of water confined to spherical pores [2]_) max_kurtosis : float (optional) To keep kurtosis values within a plausible biophysical range, radial kurtosis values that are larger than `max_kurtosis` are replaced with `max_kurtosis`. Default = 10 Returns ------- rk : array Calculated RK. Notes -------- RK is calculated with the following equation [1]_:: .. math:: K_{\bot} = G_1(\lambda_1,\lambda_2,\lambda_3)\hat{W}_{2222} + G_1(\lambda_1,\lambda_3,\lambda_2)\hat{W}_{3333} + G_2(\lambda_1,\lambda_2,\lambda_3)\hat{W}_{2233} where: .. math:: G_1(\lambda_1,\lambda_2,\lambda_3)= \frac{(\lambda_1+\lambda_2+\lambda_3)^2}{18\lambda_2(\lambda_2- \lambda_3)} \left (2\lambda_2 + \frac{\lambda_3^2-3\lambda_2\lambda_3}{\sqrt{\lambda_2\lambda_3}} \right) and .. math:: G_2(\lambda_1,\lambda_2,\lambda_3)= \frac{(\lambda_1+\lambda_2+\lambda_3)^2}{(\lambda_2-\lambda_3)^2} \left ( \frac{\lambda_2+\lambda_3}{\sqrt{\lambda_2\lambda_3}}-2\right ) References ---------- .. [1] Tabesh, A., Jensen, J.H., Ardekani, B.A., Helpern, J.A., 2011. Estimation of tensors and tensor-derived measures in diffusional kurtosis imaging. Magn Reson Med. 65(3), 823-836 .. [2] Barmpoutis, A., & Zhuo, J., 2011. 
Diffusion kurtosis imaging: Robust estimation from DW-MRI using homogeneous polynomials. Proceedings of the 8th {IEEE} International Symposium on Biomedical Imaging: From Nano to Macro, ISBI 2011, 262-265. doi: 10.1109/ISBI.2011.5872402 """ # Flat parameters. For numpy versions more recent than 1.6.0, this step # isn't required outshape = dki_params.shape[:-1] dki_params = dki_params.reshape((-1, dki_params.shape[-1])) # Split the model parameters to three variable containing the evals, evecs, # and kurtosis elements evals, evecs, kt = split_dki_param(dki_params) # Rotate the kurtosis tensor from the standard Cartesian coordinate system # to another coordinate system in which the 3 orthonormal eigenvectors of # DT are the base coordinate Wyyyy = Wrotate_element(kt, 1, 1, 1, 1, evecs) Wzzzz = Wrotate_element(kt, 2, 2, 2, 2, evecs) Wyyzz = Wrotate_element(kt, 1, 1, 2, 2, evecs) # Compute RK RK = \ _G1m(evals[..., 0], evals[..., 1], evals[..., 2]) * Wyyyy + \ _G1m(evals[..., 0], evals[..., 2], evals[..., 1]) * Wzzzz + \ _G2m(evals[..., 0], evals[..., 1], evals[..., 2]) * Wyyzz if min_kurtosis is not None: RK = RK.clip(min=min_kurtosis) if max_kurtosis is not None: RK = RK.clip(max=max_kurtosis) return RK.reshape(outshape) def axial_kurtosis(dki_params, min_kurtosis=-3./7, max_kurtosis=10): r""" Computes axial Kurtosis (AK) from the kurtosis tensor. Parameters ---------- dki_params : ndarray (x, y, z, 27) or (n, 27) All parameters estimated from the diffusion kurtosis model. Parameters are ordered as follows: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first, second and third coordinates of the eigenvector 3) Fifteen elements of the kurtosis tensor min_kurtosis : float (optional) To keep kurtosis values within a plausible biophysical range, axial kurtosis values that are smaller than `min_kurtosis` are replaced with `min_kurtosis`. Default = -3./7 (theoretical kurtosis limit for regions that consist of water confined to spherical pores [1]_) max_kurtosis : float (optional) To keep kurtosis values within a plausible biophysical range, axial kurtosis values that are larger than `max_kurtosis` are replaced with `max_kurtosis`. Default = 10 Returns ------- ak : array Calculated AK. References ---------- .. [1] Barmpoutis, A., & Zhuo, J., 2011. Diffusion kurtosis imaging: Robust estimation from DW-MRI using homogeneous polynomials. Proceedings of the 8th {IEEE} International Symposium on Biomedical Imaging: From Nano to Macro, ISBI 2011, 262-265. 
doi: 10.1109/ISBI.2011.5872402 """ # Flat parameters outshape = dki_params.shape[:-1] dki_params = dki_params.reshape((-1, dki_params.shape[-1])) # Split data evals, evecs, kt = split_dki_param(dki_params) # Initialize AK AK = np.zeros(kt.shape[:-1]) # select relevant voxels to process rel_i = _positive_evals(evals[..., 0], evals[..., 1], evals[..., 2]) kt = kt[rel_i] evecs = evecs[rel_i] evals = evals[rel_i] AKi = AK[rel_i] # Compute MD md = mean_diffusivity(evals) dt = lower_triangular(vec_val_vect(evecs, evals)) # loop over all voxels for vox in range(len(kt)): AKi[vox] = directional_kurtosis(dt[vox], md[vox], kt[vox], np.array([evecs[vox, :, 0]])) # reshape data according to input data AK[rel_i] = AKi if min_kurtosis is not None: AK = AK.clip(min=min_kurtosis) if max_kurtosis is not None: AK = AK.clip(max=max_kurtosis) return AK.reshape(outshape) def _kt_maximum_converge(ang, dt, md, kt): """ Helper function that computes the inverse of the directional kurtosis of a voxel along a given direction in polar coordinates. Parameters ---------- ang : array (2,) array containing the two polar angles dt : array (6,) elements of the diffusion tensor of the voxel. md : float mean diffusivity of the voxel kt : array (15,) elements of the kurtosis tensor of the voxel. Returns ------- neg_kt : float The inverse value of the apparent kurtosis for the given direction. Notes ----- This function is used to refine the kurtosis maximum estimate See also -------- dipy.reconst.dki.kurtosis_maximum """ n = np.array([sphere2cart(1, ang[0], ang[1])]) return -1. * directional_kurtosis(dt, md, kt, n) def _voxel_kurtosis_maximum(dt, md, kt, sphere, gtol=1e-2): """ Computes the maximum value of a single voxel kurtosis tensor Parameters ---------- dt : array (6,) elements of the diffusion tensor of the voxel. md : float mean diffusivity of the voxel kt : array (15,) elements of the kurtosis tensor of the voxel. sphere : Sphere class instance, optional The sphere providing sample directions for the initial search of the maximum value of kurtosis. gtol : float, optional This input is to refine kurtosis maximum under the precision of the directions sampled on the sphere class instance. The gradient of the convergence procedure must be less than gtol before successful termination. 
        If gtol is None, the direction of maximum kurtosis is taken directly
        from the initial sampled directions of the given sphere object.

    Returns
    -------
    max_value : float
        kurtosis tensor maximum value
    max_dir : array (3,)
        Cartesian coordinates of the direction of the maximal kurtosis value
    """
    # Estimation of maximum kurtosis candidates
    akc = directional_kurtosis(dt, md, kt, sphere.vertices)
    max_val, ind = local_maxima(akc, sphere.edges)
    n = len(max_val)

    # if no maximum was found (e.g. spherical or null kurtosis tensors)
    if n == 0:
        return np.mean(akc), np.zeros(3)

    max_dir = sphere.vertices[ind]

    # Select the maximum from the candidates
    max_value = max(max_val)
    max_direction = max_dir[np.argmax(max_val)]

    # refine maximum direction
    if gtol is not None:
        for p in range(n):
            r, theta, phi = cart2sphere(max_dir[p, 0], max_dir[p, 1],
                                        max_dir[p, 2])
            ang = np.array([theta, phi])
            ang[:] = opt.fmin_bfgs(_kt_maximum_converge, ang,
                                   args=(dt, md, kt), disp=False,
                                   retall=False, gtol=gtol)
            k_dir = np.array([sphere2cart(1., ang[0], ang[1])])
            k_val = directional_kurtosis(dt, md, kt, k_dir)
            if k_val > max_value:
                max_value = k_val
                max_direction = k_dir

    return max_value, max_direction


def kurtosis_maximum(dki_params, sphere='repulsion100', gtol=1e-2, mask=None):
    """ Computes the maximum value of the kurtosis tensor

    Parameters
    ----------
    dki_params : ndarray (x, y, z, 27) or (n, 27)
        All parameters estimated from the diffusion kurtosis model.
        Parameters are ordered as follows:
            1) Three diffusion tensor's eigenvalues
            2) Three lines of the eigenvector matrix each containing the
               first, second and third coordinates of the eigenvector
            3) Fifteen elements of the kurtosis tensor
    sphere : Sphere class instance, optional
        The sphere providing sample directions for the initial search of the
        maximal value of kurtosis.
    gtol : float, optional
        This input is to refine kurtosis maximum under the precision of the
        directions sampled on the sphere class instance. The gradient of the
        convergence procedure must be less than gtol before successful
        termination. If gtol is None, the direction of maximum kurtosis is
        taken directly from the initial sampled directions of the given
        sphere object.
    mask : ndarray
        A boolean array used to mark the coordinates in the data that
        should be analyzed that has the shape dki_params.shape[:-1]

    Returns
    -------
    kt_max : ndarray (x, y, z) or (n,)
        Kurtosis tensor maximum value in each analyzed voxel
    """
    shape = dki_params.shape[:-1]

    # load gradient directions
    if not isinstance(sphere, dps.Sphere):
        sphere = get_sphere(sphere)

    # select the voxels in which to search for the kurtosis maximum
    if mask is None:
        mask = np.ones(shape, dtype='bool')
    else:
        if mask.shape != shape:
            raise ValueError("Mask is not the same shape as dki_params.")

    evals, evecs, kt = split_dki_param(dki_params)

    # select non-zero voxels
    pos_evals = _positive_evals(evals[..., 0], evals[..., 1], evals[..., 2])
    mask = np.logical_and(mask, pos_evals)

    kt_max = np.zeros(mask.shape)

    dt = lower_triangular(vec_val_vect(evecs, evals))
    md = mean_diffusivity(evals)

    for idx in ndindex(shape):
        if not mask[idx]:
            continue
        kt_max[idx], da = _voxel_kurtosis_maximum(dt[idx], md[idx], kt[idx],
                                                  sphere, gtol=gtol)

    return kt_max


def dki_prediction(dki_params, gtab, S0=1.):
    """ Predict a signal given diffusion kurtosis imaging parameters.

    Parameters
    ----------
    dki_params : ndarray (x, y, z, 27) or (n, 27)
        All parameters estimated from the diffusion kurtosis model.
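# --- Usage sketch (illustrative; not part of the original dipy module) ------
# kurtosis_maximum above performs a coarse search over the sphere directions
# followed by a gradient-based refinement (skipped when gtol is None). The
# parameter array below is hand-made and tiled into a toy 2x2x2 volume.
import numpy as np
from dipy.data import get_sphere
from dipy.reconst.dki import kurtosis_maximum

evals = np.array([1.7e-3, 4e-4, 4e-4])
evecs = np.eye(3)
kt = np.zeros(15)
kt[[0, 1, 2]] = [2.5, 0.5, 0.5]
kt[[9, 10, 11]] = [0.3, 0.3, 0.5]
dki_params = np.tile(np.concatenate((evals, evecs.ravel(), kt)), (2, 2, 2, 1))

kt_max = kurtosis_maximum(dki_params, sphere=get_sphere('repulsion100'),
                          gtol=1e-2)
kt_max_coarse = kurtosis_maximum(dki_params, gtol=None)   # no refinement
# --- end of usage sketch -----------------------------------------------------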
Parameters are ordered as follows: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first, second and third coordinates of the eigenvector 3) Fifteen elements of the kurtosis tensor gtab : a GradientTable class instance The gradient table for this prediction S0 : float or ndarray (optional) The non diffusion-weighted signal in every voxel, or across all voxels. Default: 150 Returns -------- S : (..., N) ndarray Simulated signal based on the DKI model: .. math:: S=S_{0}e^{-bD+\frac{1}{6}b^{2}D^{2}K} """ evals, evecs, kt = split_dki_param(dki_params) # Define DKI design matrix according to given gtab A = design_matrix(gtab) # Flat parameters and initialize pred_sig fevals = evals.reshape((-1, evals.shape[-1])) fevecs = evecs.reshape((-1,) + evecs.shape[-2:]) fkt = kt.reshape((-1, kt.shape[-1])) pred_sig = np.zeros((len(fevals), len(gtab.bvals))) if isinstance(S0, np.ndarray): S0_vol = np.reshape(S0, (len(fevals))) else: S0_vol = S0 # looping for all voxels for v in range(len(pred_sig)): DT = np.dot(np.dot(fevecs[v], np.diag(fevals[v])), fevecs[v].T) dt = lower_triangular(DT) MD = (dt[0] + dt[2] + dt[5]) / 3 if isinstance(S0_vol, np.ndarray): this_S0 = S0_vol[v] else: this_S0 = S0_vol X = np.concatenate((dt, fkt[v] * MD * MD, np.array([np.log(this_S0)])), axis=0) pred_sig[v] = np.exp(np.dot(A, X)) # Reshape data according to the shape of dki_params pred_sig = pred_sig.reshape(dki_params.shape[:-1] + (pred_sig.shape[-1],)) return pred_sig class DiffusionKurtosisModel(ReconstModel): """ Class for the Diffusion Kurtosis Model """ def __init__(self, gtab, fit_method="WLS", *args, **kwargs): """ Diffusion Kurtosis Tensor Model [1] Parameters ---------- gtab : GradientTable class instance fit_method : str or callable str can be one of the following: 'OLS' or 'ULLS' for ordinary least squares dki.ols_fit_dki 'WLS' or 'UWLLS' for weighted ordinary least squares dki.wls_fit_dki callable has to have the signature: fit_method(design_matrix, data, *args, **kwargs) args, kwargs : arguments and key-word arguments passed to the fit_method. See dki.ols_fit_dki, dki.wls_fit_dki for details References ---------- [1] Tabesh, A., Jensen, J.H., Ardekani, B.A., Helpern, J.A., 2011. Estimation of tensors and tensor-derived measures in diffusional kurtosis imaging. Magn Reson Med. 65(3), 823-836 """ ReconstModel.__init__(self, gtab) if not callable(fit_method): try: self.fit_method = common_fit_methods[fit_method] except KeyError: raise ValueError('"' + str(fit_method) + '" is not a known ' 'fit method, the fit method should either be ' 'a function or one of the common fit methods') self.design_matrix = design_matrix(self.gtab) self.args = args self.kwargs = kwargs self.min_signal = self.kwargs.pop('min_signal', None) if self.min_signal is not None and self.min_signal <= 0: e_s = "The `min_signal` key-word argument needs to be strictly" e_s += " positive." raise ValueError(e_s) # Check if at least three b-values are given enough_b = check_multi_b(self.gtab, 3, non_zero=False) if not enough_b: mes = "DKI requires at least 3 b-values (which can include b=0)" raise ValueError(mes) def fit(self, data, mask=None): """ Fit method of the DKI model class Parameters ---------- data : array The measured signal from one voxel. 
mask : array A boolean array used to mark the coordinates in the data that should be analyzed that has the shape data.shape[-1] """ if mask is not None: # Check for valid shape of the mask if mask.shape != data.shape[:-1]: raise ValueError("Mask is not the same shape as data.") mask = np.array(mask, dtype=bool, copy=False) data_in_mask = np.reshape(data[mask], (-1, data.shape[-1])) if self.min_signal is None: min_signal = MIN_POSITIVE_SIGNAL else: min_signal = self.min_signal data_in_mask = np.maximum(data_in_mask, min_signal) params_in_mask = self.fit_method(self.design_matrix, data_in_mask, *self.args, **self.kwargs) if mask is None: out_shape = data.shape[:-1] + (-1, ) dki_params = params_in_mask.reshape(out_shape) else: dki_params = np.zeros(data.shape[:-1] + (27,)) dki_params[mask, :] = params_in_mask return DiffusionKurtosisFit(self, dki_params) def predict(self, dki_params, S0=1.): """ Predict a signal for this DKI model class instance given parameters. Parameters ---------- dki_params : ndarray (x, y, z, 27) or (n, 27) All parameters estimated from the diffusion kurtosis model. Parameters are ordered as follows: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first, second and third coordinates of the eigenvector 3) Fifteen elements of the kurtosis tensor S0 : float or ndarray (optional) The non diffusion-weighted signal in every voxel, or across all voxels. Default: 1 """ return dki_prediction(dki_params, self.gtab, S0) class DiffusionKurtosisFit(TensorFit): """ Class for fitting the Diffusion Kurtosis Model""" def __init__(self, model, model_params): """ Initialize a DiffusionKurtosisFit class instance. Since DKI is an extension of DTI, class instance is defined as subclass of the TensorFit from dti.py Parameters ---------- model : DiffusionKurtosisModel Class instance Class instance containing the Diffusion Kurtosis Model for the fit model_params : ndarray (x, y, z, 27) or (n, 27) All parameters estimated from the diffusion kurtosis model. Parameters are ordered as follows: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first, second and third coordinates of the eigenvector 3) Fifteen elements of the kurtosis tensor """ TensorFit.__init__(self, model, model_params) @property def kt(self): """ Returns the 15 independent elements of the kurtosis tensor as an array """ return self.model_params[..., 12:] def akc(self, sphere): r""" Calculates the apparent kurtosis coefficient (AKC) in each direction on the sphere for each voxel in the data Parameters ---------- sphere : Sphere class instance Returns ------- akc : ndarray The estimates of the apparent kurtosis coefficient in every direction on the input sphere Notes ----- For each sphere direction with coordinates $(n_{1}, n_{2}, n_{3})$, the calculation of AKC is done using formula: .. math :: AKC(n)=\frac{MD^{2}}{ADC(n)^{2}}\sum_{i=1}^{3}\sum_{j=1}^{3} \sum_{k=1}^{3}\sum_{l=1}^{3}n_{i}n_{j}n_{k}n_{l}W_{ijkl} where $W_{ijkl}$ are the elements of the kurtosis tensor, MD the mean diffusivity and ADC the apparent diffusion coefficent computed as: .. math :: ADC(n)=\sum_{i=1}^{3}\sum_{j=1}^{3}n_{i}n_{j}D_{ij} where $D_{ij}$ are the elements of the diffusion tensor. """ return apparent_kurtosis_coef(self.model_params, sphere) def mk(self, min_kurtosis=-3./7, max_kurtosis=10): r""" Computes mean Kurtosis (MK) from the kurtosis tensor. 
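# --- Usage sketch (illustrative; not part of the original dipy module) ------
# End-to-end use of the model/fit classes above: a synthetic two-shell
# acquisition is built, a small noiseless volume is simulated with
# dki_prediction, and the model is fitted back. The acquisition scheme, S0 and
# tensor values are all made up for illustration; any dataset with at least
# three b-values can be fitted the same way.
import numpy as np
from dipy.core.gradients import gradient_table
from dipy.data import get_sphere
from dipy.reconst.dki import DiffusionKurtosisModel, dki_prediction

sphere = get_sphere('repulsion100')
bvecs = np.concatenate(([[0, 0, 0]], sphere.vertices, sphere.vertices))
bvals = np.concatenate(([0], 1000. * np.ones(100), 2000. * np.ones(100)))
gtab = gradient_table(bvals, bvecs)

evals = np.array([1.7e-3, 4e-4, 4e-4])
evecs = np.eye(3)
kt = np.zeros(15)
kt[[0, 1, 2]] = [2.5, 0.5, 0.5]
kt[[9, 10, 11]] = [0.3, 0.3, 0.5]
params = np.concatenate((evals, evecs.ravel(), kt))
data = np.tile(dki_prediction(params, gtab, S0=150.), (3, 3, 3, 1))

dkimodel = DiffusionKurtosisModel(gtab, fit_method="WLS")
dkifit = dkimodel.fit(data)

FA, MD = dkifit.fa, dkifit.md                        # tensor-derived measures
MK, AK, RK = dkifit.mk(), dkifit.ak(), dkifit.rk()   # kurtosis measures
pred = dkifit.predict(gtab, S0=150.)                 # back-predicted signal
# --- end of usage sketch -----------------------------------------------------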
Parameters ---------- min_kurtosis : float (optional) To keep kurtosis values within a plausible biophysical range, mean kurtosis values that are smaller than `min_kurtosis` are replaced with `min_kurtosis`. Default = -3./7 (theoretical kurtosis limit for regions that consist of water confined to spherical pores [2]_) max_kurtosis : float (optional) To keep kurtosis values within a plausible biophysical range, mean kurtosis values that are larger than `max_kurtosis` are replaced with `max_kurtosis`. Default = 10 Returns ------- mk : array Calculated MK. Notes -------- The MK analytical solution is calculated using the following equation [1]_: .. math:: MK=F_1(\lambda_1,\lambda_2,\lambda_3)\hat{W}_{1111}+ F_1(\lambda_2,\lambda_1,\lambda_3)\hat{W}_{2222}+ F_1(\lambda_3,\lambda_2,\lambda_1)\hat{W}_{3333}+ \\ F_2(\lambda_1,\lambda_2,\lambda_3)\hat{W}_{2233}+ F_2(\lambda_2,\lambda_1,\lambda_3)\hat{W}_{1133}+ F_2(\lambda_3,\lambda_2,\lambda_1)\hat{W}_{1122} where $\hat{W}_{ijkl}$ are the components of the $W$ tensor in the coordinates system defined by the eigenvectors of the diffusion tensor $\mathbf{D}$ and .. math:: F_1(\lambda_1,\lambda_2,\lambda_3)= \frac{(\lambda_1+\lambda_2+\lambda_3)^2} {18(\lambda_1-\lambda_2)(\lambda_1-\lambda_3)} [\frac{\sqrt{\lambda_2\lambda_3}}{\lambda_1} R_F(\frac{\lambda_1}{\lambda_2},\frac{\lambda_1}{\lambda_3},1)+\\ \frac{3\lambda_1^2-\lambda_1\lambda_2-\lambda_2\lambda_3- \lambda_1\lambda_3} {3\lambda_1 \sqrt{\lambda_2 \lambda_3}} R_D(\frac{\lambda_1}{\lambda_2},\frac{\lambda_1}{\lambda_3},1)-1 ] and .. math:: F_2(\lambda_1,\lambda_2,\lambda_3)= \frac{(\lambda_1+\lambda_2+\lambda_3)^2} {3(\lambda_2-\lambda_3)^2} [\frac{\lambda_2+\lambda_3}{\sqrt{\lambda_2\lambda_3}} R_F(\frac{\lambda_1}{\lambda_2},\frac{\lambda_1}{\lambda_3},1)+\\ \frac{2\lambda_1-\lambda_2-\lambda_3}{3\sqrt{\lambda_2 \lambda_3}} R_D(\frac{\lambda_1}{\lambda_2},\frac{\lambda_1}{\lambda_3},1)-2] where $R_f$ and $R_d$ are the Carlson's elliptic integrals. References ---------- .. [1] Tabesh, A., Jensen, J.H., Ardekani, B.A., Helpern, J.A., 2011. Estimation of tensors and tensor-derived measures in diffusional kurtosis imaging. Magn Reson Med. 65(3), 823-836 .. [2] Barmpoutis, A., & Zhuo, J., 2011. Diffusion kurtosis imaging: Robust estimation from DW-MRI using homogeneous polynomials. Proceedings of the 8th {IEEE} International Symposium on Biomedical Imaging: From Nano to Macro, ISBI 2011, 262-265. doi: 10.1109/ISBI.2011.5872402 """ return mean_kurtosis(self.model_params, min_kurtosis, max_kurtosis) def ak(self, min_kurtosis=-3./7, max_kurtosis=10): r""" Axial Kurtosis (AK) of a diffusion kurtosis tensor [1]_. Parameters ---------- min_kurtosis : float (optional) To keep kurtosis values within a plausible biophysical range, axial kurtosis values that are smaller than `min_kurtosis` are replaced with -3./7 (theoretical kurtosis limit for regions that consist of water confined to spherical pores [2]_) max_kurtosis : float (optional) To keep kurtosis values within a plausible biophysical range, axial kurtosis values that are larger than `max_kurtosis` are replaced with `max_kurtosis`. Default = 10 Returns ------- ak : array Calculated AK. References ---------- .. [1] Tabesh, A., Jensen, J.H., Ardekani, B.A., Helpern, J.A., 2011. Estimation of tensors and tensor-derived measures in diffusional kurtosis imaging. Magn Reson Med. 65(3), 823-836 .. [2] Barmpoutis, A., & Zhuo, J., 2011. Diffusion kurtosis imaging: Robust estimation from DW-MRI using homogeneous polynomials. 
Proceedings of the 8th {IEEE} International Symposium on Biomedical Imaging: From Nano to Macro, ISBI 2011, 262-265. doi: 10.1109/ISBI.2011.5872402 """ return axial_kurtosis(self.model_params, min_kurtosis, max_kurtosis) def rk(self, min_kurtosis=-3./7, max_kurtosis=10): r""" Radial Kurtosis (RK) of a diffusion kurtosis tensor [1]_. Parameters ---------- min_kurtosis : float (optional) To keep kurtosis values within a plausible biophysical range, axial kurtosis values that are smaller than `min_kurtosis` are replaced with -3./7 (theoretical kurtosis limit for regions that consist of water confined to spherical pores [2]_) max_kurtosis : float (optional) To keep kurtosis values within a plausible biophysical range, axial kurtosis values that are larger than `max_kurtosis` are replaced with `max_kurtosis`. Default = 10 Returns ------- rk : array Calculated RK. Notes ------ RK is calculated with the following equation: .. math:: K_{\bot} = G_1(\lambda_1,\lambda_2,\lambda_3)\hat{W}_{2222} + G_1(\lambda_1,\lambda_3,\lambda_2)\hat{W}_{3333} + G_2(\lambda_1,\lambda_2,\lambda_3)\hat{W}_{2233} where: .. math:: G_1(\lambda_1,\lambda_2,\lambda_3)= \frac{(\lambda_1+\lambda_2+\lambda_3)^2}{18\lambda_2(\lambda_2- \lambda_3)} \left (2\lambda_2 + \frac{\lambda_3^2-3\lambda_2\lambda_3}{\sqrt{\lambda_2\lambda_3}} \right) and .. math:: G_2(\lambda_1,\lambda_2,\lambda_3)= \frac{(\lambda_1+\lambda_2+\lambda_3)^2}{(\lambda_2-\lambda_3)^2} \left ( \frac{\lambda_2+\lambda_3}{\sqrt{\lambda_2\lambda_3}}-2 \right ) References ---------- .. [1] Tabesh, A., Jensen, J.H., Ardekani, B.A., Helpern, J.A., 2011. Estimation of tensors and tensor-derived measures in diffusional kurtosis imaging. Magn Reson Med. 65(3), 823-836 .. [2] Barmpoutis, A., & Zhuo, J., 2011. Diffusion kurtosis imaging: Robust estimation from DW-MRI using homogeneous polynomials. Proceedings of the 8th {IEEE} International Symposium on Biomedical Imaging: From Nano to Macro, ISBI 2011, 262-265. doi: 10.1109/ISBI.2011.5872402 """ return radial_kurtosis(self.model_params, min_kurtosis, max_kurtosis) def kmax(self, sphere='repulsion100', gtol=1e-5, mask=None): r""" Computes the maximum value of a single voxel kurtosis tensor Parameters ---------- sphere : Sphere class instance, optional The sphere providing sample directions for the initial search of the maximum value of kurtosis. gtol : float, optional This input is to refine kurtosis maximum under the precision of the directions sampled on the sphere class instance. The gradient of the convergence procedure must be less than gtol before successful termination. If gtol is None, fiber direction is directly taken from the initial sampled directions of the given sphere object Returns -------- max_value : float kurtosis tensor maximum value """ return kurtosis_maximum(self.model_params, sphere, gtol, mask) def predict(self, gtab, S0=1.): r""" Given a DKI model fit, predict the signal on the vertices of a gradient table Parameters ---------- gtab : a GradientTable class instance The gradient table for this prediction S0 : float or ndarray (optional) The non diffusion-weighted signal in every voxel, or across all voxels. Default: 1 Notes ----- The predicted signal is given by: .. math:: S(n,b)=S_{0}e^{-bD(n)+\frac{1}{6}b^{2}D(n)^{2}K(n)} $\mathbf{D(n)}$ and $\mathbf{K(n)}$ can be computed from the DT and KT using the following equations: .. math:: D(n)=\sum_{i=1}^{3}\sum_{j=1}^{3}n_{i}n_{j}D_{ij} and .. 
math:: K(n)=\frac{MD^{2}}{D(n)^{2}}\sum_{i=1}^{3}\sum_{j=1}^{3} \sum_{k=1}^{3}\sum_{l=1}^{3}n_{i}n_{j}n_{k}n_{l}W_{ijkl} where $D_{ij}$ and $W_{ijkl}$ are the elements of the second-order DT and the fourth-order KT tensors, respectively, and $MD$ is the mean diffusivity. """ return dki_prediction(self.model_params, gtab, S0) def ols_fit_dki(design_matrix, data): r""" Computes ordinary least squares (OLS) fit to calculate the diffusion tensor and kurtosis tensor using a linear regression diffusion kurtosis model [1]_. Parameters ---------- design_matrix : array (g, 22) Design matrix holding the covariants used to solve for the regression coefficients. data : array (N, g) Data or response variables holding the data. Note that the last dimension should contain the data. It makes no copies of data. Returns ------- dki_params : array (N, 27) All parameters estimated from the diffusion kurtosis model. Parameters are ordered as follows: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first, second and third coordinates of the eigenvector 3) Fifteen elements of the kurtosis tensor See Also -------- wls_fit_dki References ---------- [1] Tabesh, A., Jensen, J.H., Ardekani, B.A., Helpern, J.A., 2011. Estimation of tensors and tensor-derived measures in diffusional kurtosis imaging. Magn Reson Med. 65(3), 823-836 """ tol = 1e-6 # preparing data and initializing parameters data = np.asarray(data) data_flat = data.reshape((-1, data.shape[-1])) dki_params = np.empty((len(data_flat), 27)) # inverting design matrix and defining minimum diffusion min_diffusivity = tol / -design_matrix.min() inv_design = np.linalg.pinv(design_matrix) # looping OLS solution on all data voxels for vox in range(len(data_flat)): dki_params[vox] = _ols_iter(inv_design, data_flat[vox], min_diffusivity) # Reshape data according to the input data shape dki_params = dki_params.reshape((data.shape[:-1]) + (27,)) return dki_params def _ols_iter(inv_design, sig, min_diffusivity): """ Helper function used by ols_fit_dki - Applies OLS fit of the diffusion kurtosis model to single voxel signals. Parameters ---------- inv_design : array (g, 22) Inverse of the design matrix holding the covariants used to solve for the regression coefficients. sig : array (g,) Diffusion-weighted signal for a single voxel data. min_diffusivity : float Because negative eigenvalues are not physical and small eigenvalues, much smaller than the diffusion weighting, cause quite a lot of noise in metrics such as fa, diffusivity values smaller than `min_diffusivity` are replaced with `min_diffusivity`. Returns ------- dki_params : array (27,) All parameters estimated from the diffusion kurtosis model. 
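# --- Usage sketch (illustrative; not part of the original dipy module) ------
# ols_fit_dki above (and its weighted counterpart wls_fit_dki, defined below)
# can also be called directly with a design matrix, which is what
# DiffusionKurtosisModel.fit does internally. `gtab` and `data` are assumed to
# be defined as in the fitting sketch given further above.
from dipy.reconst.dki import design_matrix, ols_fit_dki, wls_fit_dki

A = design_matrix(gtab)              # (number of gradients, 22)
params_ols = ols_fit_dki(A, data)    # (..., 27) parameter array
params_wls = wls_fit_dki(A, data)    # same layout, weighted fit
# --- end of usage sketch -----------------------------------------------------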
Parameters are ordered as follows: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first, second and third coordinates of the eigenvector 3) Fifteen elements of the kurtosis tensor """ # DKI ordinary linear least square solution log_s = np.log(sig) result = np.dot(inv_design, log_s) # Extracting the diffusion tensor parameters from solution DT_elements = result[:6] evals, evecs = decompose_tensor(from_lower_triangular(DT_elements), min_diffusivity=min_diffusivity) # Extracting kurtosis tensor parameters from solution MD_square = (evals.mean(0))**2 KT_elements = result[6:21] / MD_square # Write output dki_params = np.concatenate((evals, evecs[0], evecs[1], evecs[2], KT_elements), axis=0) return dki_params def wls_fit_dki(design_matrix, data): r""" Computes weighted linear least squares (WLS) fit to calculate the diffusion tensor and kurtosis tensor using a weighted linear regression diffusion kurtosis model [1]_. Parameters ---------- design_matrix : array (g, 22) Design matrix holding the covariants used to solve for the regression coefficients. data : array (N, g) Data or response variables holding the data. Note that the last dimension should contain the data. It makes no copies of data. min_signal : default = 1 All values below min_signal are repalced with min_signal. This is done in order to avoid taking log(0) durring the tensor fitting. Returns ------- dki_params : array (N, 27) All parameters estimated from the diffusion kurtosis model for all N voxels. Parameters are ordered as follows: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first second and third coordinates of the eigenvector 3) Fifteen elements of the kurtosis tensor References ---------- [1] Veraart, J., Sijbers, J., Sunaert, S., Leemans, A., Jeurissen, B., 2013. Weighted linear least squares estimation of diffusion MRI parameters: Strengths, limitations, and pitfalls. Magn Reson Med 81, 335-346. """ tol = 1e-6 # preparing data and initializing parametres data = np.asarray(data) data_flat = data.reshape((-1, data.shape[-1])) dki_params = np.empty((len(data_flat), 27)) # inverting design matrix and defining minimum diffusion min_diffusivity = tol / -design_matrix.min() inv_design = np.linalg.pinv(design_matrix) # looping WLS solution on all data voxels for vox in range(len(data_flat)): dki_params[vox] = _wls_iter(design_matrix, inv_design, data_flat[vox], min_diffusivity) # Reshape data according to the input data shape dki_params = dki_params.reshape((data.shape[:-1]) + (27,)) return dki_params def _wls_iter(design_matrix, inv_design, sig, min_diffusivity): """ Helper function used by wls_fit_dki - Applies WLS fit of the diffusion kurtosis model to single voxel signals. Parameters ---------- design_matrix : array (g, 22) Design matrix holding the covariants used to solve for the regression coefficients inv_design : array (g, 22) Inverse of the design matrix. sig : array (g, ) Diffusion-weighted signal for a single voxel data. min_diffusivity : float Because negative eigenvalues are not physical and small eigenvalues, much smaller than the diffusion weighting, cause quite a lot of noise in metrics such as fa, diffusivity values smaller than `min_diffusivity` are replaced with `min_diffusivity`. Returns ------- dki_params : array (27, ) All parameters estimated from the diffusion kurtosis model. 
Parameters are ordered as follows: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first, second and third coordinates of the eigenvector 3) Fifteen elements of the kurtosis tensor """ A = design_matrix # DKI ordinary linear least square solution log_s = np.log(sig) ols_result = np.dot(inv_design, log_s) # Define weights as diag(yn**2) W = np.diag(np.exp(2 * np.dot(A, ols_result))) # DKI weighted linear least square solution inv_AT_W_A = np.linalg.pinv(np.dot(np.dot(A.T, W), A)) AT_W_LS = np.dot(np.dot(A.T, W), log_s) wls_result = np.dot(inv_AT_W_A, AT_W_LS) # Extracting the diffusion tensor parameters from solution DT_elements = wls_result[:6] evals, evecs = decompose_tensor(from_lower_triangular(DT_elements), min_diffusivity=min_diffusivity) # Extracting kurtosis tensor parameters from solution MD_square = (evals.mean(0))**2 KT_elements = wls_result[6:21] / MD_square # Write output dki_params = np.concatenate((evals, evecs[0], evecs[1], evecs[2], KT_elements), axis=0) return dki_params def Wrotate(kt, Basis): r""" Rotate a kurtosis tensor from the standard Cartesian coordinate system to another coordinate system basis Parameters ---------- kt : (15,) Vector with the 15 independent elements of the kurtosis tensor Basis : array (3, 3) Vectors of the basis column-wise oriented inds : array(m, 4) (optional) Array of vectors containing the four indexes of m specific elements of the rotated kurtosis tensor. If not specified all 15 elements of the rotated kurtosis tensor are computed. Returns -------- Wrot : array (m,) or (15,) Vector with the m independent elements of the rotated kurtosis tensor. If 'indices' is not specified all 15 elements of the rotated kurtosis tensor are computed. Note ------ KT elements are assumed to be ordered as follows: .. math:: \begin{matrix} ( & W_{xxxx} & W_{yyyy} & W_{zzzz} & W_{xxxy} & W_{xxxz} & ... \\ & W_{xyyy} & W_{yyyz} & W_{xzzz} & W_{yzzz} & W_{xxyy} & ... \\ & W_{xxzz} & W_{yyzz} & W_{xxyz} & W_{xyyz} & W_{xyzz} & & )\end{matrix} References ---------- [1] Hui ES, Cheung MM, Qi L, Wu EX, 2008. Towards better MR characterization of neural tissues using directional diffusion kurtosis analysis. Neuroimage 42(1): 122-34 """ inds = np.array([[0, 0, 0, 0], [1, 1, 1, 1], [2, 2, 2, 2], [0, 0, 0, 1], [0, 0, 0, 2], [0, 1, 1, 1], [1, 1, 1, 2], [0, 2, 2, 2], [1, 2, 2, 2], [0, 0, 1, 1], [0, 0, 2, 2], [1, 1, 2, 2], [0, 0, 1, 2], [0, 1, 1, 2], [0, 1, 2, 2]]) Wrot = np.zeros(kt.shape) for e in range(len(inds)): Wrot[..., e] = Wrotate_element(kt, inds[e][0], inds[e][1], inds[e][2], inds[e][3], Basis) return Wrot # Defining keys to select a kurtosis tensor element with indexes (i, j, k, l) # on a kt vector that contains only the 15 independent elements of the kurtosis # tensor: Considering y defined by (i+1) * (j+1) * (k+1) * (l+1). Two elements # of the full 4D kurtosis tensor are equal if y obtain from the indexes of # these two element are equal. Therefore, the possible values of y (1, 16, 81, # 2, 3, 8, 24 27, 54, 4, 9, 36, 6, 12, 18) are used to point each element of # the kurtosis tensor on the format of a vector containing the 15 independent # elements. ind_ele = {1: 0, 16: 1, 81: 2, 2: 3, 3: 4, 8: 5, 24: 6, 27: 7, 54: 8, 4: 9, 9: 10, 36: 11, 6: 12, 12: 13, 18: 14} def Wrotate_element(kt, indi, indj, indk, indl, B): r""" Computes the the specified index element of a kurtosis tensor rotated to the coordinate system basis B. 
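# --- Usage sketch (illustrative; not part of the original dipy module) ------
# Wrotate above re-expresses the 15 kurtosis elements in another orthonormal
# basis (typically the diffusion tensor eigenvectors), and Wcons (defined just
# below) rebuilds the full symmetric 4D tensor. Values are illustrative only.
import numpy as np
from dipy.reconst.dki import Wrotate, Wcons

evecs = np.eye(3)                       # basis: here the trivial rotation
kt = np.zeros(15)
kt[[0, 1, 2]] = [2.5, 0.5, 0.5]
kt[[9, 10, 11]] = [0.3, 0.3, 0.5]

kt_rot = Wrotate(kt, evecs)             # kurtosis elements in the new basis
W = Wcons(kt_rot)                       # full (3, 3, 3, 3) kurtosis tensor
# --- end of usage sketch -----------------------------------------------------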
Parameters ---------- kt : ndarray (x, y, z, 15) or (n, 15) Array containing the 15 independent elements of the kurtosis tensor indi : int Rotated kurtosis tensor element index i (0 for x, 1 for y, 2 for z) indj : int Rotated kurtosis tensor element index j (0 for x, 1 for y, 2 for z) indk : int Rotated kurtosis tensor element index k (0 for x, 1 for y, 2 for z) indl: int Rotated kurtosis tensor element index l (0 for x, 1 for y, 2 for z) B: array (x, y, z, 3, 3) or (n, 15) Vectors of the basis column-wise oriented Returns ------- Wre : float rotated kurtosis tensor element of index ind_i, ind_j, ind_k, ind_l Note ----- It is assumed that initial kurtosis tensor elementes are defined on the Cartesian coordinate system. References ---------- [1] Hui ES, Cheung MM, Qi L, Wu EX, 2008. Towards better MR characterization of neural tissues using directional diffusion kurtosis analysis. Neuroimage 42(1): 122-34 """ Wre = 0 xyz = [0, 1, 2] for il in xyz: for jl in xyz: for kl in xyz: for ll in xyz: key = (il + 1) * (jl + 1) * (kl + 1) * (ll + 1) multiplyB = \ B[..., il, indi] * B[..., jl, indj] * \ B[..., kl, indk] * B[..., ll, indl] Wre = Wre + multiplyB * kt[..., ind_ele[key]] return Wre def Wcons(k_elements): r""" Construct the full 4D kurtosis tensors from its 15 independent elements Parameters ---------- k_elements : (15,) elements of the kurtosis tensor in the following order: .. math:: \begin{matrix} ( & W_{xxxx} & W_{yyyy} & W_{zzzz} & W_{xxxy} & W_{xxxz} & ... \\ & W_{xyyy} & W_{yyyz} & W_{xzzz} & W_{yzzz} & W_{xxyy} & ... \\ & W_{xxzz} & W_{yyzz} & W_{xxyz} & W_{xyyz} & W_{xyzz} & & )\end{matrix} Returns ------- W : array(3, 3, 3, 3) Full 4D kurtosis tensor """ W = np.zeros((3, 3, 3, 3)) xyz = [0, 1, 2] for ind_i in xyz: for ind_j in xyz: for ind_k in xyz: for ind_l in xyz: key = (ind_i + 1) * (ind_j + 1) * (ind_k + 1) * (ind_l + 1) W[ind_i][ind_j][ind_k][ind_l] = k_elements[ind_ele[key]] return W def split_dki_param(dki_params): r""" Extract the diffusion tensor eigenvalues, the diffusion tensor eigenvector matrix, and the 15 independent elements of the kurtosis tensor from the model parameters estimated from the DKI model Parameters ---------- dki_params : ndarray (x, y, z, 27) or (n, 27) All parameters estimated from the diffusion kurtosis model. Parameters are ordered as follows: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first, second and third coordinates of the eigenvector 3) Fifteen elements of the kurtosis tensor Returns -------- eigvals : array (x, y, z, 3) or (n, 3) Eigenvalues from eigen decomposition of the tensor. eigvecs : array (x, y, z, 3, 3) or (n, 3, 3) Associated eigenvectors from eigen decomposition of the tensor. Eigenvectors are columnar (e.g. 
eigvecs[:,j] is associated with eigvals[j]) kt : array (x, y, z, 15) or (n, 15) Fifteen elements of the kurtosis tensor """ evals = dki_params[..., :3] evecs = dki_params[..., 3:12].reshape(dki_params.shape[:-1] + (3, 3)) kt = dki_params[..., 12:] return evals, evecs, kt common_fit_methods = {'WLS': wls_fit_dki, 'OLS': ols_fit_dki, 'UWLLS': wls_fit_dki, 'ULLS': ols_fit_dki, 'WLLS': wls_fit_dki, 'OLLS': ols_fit_dki, } dipy-0.13.0/dipy/reconst/dki_micro.py000066400000000000000000000576071317371701200175150ustar00rootroot00000000000000#!/usr/bin/python """ Classes and functions for fitting the DKI-based microstructural model """ from __future__ import division, print_function, absolute_import import numpy as np from dipy.reconst.dti import (lower_triangular, from_lower_triangular, decompose_tensor, trace, mean_diffusivity, radial_diffusivity, axial_diffusivity, MIN_POSITIVE_SIGNAL) from dipy.reconst.dki import (split_dki_param, _positive_evals, directional_kurtosis, directional_diffusion, kurtosis_maximum, DiffusionKurtosisModel, DiffusionKurtosisFit) from dipy.reconst.dti import design_matrix as dti_design_matrix from dipy.core.ndindex import ndindex from dipy.reconst.vec_val_sum import vec_val_vect from dipy.data import get_sphere import dipy.core.sphere as dps def axonal_water_fraction(dki_params, sphere='repulsion100', gtol=1e-2, mask=None): """ Computes the axonal water fraction from DKI [1]_. Parameters ---------- dki_params : ndarray (x, y, z, 27) or (n, 27) All parameters estimated from the diffusion kurtosis model. Parameters are ordered as follows: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first, second and third coordinates of the eigenvector 3) Fifteen elements of the kurtosis tensor sphere : Sphere class instance, optional The sphere providing sample directions for the initial search of the maximal value of kurtosis. gtol : float, optional This input is to refine kurtosis maxima under the precision of the directions sampled on the sphere class instance. The gradient of the convergence procedure must be less than gtol before successful termination. If gtol is None, fiber direction is directly taken from the initial sampled directions of the given sphere object mask : ndarray A boolean array used to mark the coordinates in the data that should be analyzed that has the shape dki_params.shape[:-1] Returns -------- awf : ndarray (x, y, z) or (n) Axonal Water Fraction References ---------- .. [1] Fieremans E, Jensen JH, Helpern JA, 2011. White matter characterization with diffusional kurtosis imaging. Neuroimage 58(1):177-88. doi: 10.1016/j.neuroimage.2011.06.006 """ kt_max = kurtosis_maximum(dki_params, sphere=sphere, gtol=gtol, mask=mask) awf = kt_max / (kt_max + 3) return awf def diffusion_components(dki_params, sphere='repulsion100', awf=None, mask=None): """ Extracts the restricted and hindered diffusion tensors of well aligned fibers from diffusion kurtosis imaging parameters [1]_. Parameters ---------- dki_params : ndarray (x, y, z, 27) or (n, 27) All parameters estimated from the diffusion kurtosis model. Parameters are ordered as follows: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first, second and third coordinates of the eigenvector 3) Fifteen elements of the kurtosis tensor sphere : Sphere class instance, optional The sphere providing sample directions to sample the restricted and hindered cellular diffusion tensors. 
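# --- Usage sketch (illustrative; not part of the original dipy module) ------
# axonal_water_fraction above maps the kurtosis maximum to AWF = Kmax/(Kmax+3)
# voxel-wise. The DKI parameters below are hand-made and tiled into a toy
# volume purely for illustration.
import numpy as np
from dipy.reconst.dki_micro import axonal_water_fraction

evals = np.array([1.7e-3, 4e-4, 4e-4])
evecs = np.eye(3)
kt = np.zeros(15)
kt[[0, 1, 2]] = [2.5, 0.5, 0.5]
kt[[9, 10, 11]] = [0.3, 0.3, 0.5]
dki_params = np.tile(np.concatenate((evals, evecs.ravel(), kt)), (2, 2, 2, 1))

awf = axonal_water_fraction(dki_params, sphere='repulsion100', gtol=1e-2)
# --- end of usage sketch -----------------------------------------------------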
For more details see Fieremans et al., 2011. awf : ndarray (optional) Array containing values of the axonal water fraction that has the shape dki_params.shape[:-1]. If not given this will be automatically computed using :func:`axonal_water_fraction`" with function's default precision. mask : ndarray (optional) A boolean array used to mark the coordinates in the data that should be analyzed that has the shape dki_params.shape[:-1] Returns -------- edt : ndarray (x, y, z, 6) or (n, 6) Parameters of the hindered diffusion tensor. idt : ndarray (x, y, z, 6) or (n, 6) Parameters of the restricted diffusion tensor. Note ---- In the original article of DKI microstructural model [1]_, the hindered and restricted tensors were definde as the intra-cellular and extra-cellular diffusion compartments respectively. References ---------- .. [1] Fieremans E, Jensen JH, Helpern JA, 2011. White matter characterization with diffusional kurtosis imaging. Neuroimage 58(1):177-88. doi: 10.1016/j.neuroimage.2011.06.006 """ shape = dki_params.shape[:-1] # load gradient directions if not isinstance(sphere, dps.Sphere): sphere = get_sphere(sphere) # select voxels where to apply the single fiber model if mask is None: mask = np.ones(shape, dtype='bool') else: if mask.shape != shape: raise ValueError("Mask is not the same shape as dki_params.") else: mask = np.array(mask, dtype=bool, copy=False) # check or compute awf values if awf is None: awf = axonal_water_fraction(dki_params, sphere=sphere, mask=mask) else: if awf.shape != shape: raise ValueError("awf array is not the same shape as dki_params.") # Initialize hindered and restricted diffusion tensors edt_all = np.zeros(shape + (6,)) idt_all = np.zeros(shape + (6,)) # Generate matrix that converts apparant diffusion coefficients to tensors B = np.zeros((sphere.x.size, 6)) B[:, 0] = sphere.x * sphere.x # Bxx B[:, 1] = sphere.x * sphere.y * 2. # Bxy B[:, 2] = sphere.y * sphere.y # Byy B[:, 3] = sphere.x * sphere.z * 2. # Bxz B[:, 4] = sphere.y * sphere.z * 2. # Byz B[:, 5] = sphere.z * sphere.z # Bzz pinvB = np.linalg.pinv(B) # Compute hindered and restricted diffusion tensors for all voxels evals, evecs, kt = split_dki_param(dki_params) dt = lower_triangular(vec_val_vect(evecs, evals)) md = mean_diffusivity(evals) index = ndindex(mask.shape) for idx in index: if not mask[idx]: continue # sample apparent diffusion and kurtosis values di = directional_diffusion(dt[idx], sphere.vertices) ki = directional_kurtosis(dt[idx], md[idx], kt[idx], sphere.vertices, adc=di, min_kurtosis=0) edi = di * (1 + np.sqrt(ki * awf[idx] / (3.0 - 3.0 * awf[idx]))) edt = np.dot(pinvB, edi) edt_all[idx] = edt # We only move on if there is an axonal water fraction. # Otherwise, remaining params are already zero, so move on if awf[idx] == 0: continue # Convert apparent diffusion and kurtosis values to apparent diffusion # values of the hindered and restricted diffusion idi = di * (1 - np.sqrt(ki * (1.0 - awf[idx]) / (3.0 * awf[idx]))) # generate hindered and restricted diffusion tensors idt = np.dot(pinvB, idi) idt_all[idx] = idt return edt_all, idt_all def dkimicro_prediction(params, gtab, S0=1): r""" Signal prediction given the DKI microstructure model parameters. Parameters ---------- params : ndarray (x, y, z, 40) or (n, 40) All parameters estimated from the diffusion kurtosis microstructure model. 
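# --- Usage sketch (illustrative; not part of the original dipy module) ------
# diffusion_components above returns the hindered and restricted tensors (in
# lower-triangular order) for each voxel. `dki_params` is assumed to be a
# (..., 27) DKI parameter array, e.g. the one assembled in the
# axonal_water_fraction sketch above.
from dipy.reconst.dki_micro import axonal_water_fraction, diffusion_components

awf = axonal_water_fraction(dki_params)
edt, idt = diffusion_components(dki_params, awf=awf)   # hindered, restricted
# --- end of usage sketch -----------------------------------------------------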
Parameters are ordered as follows: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first, second and third coordinates of the eigenvector 3) Fifteen elements of the kurtosis tensor 4) Six elements of the hindered diffusion tensor 5) Six elements of the restricted diffusion tensor 6) Axonal water fraction gtab : a GradientTable class instance The gradient table for this prediction S0 : float or ndarray The non diffusion-weighted signal in every voxel, or across all voxels. Default: 1 Returns -------- S : (..., N) ndarray Simulated signal based on the DKI microstructure model Notes ----- 1) The predicted signal is given by: $S(\theta, b) = S_0 * [f * e^{-b ADC_{r}} + (1-f) * e^{-b ADC_{h}]$, where $ ADC_{r} and ADC_{h} are the apparent diffusion coefficients of the diffusion hindered and restricted compartment for a given direction $\theta$, $b$ is the b value provided in the GradientTable input for that direction, $f$ is the volume fraction of the restricted diffusion compartment (also known as the axonal water fraction). 2) In the original article of DKI microstructural model [1]_, the hindered and restricted tensors were definde as the intra-cellular and extra-cellular diffusion compartments respectively. """ # Initialize pred_sig pred_sig = np.zeros(params.shape[:-1] + (gtab.bvals.shape[0],)) # Define dti design matrix and region to process D = dti_design_matrix(gtab) evals = params[..., :3] mask = _positive_evals(evals[..., 0], evals[..., 1], evals[..., 2]) # Prepare parameters f = params[..., 27] adce = params[..., 28:34] adci = params[..., 34:40] if isinstance(S0, np.ndarray): S0_vol = S0 * np.ones(params.shape[:-1]) else: S0_vol = S0 # Process pred_sig for all data voxels index = ndindex(evals.shape[:-1]) for v in index: if mask[v]: pred_sig[v] = (1. - f[v]) * np.exp(np.dot(D[:, :6], adce[v])) + \ f[v] * np.exp(np.dot(D[:, :6], adci[v])) return pred_sig * S0_vol def tortuosity(hindered_ad, hindered_rd): """ Computes the tortuosity of the hindered diffusion compartment given its axial and radial diffusivities Parameters ---------- hindered_ad: ndarray Array containing the values of the hindered axial diffusivity. hindered_rd: ndarray Array containing the values of the hindered radial diffusivity. Return ------ Tortuosity of the hindered diffusion compartment """ if not isinstance(hindered_rd, np.ndarray): hindered_rd = np.array(hindered_rd) if not isinstance(hindered_ad, np.ndarray): hindered_ad = np.array(hindered_ad) tortuosity = np.zeros(hindered_rd.shape) # mask to avoid divisions by zero mask = hindered_rd > 0 # Check single voxel cases. For numpy versions more recent than 1.7, # this if else condition is not required since single voxel can be # processed using the same line of code of multi-voxel if hindered_rd.size == 1: if mask: tortuosity = hindered_ad / hindered_rd else: tortuosity[mask] = hindered_ad[mask] / hindered_rd[mask] return tortuosity def _compartments_eigenvalues(cdt): """ Helper function that computes the eigenvalues of a tissue sub compartment given its individual diffusion tensor Parameters ---------- cdt : ndarray (..., 6) Diffusion tensors elements of the tissue compartment stored in lower triangular order. 
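# --- Usage sketch (illustrative; not part of the original dipy module) ------
# tortuosity above is simply the ratio of the hindered axial to radial
# diffusivity. The hindered eigenvalues below are illustrative; in practice
# they would come from decomposing the `edt` tensors returned by
# diffusion_components.
import numpy as np
from dipy.reconst.dti import axial_diffusivity, radial_diffusivity
from dipy.reconst.dki_micro import tortuosity

hindered_evals = np.array([2.0e-3, 7e-4, 7e-4])
tort = tortuosity(axial_diffusivity(hindered_evals),
                  radial_diffusivity(hindered_evals))
# --- end of usage sketch -----------------------------------------------------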
Returns ------- eval : ndarry (..., 3) Eigenvalues of the tissue compartment """ evals, evecs = decompose_tensor(from_lower_triangular(cdt)) return evals class KurtosisMicrostructureModel(DiffusionKurtosisModel): """ Class for the Diffusion Kurtosis Microstructural Model """ def __init__(self, gtab, fit_method="WLS", *args, **kwargs): """ Initialize a KurtosisMicrostrutureModel class instance [1]_. Parameters ---------- gtab : GradientTable class instance fit_method : str or callable str can be one of the following: 'OLS' or 'ULLS' to fit the diffusion tensor and kurtosis tensor using the ordinary linear least squares solution dki.ols_fit_dki 'WLS' or 'UWLLS' to fit the diffusion tensor and kurtosis tensor using the ordinary linear least squares solution dki.wls_fit_dki callable has to have the signature: fit_method(design_matrix, data, *args, **kwargs) args, kwargs : arguments and key-word arguments passed to the fit_method. See dki.ols_fit_dki, dki.wls_fit_dki for details References ---------- .. [1] Fieremans, E., Jensen, J.H., Helpern, J.A., 2011. White Matter Characterization with Diffusion Kurtosis Imaging. Neuroimage 58(1): 177-188. doi:10.1016/j.neuroimage.2011.06.006 """ DiffusionKurtosisModel.__init__(self, gtab, fit_method="WLS", *args, **kwargs) def fit(self, data, mask=None, sphere='repulsion100', gtol=1e-2, awf_only=False): """ Fit method of the Diffusion Kurtosis Microstructural Model Parameters ---------- data : array An 4D matrix containing the diffusion-weighted data. mask : array A boolean array used to mark the coordinates in the data that should be analyzed that has the shape data.shape[-1] sphere : Sphere class instance, optional The sphere providing sample directions for the initial search of the maximal value of kurtosis. gtol : float, optional This input is to refine kurtosis maxima under the precision of the directions sampled on the sphere class instance. The gradient of the convergence procedure must be less than gtol before successful termination. If gtol is None, fiber direction is directly taken from the initial sampled directions of the given sphere object awf_only : bool, optiomal If set to true only the axonal volume fraction is computed from the kurtosis tensor. Default = False """ if mask is not None: # Check for valid shape of the mask if mask.shape != data.shape[:-1]: raise ValueError("Mask is not the same shape as data.") mask = np.array(mask, dtype=bool, copy=False) data_in_mask = np.reshape(data[mask], (-1, data.shape[-1])) if self.min_signal is None: self.min_signal = MIN_POSITIVE_SIGNAL data_in_mask = np.maximum(data_in_mask, self.min_signal) # DKI fit dki_params = self.fit_method(self.design_matrix, data_in_mask, *self.args, **self.kwargs) # Computing awf awf = axonal_water_fraction(dki_params, sphere=sphere, gtol=gtol) if awf_only: params_all_mask = np.concatenate((dki_params, np.array([awf]).T), axis=-1) else: # Computing the hindered and restricted diffusion tensors hdt, rdt = diffusion_components(dki_params, sphere=sphere, awf=awf) params_all_mask = np.concatenate((dki_params, np.array([awf]).T, hdt, rdt), axis=-1) if mask is None: out_shape = data.shape[:-1] + (-1,) params = params_all_mask.reshape(out_shape) else: params = np.zeros(data.shape[:-1] + (params_all_mask.shape[-1],)) params[mask, :] = params_all_mask return KurtosisMicrostructuralFit(self, params) def predict(self, params, S0=1.): """ Predict a signal for the DKI microstructural model class instance given parameters. 
Parameters ---------- params : ndarray (x, y, z, 40) or (n, 40) All parameters estimated from the diffusion kurtosis microstructural model. Parameters are ordered as follows: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first, second and third coordinates of the eigenvector 3) Fifteen elements of the kurtosis tensor 4) Six elements of the hindered diffusion tensor 5) Six elements of the restricted diffusion tensor 6) Axonal water fraction S0 : float or ndarray (optional) The non diffusion-weighted signal in every voxel, or across all voxels. Default: 1 Note ----- In the original article of DKI microstructural model [1]_, the hindered and restricted tensors were definde as the intra-cellular and extra-cellular diffusion compartments respectively. References ---------- .. [1] Fieremans, E., Jensen, J.H., Helpern, J.A., 2011. White Matter Characterization with Diffusion Kurtosis Imaging. Neuroimage 58(1): 177-188. doi:10.1016/j.neuroimage.2011.06.006 """ return dkimicro_prediction(params, self.gtab, S0) class KurtosisMicrostructuralFit(DiffusionKurtosisFit): """ Class for fitting the Diffusion Kurtosis Microstructural Model """ def __init__(self, model, model_params): """ Initialize a KurtosisMicrostructural Fit class instance. Parameters ---------- model : DiffusionKurtosisModel Class instance Class instance containing the Diffusion Kurtosis Model for the fit model_params : ndarray (x, y, z, 40) or (n, 40) All parameters estimated from the diffusion kurtosis microstructural model. Parameters are ordered as follows: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first, second and third coordinates of the eigenvector 3) Fifteen elements of the kurtosis tensor 4) Six elements of the hindered diffusion tensor 5) Six elements of the restricted diffusion tensor 6) Axonal water fraction Note ----- In the original article of DKI microstructural model [1]_, the hindered and restricted tensors were definde as the intra-cellular and extra-cellular diffusion compartments respectively. References ---------- .. [1] Fieremans, E., Jensen, J.H., Helpern, J.A., 2011. White Matter Characterization with Diffusion Kurtosis Imaging. Neuroimage 58(1): 177-188. doi:10.1016/j.neuroimage.2011.06.006 """ DiffusionKurtosisFit.__init__(self, model, model_params) @property def awf(self): """ Returns the volume fraction of the restricted diffusion compartment also known as axonal water fraction. Note ---- The volume fraction of the restricted diffusion compartment can be seem as the volume fraction of the intra-cellular compartment [1]_. References ---------- .. [1] Fieremans, E., Jensen, J.H., Helpern, J.A., 2011. White Matter Characterization with Diffusion Kurtosis Imaging. Neuroimage 58(1): 177-188. doi:10.1016/j.neuroimage.2011.06.006 """ return self.model_params[..., 27] @property def restricted_evals(self): """ Returns the eigenvalues of the restricted diffusion compartment. Note ----- The restricted diffusion tensor can be seem as the tissue's intra-cellular diffusion compartment [1]_. References ---------- .. [1] Fieremans, E., Jensen, J.H., Helpern, J.A., 2011. White Matter Characterization with Diffusion Kurtosis Imaging. Neuroimage 58(1): 177-188. doi:10.1016/j.neuroimage.2011.06.006 """ self._is_awfonly() return _compartments_eigenvalues(self.model_params[..., 34:40]) @property def hindered_evals(self): """ Returns the eigenvalues of the hindered diffusion compartment. 
Note ----- The hindered diffusion tensor can be seem as the tissue's extra-cellular diffusion compartment [1]_. References ---------- .. [1] Fieremans, E., Jensen, J.H., Helpern, J.A., 2011. White Matter Characterization with Diffusion Kurtosis Imaging. Neuroimage 58(1): 177-188. doi:10.1016/j.neuroimage.2011.06.006 """ self._is_awfonly() return _compartments_eigenvalues(self.model_params[..., 28:34]) @property def axonal_diffusivity(self): """ Returns the axonal diffusivity defined as the restricted diffusion tensor trace [1]_. References ---------- .. [1] Fieremans, E., Jensen, J.H., Helpern, J.A., 2011. White Matter Characterization with Diffusion Kurtosis Imaging. Neuroimage 58(1): 177-188. doi:10.1016/j.neuroimage.2011.06.006 """ return trace(self.restricted_evals) @property def hindered_ad(self): """ Returns the axial diffusivity of the hindered compartment. Note ----- The hindered diffusion tensor can be seem as the tissue's extra-cellular diffusion compartment [1]_. References ---------- .. [1] Fieremans, E., Jensen, J.H., Helpern, J.A., 2011. White Matter Characterization with Diffusion Kurtosis Imaging. Neuroimage 58(1): 177-188. doi:10.1016/j.neuroimage.2011.06.006 """ return axial_diffusivity(self.hindered_evals) @property def hindered_rd(self): """ Returns the radial diffusivity of the hindered compartment. Note ----- The hindered diffusion tensor can be seem as the tissue's extra-cellular diffusion compartment [1]_. References ---------- .. [1] Fieremans, E., Jensen, J.H., Helpern, J.A., 2011. White Matter Characterization with Diffusion Kurtosis Imaging. Neuroimage 58(1): 177-188. doi:10.1016/j.neuroimage.2011.06.006 """ return radial_diffusivity(self.hindered_evals) @property def tortuosity(self): """ Returns the tortuosity of the hindered diffusion which is defined by ADe / RDe, where ADe and RDe are the axial and radial diffusivities of the hindered compartment [1]_. Note ----- The hindered diffusion tensor can be seem as the tissue's extra-cellular diffusion compartment [1]_. References ---------- .. [1] Fieremans, E., Jensen, J.H., Helpern, J.A., 2011. White Matter Characterization with Diffusion Kurtosis Imaging. Neuroimage 58(1): 177-188. doi:10.1016/j.neuroimage.2011.06.006 """ return tortuosity(self.hindered_ad, self.hindered_rd) def _is_awfonly(self): """ To raise error if only the axonal water fraction was computed """ if self.model_params.shape[-1] < 39: raise ValueError('Only the awf was processed! Rerun model fit ' 'with input parameter awf_only set to False') def predict(self, gtab, S0=1.): r""" Given a DKI microstructural model fit, predict the signal on the vertices of a gradient table gtab : a GradientTable class instance The gradient table for this prediction S0 : float or ndarray (optional) The non diffusion-weighted signal in every voxel, or across all voxels. Default: 1 Notes ----- The predicted signal is given by: $S(\theta, b) = S_0 * [f * e^{-b ADC_{r}} + (1-f) * e^{-b ADC_{h}]$, where $ADC_{r}$ and $ADC_{h}$ are the apparent diffusion coefficients of the diffusion hindered and restricted compartment for a given direction $\theta$, $b$ is the b value provided in the GradientTable input for that direction, $f$ is the volume fraction of the restricted diffusion compartment (also known as the axonal water fraction). 
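A minimal end-to-end sketch of the microstructural model described above. The variables data and gtab, the b0-based mask threshold, and the module path dipy.reconst.dki_micro are assumptions; a multi-shell acquisition (at least two non-zero b-values) is required for any DKI-based fit:

    from dipy.reconst.dki_micro import KurtosisMicrostructureModel  # assumed path

    dki_micro_model = KurtosisMicrostructureModel(gtab, fit_method="WLS")
    dki_micro_fit = dki_micro_model.fit(data, mask=data[..., 0] > 50)  # hypothetical mask

    awf = dki_micro_fit.awf                      # axonal water fraction
    ad_axon = dki_micro_fit.axonal_diffusivity   # trace of the restricted tensor
    tort = dki_micro_fit.tortuosity              # hindered AD / hindered RD
    # With fit(..., awf_only=True) only awf is available; the compartmental
    # properties above would then raise a ValueError (see _is_awfonly).
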
""" self._is_awfonly() return dkimicro_prediction(self.model_params, gtab, S0) dipy-0.13.0/dipy/reconst/dsi.py000066400000000000000000000525011317371701200163200ustar00rootroot00000000000000import numpy as np from scipy.ndimage import map_coordinates from scipy.fftpack import fftn, fftshift, ifftshift from dipy.reconst.odf import OdfModel, OdfFit from dipy.reconst.cache import Cache from dipy.reconst.multi_voxel import multi_voxel_fit class DiffusionSpectrumModel(OdfModel, Cache): def __init__(self, gtab, qgrid_size=17, r_start=2.1, r_end=6., r_step=0.2, filter_width=32, normalize_peaks=False): r""" Diffusion Spectrum Imaging The theoretical idea underlying this method is that the diffusion propagator $P(\mathbf{r})$ (probability density function of the average spin displacements) can be estimated by applying 3D FFT to the signal values $S(\mathbf{q})$ ..math:: :nowrap: \begin{eqnarray} P(\mathbf{r}) & = & S_{0}^{-1}\int S(\mathbf{q})\exp(-i2\pi\mathbf{q}\cdot\mathbf{r})d\mathbf{r} \end{eqnarray} where $\mathbf{r}$ is the displacement vector and $\mathbf{q}$ is the wavector which corresponds to different gradient directions. Method used to calculate the ODFs. Here we implement the method proposed by Wedeen et. al [1]_. The main assumption for this model is fast gradient switching and that the acquisition gradients will sit on a keyhole Cartesian grid in q_space [3]_. Parameters ---------- gtab : GradientTable, Gradient directions and bvalues container class qgrid_size : int, has to be an odd number. Sets the size of the q_space grid. For example if qgrid_size is 17 then the shape of the grid will be ``(17, 17, 17)``. r_start : float, ODF is sampled radially in the PDF. This parameters shows where the sampling should start. r_end : float, Radial endpoint of ODF sampling r_step : float, Step size of the ODf sampling from r_start to r_end filter_width : float, Strength of the hanning filter References ---------- .. [1] Wedeen V.J et. al, "Mapping Complex Tissue Architecture With Diffusion Spectrum Magnetic Resonance Imaging", MRM 2005. .. [2] Canales-Rodriguez E.J et. al, "Deconvolution in Diffusion Spectrum Imaging", Neuroimage, 2010. .. [3] Garyfallidis E, "Towards an accurate brain tractography", PhD thesis, University of Cambridge, 2012. Examples -------- In this example where we provide the data, a gradient table and a reconstruction sphere, we calculate generalized FA for the first voxel in the data with the reconstruction performed using DSI. >>> from dipy.data import dsi_voxels, get_sphere >>> data, gtab = dsi_voxels() >>> sphere = get_sphere('symmetric724') >>> from dipy.reconst.dsi import DiffusionSpectrumModel >>> ds = DiffusionSpectrumModel(gtab) >>> dsfit = ds.fit(data) >>> from dipy.reconst.odf import gfa >>> np.round(gfa(dsfit.odf(sphere))[0, 0, 0], 2) 0.11 Notes ------ A. Have in mind that DSI expects gradients on both hemispheres. If your gradients span only one hemisphere you need to duplicate the data and project them to the other hemisphere before calling this class. The function dipy.reconst.dsi.half_to_full_qspace can be used for this purpose. B. If you increase the size of the grid (parameter qgrid_size) you will most likely also need to update the r_* parameters. This is because the added zero padding from the increase of gqrid_size also introduces a scaling of the PDF. C. We assume that data only one b0 volume is provided. 
See Also -------- dipy.reconst.gqi.GeneralizedQSampling """ self.bvals = gtab.bvals self.bvecs = gtab.bvecs self.normalize_peaks = normalize_peaks # 3d volume for Sq if qgrid_size % 2 == 0: raise ValueError('qgrid_size needs to be an odd integer') self.qgrid_size = qgrid_size # necessary shifting for centering self.origin = self.qgrid_size // 2 # hanning filter width self.filter = hanning_filter(gtab, filter_width, self.origin) # odf sampling radius self.qradius = np.arange(r_start, r_end, r_step) self.qradiusn = len(self.qradius) # create qspace grid self.qgrid = create_qspace(gtab, self.origin) b0 = np.min(self.bvals) self.dn = (self.bvals > b0).sum() self.gtab = gtab @multi_voxel_fit def fit(self, data): return DiffusionSpectrumFit(self, data) class DiffusionSpectrumFit(OdfFit): def __init__(self, model, data): """ Calculates PDF and ODF and other properties for a single voxel Parameters ---------- model : object, DiffusionSpectrumModel data : 1d ndarray, signal values """ self.model = model self.data = data self.qgrid_sz = self.model.qgrid_size self.dn = self.model.dn self._gfa = None self.npeaks = 5 self._peak_values = None self._peak_indices = None def pdf(self, normalized=True): """ Applies the 3D FFT in the q-space grid to generate the diffusion propagator """ values = self.data * self.model.filter # create the signal volume Sq = np.zeros((self.qgrid_sz, self.qgrid_sz, self.qgrid_sz)) # fill q-space for i in range(len(values)): qx, qy, qz = self.model.qgrid[i] Sq[qx, qy, qz] += values[i] # apply fourier transform Pr = fftshift(np.real(fftn(ifftshift(Sq), 3 * (self.qgrid_sz, )))) # clipping negative values to 0 (ringing artefact) Pr = np.clip(Pr, 0, Pr.max()) # normalize the propagator to obtain a pdf if normalized: Pr /= Pr.sum() return Pr def rtop_signal(self, filtering=True): """ Calculates the return to origin probability (rtop) from the signal rtop equals to the sum of all signal values Parameters ---------- filtering : boolean, optional Whether to perform Hanning filtering. Default: True Returns ------- rtop : float the return to origin probability """ if filtering: values = self.data * self.model.filter else: values = self.data rtop = values.sum() return rtop def rtop_pdf(self, normalized=True): r""" Calculates the return to origin probability from the propagator, which is the propagator evaluated at zero (see Descoteaux et Al. [1]_, Tuch [2]_, Wu et al. [3]_) rtop = P(0) Parameters ---------- normalized : boolean, optional Whether to normalize the propagator by its sum in order to obtain a pdf. Default: True. Returns ------- rtop : float the return to origin probability References ---------- .. [1] Descoteaux M. et. al, "Multiple q-shell diffusion propagator imaging", Medical Image Analysis, vol 15, No. 4, p. 603-621, 2011. .. [2] Tuch D.S., "Diffusion MRI of Complex Tissue Structure", PhD Thesis, 2002. .. [3] Wu Y. et. al, "Computation of Diffusion Function Measures in q -Space Using Magnetic Resonance Hybrid Diffusion Imaging", IEEE TRANSACTIONS ON MEDICAL IMAGING, vol. 27, No. 6, p. 
858-865, 2008 """ Pr = self.pdf(normalized=normalized) center = self.qgrid_sz // 2 rtop = Pr[center, center, center] return rtop def msd_discrete(self, normalized=True): r""" Calculates the mean squared displacement on the discrete propagator ..math:: :nowrap: \begin{equation} MSD:{DSI}=\int_{-\infty}^{\infty}\int_{-\infty}^{\infty}\int_{-\infty}^{\infty} P(\hat{\mathbf{r}}) \cdot \hat{\mathbf{r}}^{2} \ dr_x \ dr_y \ dr_z \end{equation} where $\hat{\mathbf{r}}$ is a point in the 3D Propagator space (see Wu et. al [1]_). Parameters ---------- normalized : boolean, optional Whether to normalize the propagator by its sum in order to obtain a pdf. Default: True Returns ------- msd : float the mean square displacement References ---------- .. [1] Wu Y. et. al, "Hybrid diffusion imaging", NeuroImage, vol 36, p. 617-629, 2007. """ Pr = self.pdf(normalized=normalized) # create the r squared 3D matrix gridsize = self.qgrid_sz center = gridsize // 2 a = np.arange(gridsize) - center x = np.tile(a, (gridsize, gridsize, 1)) y = np.tile(a.reshape(gridsize, 1), (gridsize, 1, gridsize)) z = np.tile(a.reshape(gridsize, 1, 1), (1, gridsize, gridsize)) r2 = x ** 2 + y ** 2 + z ** 2 msd = np.sum(Pr * r2) / float((gridsize ** 3)) return msd def odf(self, sphere): r""" Calculates the real discrete odf for a given discrete sphere ..math:: :nowrap: \begin{equation} \psi_{DSI}(\hat{\mathbf{u}})=\int_{0}^{\infty}P(r\hat{\mathbf{u}})r^{2}dr \end{equation} where $\hat{\mathbf{u}}$ is the unit vector which corresponds to a sphere point. """ interp_coords = self.model.cache_get('interp_coords', key=sphere) if interp_coords is None: interp_coords = pdf_interp_coords(sphere, self.model.qradius, self.model.origin) self.model.cache_set('interp_coords', sphere, interp_coords) Pr = self.pdf() # calculate the orientation distribution function return pdf_odf(Pr, self.model.qradius, interp_coords) def create_qspace(gtab, origin): """ create the 3D grid which holds the signal values (q-space) Parameters ---------- gtab : GradientTable origin : (3,) ndarray center of qspace Returns ------- qgrid : ndarray qspace coordinates """ # create the q-table from bvecs and bvals qtable = create_qtable(gtab, origin) # center and index in qspace volume qgrid = qtable + origin return qgrid.astype('i8') def create_qtable(gtab, origin): """ create a normalized version of gradients Parameters ---------- gtab : GradientTable origin : (3,) ndarray center of qspace Returns ------- qtable : ndarray """ bv = gtab.bvals bsorted = np.sort(bv[np.bitwise_not(gtab.b0s_mask)]) for i in range(len(bsorted)): bmin = bsorted[i] try: if np.sqrt(bv.max() / bmin) > origin + 1: continue else: break except ZeroDivisionError: continue bv = np.sqrt(bv / bmin) qtable = np.vstack((bv, bv, bv)).T * gtab.bvecs return np.floor(qtable + .5) def hanning_filter(gtab, filter_width, origin): """ create a hanning window The signal is premultiplied by a Hanning window before Fourier transform in order to ensure a smooth attenuation of the signal at high q values. 
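A short sketch of the scalar measures defined above. The voxel indices are illustrative and dsmodel, data are the assumed model and 4D data array from the class Examples:

    dsfit = dsmodel.fit(data[50, 50, 50])        # single-voxel fit
    rtop_s = dsfit.rtop_signal()                 # sum of the Hanning-filtered signal
    rtop_p = dsfit.rtop_pdf(normalized=True)     # propagator evaluated at the grid centre
    msd = dsfit.msd_discrete(normalized=True)    # discrete mean squared displacement
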
Parameters ---------- gtab : GradientTable filter_width : int origin : (3,) ndarray center of qspace Returns ------- filter : (N,) ndarray where N is the number of non-b0 gradient directions """ qtable = create_qtable(gtab, origin) # calculate r - hanning filter free parameter r = np.sqrt(qtable[:, 0] ** 2 + qtable[:, 1] ** 2 + qtable[:, 2] ** 2) # setting hanning filter width and hanning return .5 * np.cos(2 * np.pi * r / filter_width) def pdf_interp_coords(sphere, rradius, origin): """ Precompute coordinates for ODF calculation from the PDF Parameters ---------- sphere : object, Sphere rradius : array, shape (N,) line interpolation points origin : array, shape (3,) center of the grid """ interp_coords = rradius * sphere.vertices[np.newaxis].T origin = np.reshape(origin, [-1, 1, 1]) interp_coords = origin + interp_coords return interp_coords def pdf_odf(Pr, rradius, interp_coords): r""" Calculates the real ODF from the diffusion propagator(PDF) Pr Parameters ---------- Pr : array, shape (X, X, X) probability density function rradius : array, shape (N,) interpolation range on the radius interp_coords : array, shape (3, M, N) coordinates in the pdf for interpolating the odf """ PrIs = map_coordinates(Pr, interp_coords, order=1) odf = (PrIs * rradius ** 2).sum(-1) return odf def half_to_full_qspace(data, gtab): """ Half to full Cartesian grid mapping Useful when dMRI data are provided in one qspace hemisphere as DiffusionSpectrum expects data to be in full qspace. Parameters ---------- data : array, shape (X, Y, Z, W) where (X, Y, Z) volume size and W number of gradient directions gtab : GradientTable container for b-values and b-vectors (gradient directions) Returns ------- new_data : array, shape (X, Y, Z, 2 * W -1) new_gtab : GradientTable Notes ----- We assume here that only on b0 is provided with the initial data. If that is not the case then you will need to write your own preparation function before providing the gradients and the data to the DiffusionSpectrumModel class. """ bvals = gtab.bvals bvecs = gtab.bvecs bvals = np.append(bvals, bvals[1:]) bvecs = np.append(bvecs, - bvecs[1:], axis=0) data = np.append(data, data[..., 1:], axis=-1) gtab.bvals = bvals.copy() gtab.bvecs = bvecs.copy() return data, gtab def project_hemisph_bvecs(gtab): """ Project any near identical bvecs to the other hemisphere Parameters ---------- gtab : object, GradientTable Notes ------- Useful only when working with some types of dsi data. """ bvals = gtab.bvals bvecs = gtab.bvecs bvs = bvals[1:] bvcs = bvecs[1:] b = bvs[:, None] * bvcs bb = np.zeros((len(bvs), len(bvs))) pairs = [] for (i, vec) in enumerate(b): for (j, vec2) in enumerate(b): bb[i, j] = np.sqrt(np.sum((vec - vec2) ** 2)) I = np.argsort(bb[i]) for j in I: if j != i: break if (j, i) in pairs: pass else: pairs.append((i, j)) bvecs2 = bvecs.copy() for (i, j) in pairs: bvecs2[1 + j] = - bvecs2[1 + j] return bvecs2, pairs class DiffusionSpectrumDeconvModel(DiffusionSpectrumModel): def __init__(self, gtab, qgrid_size=35, r_start=4.1, r_end=13., r_step=0.4, filter_width=np.inf, normalize_peaks=False): r""" Diffusion Spectrum Deconvolution The idea is to remove the convolution on the DSI propagator that is caused by the truncation of the q-space in the DSI sampling. 
..math:: :nowrap: \begin{eqnarray*} P_{dsi}(\mathbf{r}) & = & S_{0}^{-1}\iiint\limits_{\| \mathbf{q} \| \le \mathbf{q_{max}}} S(\mathbf{q})\exp(-i2\pi\mathbf{q}\cdot\mathbf{r})d\mathbf{q} \\ & = & S_{0}^{-1}\iiint\limits_{\mathbf{q}} \left( S(\mathbf{q}) \cdot M(\mathbf{q}) \right) \exp(-i2\pi\mathbf{q}\cdot\mathbf{r})d\mathbf{q} \\ & = & P(\mathbf{r}) \otimes \left( S_{0}^{-1}\iiint\limits_{\mathbf{q}} M(\mathbf{q}) \exp(-i2\pi\mathbf{q}\cdot\mathbf{r})d\mathbf{q} \right) \\ \end{eqnarray*} where $\mathbf{r}$ is the displacement vector and $\mathbf{q}$ is the wavector which corresponds to different gradient directions, $M(\mathbf{q})$ is a mask corresponding to your q-space sampling and $\otimes$ is the convolution operator [1]_. Parameters ---------- gtab : GradientTable, Gradient directions and bvalues container class qgrid_size : int, has to be an odd number. Sets the size of the q_space grid. For example if qgrid_size is 35 then the shape of the grid will be ``(35, 35, 35)``. r_start : float, ODF is sampled radially in the PDF. This parameters shows where the sampling should start. r_end : float, Radial endpoint of ODF sampling r_step : float, Step size of the ODf sampling from r_start to r_end filter_width : float, Strength of the hanning filter References ---------- .. [1] Canales-Rodriguez E.J et. al, "Deconvolution in Diffusion Spectrum Imaging", Neuroimage, 2010. .. [2] Biggs David S.C. et. al, "Acceleration of Iterative Image Restoration Algorithms", Applied Optics, vol. 36, No. 8, p. 1766-1775, 1997. """ DiffusionSpectrumModel.__init__(self, gtab, qgrid_size, r_start, r_end, r_step, filter_width, normalize_peaks) @multi_voxel_fit def fit(self, data): return DiffusionSpectrumDeconvFit(self, data) class DiffusionSpectrumDeconvFit(DiffusionSpectrumFit): def pdf(self): """ Applies the 3D FFT in the q-space grid to generate the DSI diffusion propagator, remove the background noise with a hard threshold and then deconvolve the propagator with the Lucy-Richardson deconvolution algorithm """ values = self.data # create the signal volume Sq = np.zeros((self.qgrid_sz, self.qgrid_sz, self.qgrid_sz)) # fill q-space for i in range(len(values)): qx, qy, qz = self.model.qgrid[i] Sq[qx, qy, qz] += values[i] # get deconvolution PSF DSID_PSF = self.model.cache_get('deconv_psf', key=self.model.gtab) if DSID_PSF is None: DSID_PSF = gen_PSF(self.model.qgrid, self.qgrid_sz, self.qgrid_sz, self.qgrid_sz) self.model.cache_set('deconv_psf', self.model.gtab, DSID_PSF) # apply fourier transform Pr = fftshift(np.abs(np.real(fftn(ifftshift(Sq), 3 * (self.qgrid_sz, ))))) # threshold propagator Pr = threshold_propagator(Pr) # apply LR deconvolution Pr = LR_deconv(Pr, DSID_PSF, 5, 2) return Pr def threshold_propagator(P, estimated_snr=15.): """ Applies hard threshold on the propagator to remove background noise for the deconvolution. """ P_thresholded = P.copy() threshold = P_thresholded.max() / float(estimated_snr) P_thresholded[P_thresholded < threshold] = 0 return P_thresholded / P_thresholded.sum() def gen_PSF(qgrid_sampling, siz_x, siz_y, siz_z): """ Generate a PSF for DSI Deconvolution by taking the ifft of the binary q-space sampling mask and truncating it to keep only the center. 
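A minimal sketch of the deconvolved variant, reusing the assumed data and gtab together with a reconstruction sphere:

    from dipy.data import get_sphere
    from dipy.reconst.dsi import DiffusionSpectrumDeconvModel

    sphere = get_sphere('symmetric724')
    dsd_model = DiffusionSpectrumDeconvModel(gtab)
    dsd_fit = dsd_model.fit(data[50, 50, 50])    # single voxel, indices illustrative
    odf = dsd_fit.odf(sphere)                    # ODF from the deconvolved propagator
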
""" Sq = np.zeros((siz_x, siz_y, siz_z)) # fill q-space for i in range(qgrid_sampling.shape[0]): qx, qy, qz = qgrid_sampling[i] Sq[qx, qy, qz] = 1 return Sq * np.real(np.fft.fftshift(np.fft.ifftn(np.fft.ifftshift(Sq)))) def LR_deconv(prop, psf, numit=5, acc_factor=1): r""" Perform Lucy-Richardson deconvolution algorithm on a 3D array. Parameters ---------- prop : 3-D ndarray of dtype float The 3D volume to be deconvolve psf : 3-D ndarray of dtype float The filter that will be used for the deconvolution. numit : int Number of Lucy-Richardson iteration to perform. acc_factor : float Exponential acceleration factor as in [1]_. References ---------- .. [1] Biggs David S.C. et. al, "Acceleration of Iterative Image Restoration Algorithms", Applied Optics, vol. 36, No. 8, p. 1766-1775, 1997. """ eps = 1e-16 # Create the otf of the same size as prop otf = np.zeros_like(prop) # prop.ndim==3 otf[otf.shape[0] // 2 - psf.shape[0] // 2:otf.shape[0] // 2 + psf.shape[0] // 2 + 1, otf.shape[1] // 2 - psf.shape[1] // 2: otf.shape[1] // 2 + psf.shape[1] // 2 + 1, otf.shape[2] // 2 - psf.shape[2] // 2:otf.shape[2] // 2 + psf.shape[2] // 2 + 1] = psf otf = np.real(np.fft.fftn(np.fft.ifftshift(otf))) # Enforce Positivity prop = np.clip(prop, 0, np.inf) prop_deconv = prop.copy() for it in range(numit): # Blur the estimate reBlurred = np.real(np.fft.ifftn(otf * np.fft.fftn(prop_deconv))) reBlurred[reBlurred < eps] = eps # Update the estimate prop_deconv = prop_deconv * ( np.real(np.fft.ifftn( otf * np.fft.fftn((prop / reBlurred) + eps)))) ** acc_factor # Enforce positivity prop_deconv = np.clip(prop_deconv, 0, np.inf) return prop_deconv / prop_deconv.sum() if __name__ == '__main__': pass dipy-0.13.0/dipy/reconst/dti.py000077500000000000000000002021161317371701200163230ustar00rootroot00000000000000#!/usr/bin/python """ Classes and functions for fitting tensors """ from __future__ import division, print_function, absolute_import import warnings import functools import numpy as np import scipy.optimize as opt from dipy.utils.six.moves import range from dipy.utils.arrfuncs import pinv, eigh from dipy.data import get_sphere from dipy.core.gradients import gradient_table from dipy.core.geometry import vector_norm from dipy.reconst.vec_val_sum import vec_val_vect from dipy.core.onetime import auto_attr from dipy.reconst.base import ReconstModel MIN_POSITIVE_SIGNAL = 0.0001 def _roll_evals(evals, axis=-1): """ Helper function to check that the evals provided to functions calculating tensor statistics have the right shape Parameters ---------- evals : array-like Eigenvalues of a diffusion tensor. shape should be (...,3). axis : int The axis of the array which contains the 3 eigenvals. Default: -1 Returns ------- evals : array-like Eigenvalues of a diffusion tensor, rolled so that the 3 eigenvals are the last axis. """ if evals.shape[-1] != 3: msg = "Expecting 3 eigenvalues, got {}".format(evals.shape[-1]) raise ValueError(msg) evals = np.rollaxis(evals, axis) return evals def fractional_anisotropy(evals, axis=-1): r""" Fractional anisotropy (FA) of a diffusion tensor. Parameters ---------- evals : array-like Eigenvalues of a diffusion tensor. axis : int Axis of `evals` which contains 3 eigenvalues. Returns ------- fa : array Calculated FA. Range is 0 <= FA <= 1. Notes -------- FA is calculated using the following equation: .. 
math:: FA = \sqrt{\frac{1}{2}\frac{(\lambda_1-\lambda_2)^2+(\lambda_1- \lambda_3)^2+(\lambda_2-\lambda_3)^2}{\lambda_1^2+ \lambda_2^2+\lambda_3^2}} """ evals = _roll_evals(evals, axis) # Make sure not to get nans all_zero = (evals == 0).all(axis=0) ev1, ev2, ev3 = evals fa = np.sqrt(0.5 * ((ev1 - ev2) ** 2 + (ev2 - ev3) ** 2 + (ev3 - ev1) ** 2) / ((evals * evals).sum(0) + all_zero)) return fa def geodesic_anisotropy(evals, axis=-1): r""" Geodesic anisotropy (GA) of a diffusion tensor. Parameters ---------- evals : array-like Eigenvalues of a diffusion tensor. axis : int Axis of `evals` which contains 3 eigenvalues. Returns ------- ga : array Calculated GA. In the range 0 to +infinity Notes -------- GA is calculated using the following equation given in [1]_: .. math:: GA = \sqrt{\sum_{i=1}^3 \log^2{\left ( \lambda_i/<\mathbf{D}> \right )}}, \quad \textrm{where} \quad <\mathbf{D}> = (\lambda_1\lambda_2\lambda_3)^{1/3} Note that the notation, $$, is often used as the mean diffusivity (MD) of the diffusion tensor and can lead to confusions in the literature (see [1]_ versus [2]_ versus [3]_ for example). Reference [2]_ defines geodesic anisotropy (GA) with $$ as the MD in the denominator of the sum. This is wrong. The original paper [1]_ defines GA with $ = det(D)^{1/3}$, as the isotropic part of the distance. This might be an explanation for the confusion. The isotropic part of the diffusion tensor in Euclidean space is the MD whereas the isotropic part of the tensor in log-Euclidean space is $det(D)^{1/3}$. The Appendix of [1]_ and log-Euclidean derivations from [3]_ are clear on this. Hence, all that to say that $ = det(D)^{1/3}$ here for the GA definition and not MD. References ---------- .. [1] P. G. Batchelor, M. Moakher, D. Atkinson, F. Calamante, A. Connelly, "A rigorous framework for diffusion tensor calculus", Magnetic Resonance in Medicine, vol. 53, pp. 221-225, 2005. .. [2] M. M. Correia, V. F. Newcombe, G.B. Williams. "Contrast-to-noise ratios for indices of anisotropy obtained from diffusion MRI: a study with standard clinical b-values at 3T". NeuroImage, vol. 57, pp. 1103-1115, 2011. .. [3] A. D. Lee, etal, P. M. Thompson. "Comparison of fractional and geodesic anisotropy in diffusion tensor images of 90 monozygotic and dizygotic twins". 5th IEEE International Symposium on Biomedical Imaging (ISBI), pp. 943-946, May 2008. .. [4] V. Arsigny, P. Fillard, X. Pennec, N. Ayache. "Log-Euclidean metrics for fast and simple calculus on diffusion tensors." Magnetic Resonance in Medecine, vol 56, pp. 411-421, 2006. """ evals = _roll_evals(evals, axis) ev1, ev2, ev3 = evals log1 = np.zeros(ev1.shape) log2 = np.zeros(ev1.shape) log3 = np.zeros(ev1.shape) idx = np.nonzero(ev1) # this is the definition in [1]_ detD = np.power(ev1 * ev2 * ev3, 1 / 3.) log1[idx] = np.log(ev1[idx] / detD[idx]) log2[idx] = np.log(ev2[idx] / detD[idx]) log3[idx] = np.log(ev3[idx] / detD[idx]) ga = np.sqrt(log1 ** 2 + log2 ** 2 + log3 ** 2) return ga def mean_diffusivity(evals, axis=-1): r""" Mean Diffusivity (MD) of a diffusion tensor. Parameters ---------- evals : array-like Eigenvalues of a diffusion tensor. axis : int Axis of `evals` which contains 3 eigenvalues. Returns ------- md : array Calculated MD. Notes -------- MD is calculated with the following equation: .. math:: MD = \frac{\lambda_1 + \lambda_2 + \lambda_3}{3} """ evals = _roll_evals(evals, axis) return evals.mean(0) def axial_diffusivity(evals, axis=-1): r""" Axial Diffusivity (AD) of a diffusion tensor. Also called parallel diffusivity. 
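A worked sketch of the scalar maps above for a single, purely illustrative set of eigenvalues:

    import numpy as np
    from dipy.reconst.dti import fractional_anisotropy, mean_diffusivity

    evals = np.array([1.7e-3, 0.3e-3, 0.3e-3])   # prolate tensor, values in mm^2/s
    fa = fractional_anisotropy(evals)            # ~0.80 for these eigenvalues
    md = mean_diffusivity(evals)                 # (1.7 + 0.3 + 0.3)e-3 / 3 ~ 7.7e-4
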
Parameters ---------- evals : array-like Eigenvalues of a diffusion tensor, must be sorted in descending order along `axis`. axis : int Axis of `evals` which contains 3 eigenvalues. Returns ------- ad : array Calculated AD. Notes -------- AD is calculated with the following equation: .. math:: AD = \lambda_1 """ evals = _roll_evals(evals, axis) ev1, ev2, ev3 = evals return ev1 def radial_diffusivity(evals, axis=-1): r""" Radial Diffusivity (RD) of a diffusion tensor. Also called perpendicular diffusivity. Parameters ---------- evals : array-like Eigenvalues of a diffusion tensor, must be sorted in descending order along `axis`. axis : int Axis of `evals` which contains 3 eigenvalues. Returns ------- rd : array Calculated RD. Notes -------- RD is calculated with the following equation: .. math:: RD = \frac{\lambda_2 + \lambda_3}{2} """ evals = _roll_evals(evals, axis) return evals[1:].mean(0) def trace(evals, axis=-1): r""" Trace of a diffusion tensor. Parameters ---------- evals : array-like Eigenvalues of a diffusion tensor. axis : int Axis of `evals` which contains 3 eigenvalues. Returns ------- trace : array Calculated trace of the diffusion tensor. Notes -------- Trace is calculated with the following equation: .. math:: Trace = \lambda_1 + \lambda_2 + \lambda_3 """ evals = _roll_evals(evals, axis) return evals.sum(0) def color_fa(fa, evecs): r""" Color fractional anisotropy of diffusion tensor Parameters ---------- fa : array-like Array of the fractional anisotropy (can be 1D, 2D or 3D) evecs : array-like eigen vectors from the tensor model Returns ------- rgb : Array with 3 channels for each color as the last dimension. Colormap of the FA with red for the x value, y for the green value and z for the blue value. Note ----- It is computed from the clipped FA between 0 and 1 using the following formula .. math:: rgb = abs(max(\vec{e})) \times fa """ if (fa.shape != evecs[..., 0, 0].shape) or ((3, 3) != evecs.shape[-2:]): raise ValueError("Wrong number of dimensions for evecs") return np.abs(evecs[..., 0]) * np.clip(fa, 0, 1)[..., None] # The following are used to calculate the tensor mode: def determinant(q_form): """ The determinant of a tensor, given in quadratic form Parameters ---------- q_form : ndarray The quadratic form of a tensor, or an array with quadratic forms of tensors. Should be of shape (x, y, z, 3, 3) or (n, 3, 3) or (3, 3). Returns ------- det : array The determinant of the tensor in each spatial coordinate """ # Following the conventions used here: # http://en.wikipedia.org/wiki/Determinant aei = q_form[..., 0, 0] * q_form[..., 1, 1] * q_form[..., 2, 2] bfg = q_form[..., 0, 1] * q_form[..., 1, 2] * q_form[..., 2, 0] cdh = q_form[..., 0, 2] * q_form[..., 1, 0] * q_form[..., 2, 1] ceg = q_form[..., 0, 2] * q_form[..., 1, 1] * q_form[..., 2, 0] bdi = q_form[..., 0, 1] * q_form[..., 1, 0] * q_form[..., 2, 2] afh = q_form[..., 0, 0] * q_form[..., 1, 2] * q_form[..., 2, 1] return aei + bfg + cdh - ceg - bdi - afh def isotropic(q_form): r""" Calculate the isotropic part of the tensor [1]_. Parameters ---------- q_form : ndarray The quadratic form of a tensor, or an array with quadratic forms of tensors. Should be of shape (x,y,z,3,3) or (n, 3, 3) or (3,3). Returns ------- A_hat: ndarray The isotropic part of the tensor in each spatial coordinate Notes ----- The isotropic part of a tensor is defined as (equations 3-5 of [1]_): .. math :: \bar{A} = \frac{1}{2} tr(A) I .. [1] Daniel B. Ennis and G. 
Kindlmann, "Orthogonal Tensor Invariants and the Analysis of Diffusion Tensor Magnetic Resonance Images", Magnetic Resonance in Medicine, vol. 55, no. 1, pp. 136-146, 2006. """ tr_A = q_form[..., 0, 0] + q_form[..., 1, 1] + q_form[..., 2, 2] my_I = np.eye(3) tr_AI = (tr_A.reshape(tr_A.shape + (1, 1)) * my_I) return (1 / 3.0) * tr_AI def deviatoric(q_form): r""" Calculate the deviatoric (anisotropic) part of the tensor [1]_. Parameters ---------- q_form : ndarray The quadratic form of a tensor, or an array with quadratic forms of tensors. Should be of shape (x,y,z,3,3) or (n, 3, 3) or (3,3). Returns ------- A_squiggle : ndarray The deviatoric part of the tensor in each spatial coordinate. Notes ----- The deviatoric part of the tensor is defined as (equations 3-5 in [1]_): .. math :: \widetilde{A} = A - \bar{A} Where $A$ is the tensor quadratic form and $\bar{A}$ is the anisotropic part of the tensor. .. [1] Daniel B. Ennis and G. Kindlmann, "Orthogonal Tensor Invariants and the Analysis of Diffusion Tensor Magnetic Resonance Images", Magnetic Resonance in Medicine, vol. 55, no. 1, pp. 136-146, 2006. """ A_squiggle = q_form - isotropic(q_form) return A_squiggle def norm(q_form): r""" Calculate the Frobenius norm of a tensor quadratic form Parameters ---------- q_form: ndarray The quadratic form of a tensor, or an array with quadratic forms of tensors. Should be of shape (x,y,z,3,3) or (n, 3, 3) or (3,3). Returns ------- norm : ndarray The Frobenius norm of the 3,3 tensor q_form in each spatial coordinate. Notes ----- The Frobenius norm is defined as: :math: ||A||_F = [\sum_{i,j} abs(a_{i,j})^2]^{1/2} See also -------- np.linalg.norm """ return np.sqrt(np.sum(np.sum(np.abs(q_form ** 2), -1), -1)) def mode(q_form): r""" Mode (MO) of a diffusion tensor [1]_. Parameters ---------- q_form : ndarray The quadratic form of a tensor, or an array with quadratic forms of tensors. Should be of shape (x, y, z, 3, 3) or (n, 3, 3) or (3, 3). Returns ------- mode : array Calculated tensor mode in each spatial coordinate. Notes ----- Mode ranges between -1 (planar anisotropy) and +1 (linear anisotropy) with 0 representing orthotropy. Mode is calculated with the following equation (equation 9 in [1]_): .. math:: Mode = 3*\sqrt{6}*det(\widetilde{A}/norm(\widetilde{A})) Where $\widetilde{A}$ is the deviatoric part of the tensor quadratic form. References ---------- .. [1] Daniel B. Ennis and G. Kindlmann, "Orthogonal Tensor Invariants and the Analysis of Diffusion Tensor Magnetic Resonance Images", Magnetic Resonance in Medicine, vol. 55, no. 1, pp. 136-146, 2006. """ A_squiggle = deviatoric(q_form) A_s_norm = norm(A_squiggle) # Add two dims for the (3,3), so that it can broadcast on A_squiggle: A_s_norm = A_s_norm.reshape(A_s_norm.shape + (1, 1)) return 3 * np.sqrt(6) * determinant((A_squiggle / A_s_norm)) def linearity(evals, axis=-1): r""" The linearity of the tensor [1]_ Parameters ---------- evals : array-like Eigenvalues of a diffusion tensor. axis : int Axis of `evals` which contains 3 eigenvalues. Returns ------- linearity : array Calculated linearity of the diffusion tensor. Notes -------- Linearity is calculated with the following equation: .. math:: Linearity = \frac{\lambda_1-\lambda_2}{\lambda_1+\lambda_2+\lambda_3} Notes ----- [1] Westin C.-F., Peled S., Gubjartsson H., Kikinis R., Jolesz F., "Geometrical diffusion measures for MRI from tensor basis analysis" in Proc. 5th Annual ISMRM, 1997. 
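As a sketch tying together the Westin shape measures (linearity above, planarity and sphericity defined just below): for any eigenvalue set with a non-zero trace the three measures sum to one. The eigenvalues here are illustrative:

    import numpy as np
    from dipy.reconst.dti import linearity, planarity, sphericity

    evals = np.array([1.7e-3, 0.6e-3, 0.2e-3])
    cl = linearity(evals)      # (1.7 - 0.6) / 2.5 = 0.44
    cp = planarity(evals)      # 2 * (0.6 - 0.2) / 2.5 = 0.32
    cs = sphericity(evals)     # 3 * 0.2 / 2.5 = 0.24
    assert np.isclose(cl + cp + cs, 1.0)
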
""" evals = _roll_evals(evals, axis) ev1, ev2, ev3 = evals return (ev1 - ev2) / evals.sum(0) def planarity(evals, axis=-1): r""" The planarity of the tensor [1]_ Parameters ---------- evals : array-like Eigenvalues of a diffusion tensor. axis : int Axis of `evals` which contains 3 eigenvalues. Returns ------- linearity : array Calculated linearity of the diffusion tensor. Notes -------- Planarity is calculated with the following equation: .. math:: Planarity = \frac{2 (\lambda_2-\lambda_3)}{\lambda_1+\lambda_2+\lambda_3} Notes ----- [1] Westin C.-F., Peled S., Gubjartsson H., Kikinis R., Jolesz F., "Geometrical diffusion measures for MRI from tensor basis analysis" in Proc. 5th Annual ISMRM, 1997. """ evals = _roll_evals(evals, axis) ev1, ev2, ev3 = evals return (2 * (ev2 - ev3) / evals.sum(0)) def sphericity(evals, axis=-1): r""" The sphericity of the tensor [1]_ Parameters ---------- evals : array-like Eigenvalues of a diffusion tensor. axis : int Axis of `evals` which contains 3 eigenvalues. Returns ------- sphericity : array Calculated sphericity of the diffusion tensor. Notes -------- Sphericity is calculated with the following equation: .. math:: Sphericity = \frac{3 \lambda_3)}{\lambda_1+\lambda_2+\lambda_3} Notes ----- [1] Westin C.-F., Peled S., Gubjartsson H., Kikinis R., Jolesz F., "Geometrical diffusion measures for MRI from tensor basis analysis" in Proc. 5th Annual ISMRM, 1997. """ evals = _roll_evals(evals, axis) ev1, ev2, ev3 = evals return (3 * ev3) / evals.sum(0) def apparent_diffusion_coef(q_form, sphere): r""" Calculate the apparent diffusion coefficient (ADC) in each direction of a sphere. Parameters ---------- q_form : ndarray The quadratic form of a tensor, or an array with quadratic forms of tensors. Should be of shape (..., 3, 3) sphere : a Sphere class instance The ADC will be calculated for each of the vertices in the sphere Notes ----- The calculation of ADC, relies on the following relationship: .. math :: ADC = \vec{b} Q \vec{b}^T Where Q is the quadratic form of the tensor. """ bvecs = sphere.vertices bvals = np.ones(bvecs.shape[0]) gtab = gradient_table(bvals, bvecs) D = design_matrix(gtab)[:, :6] return -np.dot(lower_triangular(q_form), D.T) def tensor_prediction(dti_params, gtab, S0): """ Predict a signal given tensor parameters. Parameters ---------- dti_params : ndarray Tensor parameters. The last dimension should have 12 tensor parameters: 3 eigenvalues, followed by the 3 corresponding eigenvectors. gtab : a GradientTable class instance The gradient table for this prediction S0 : float or ndarray The non diffusion-weighted signal in every voxel, or across all voxels. Default: 1 Notes ----- The predicted signal is given by: $S(\theta, b) = S_0 * e^{-b ADC}$, where $ADC = \theta Q \theta^T$, $\theta$ is a unit vector pointing at any direction on the sphere for which a signal is to be predicted, $b$ is the b value provided in the GradientTable input for that direction, $Q$ is the quadratic form of the tensor determined by the input parameters. """ evals = dti_params[..., :3] evecs = dti_params[..., 3:].reshape(dti_params.shape[:-1] + (3, 3)) qform = vec_val_vect(evecs, evals) del evals, evecs lower_tri = lower_triangular(qform, S0) del qform D = design_matrix(gtab) return np.exp(np.dot(lower_tri, D.T)) class TensorModel(ReconstModel): """ Diffusion Tensor """ def __init__(self, gtab, fit_method="WLS", return_S0_hat=False, *args, **kwargs): """ A Diffusion Tensor Model [1]_, [2]_. 
Parameters ---------- gtab : GradientTable class instance fit_method : str or callable str can be one of the following: 'WLS' for weighted least squares :func:`dti.wls_fit_tensor` 'LS' or 'OLS' for ordinary least squares :func:`dti.ols_fit_tensor` 'NLLS' for non-linear least-squares :func:`dti.nlls_fit_tensor` 'RT' or 'restore' or 'RESTORE' for RESTORE robust tensor fitting [3]_ :func:`dti.restore_fit_tensor` callable has to have the signature: fit_method(design_matrix, data, *args, **kwargs) return_S0_hat : bool Boolean to return (True) or not (False) the S0 values for the fit. args, kwargs : arguments and key-word arguments passed to the fit_method. See dti.wls_fit_tensor, dti.ols_fit_tensor for details min_signal : float The minimum signal value. Needs to be a strictly positive number. Default: minimal signal in the data provided to `fit`. Note ----- In order to increase speed of processing, tensor fitting is done simultaneously over many voxels. Many fit_methods use the 'step' parameter to set the number of voxels that will be fit at once in each iteration. This is the chunk size as a number of voxels. A larger step value should speed things up, but it will also take up more memory. It is advisable to keep an eye on memory consumption as this value is increased. Example : In :func:`iter_fit_tensor` we have a default step value of 1e4 References ---------- .. [1] Basser, P.J., Mattiello, J., LeBihan, D., 1994. Estimation of the effective self-diffusion tensor from the NMR spin echo. J Magn Reson B 103, 247-254. .. [2] Basser, P., Pierpaoli, C., 1996. Microstructural and physiological features of tissues elucidated by quantitative diffusion-tensor MRI. Journal of Magnetic Resonance 111, 209-219. .. [3] Lin-Ching C., Jones D.K., Pierpaoli, C. 2005. RESTORE: Robust estimation of tensors by outlier rejection. MRM 53: 1088-1095 """ ReconstModel.__init__(self, gtab) if not callable(fit_method): try: fit_method = common_fit_methods[fit_method] except KeyError: e_s = '"' + str(fit_method) + '" is not a known fit ' e_s += 'method, the fit method should either be a ' e_s += 'function or one of the common fit methods' raise ValueError(e_s) self.fit_method = fit_method self.return_S0_hat = return_S0_hat self.design_matrix = design_matrix(self.gtab) self.args = args self.kwargs = kwargs self.min_signal = self.kwargs.pop('min_signal', None) if self.min_signal is not None and self.min_signal <= 0: e_s = "The `min_signal` key-word argument needs to be strictly" e_s += " positive." raise ValueError(e_s) def fit(self, data, mask=None): """ Fit method of the DTI model class Parameters ---------- data : array The measured signal from one voxel. 
mask : array A boolean array used to mark the coordinates in the data that should be analyzed that has the shape data.shape[:-1] """ S0_params = None if mask is not None: # Check for valid shape of the mask if mask.shape != data.shape[:-1]: raise ValueError("Mask is not the same shape as data.") mask = np.array(mask, dtype=bool, copy=False) data_in_mask = np.reshape(data[mask], (-1, data.shape[-1])) if self.min_signal is None: min_signal = MIN_POSITIVE_SIGNAL else: min_signal = self.min_signal data_in_mask = np.maximum(data_in_mask, min_signal) params_in_mask = self.fit_method( self.design_matrix, data_in_mask, return_S0_hat=self.return_S0_hat, *self.args, **self.kwargs) if self.return_S0_hat: params_in_mask, model_S0 = params_in_mask if mask is None: out_shape = data.shape[:-1] + (-1, ) dti_params = params_in_mask.reshape(out_shape) if self.return_S0_hat: S0_params = model_S0.reshape(out_shape[:-1]) else: dti_params = np.zeros(data.shape[:-1] + (12,)) dti_params[mask, :] = params_in_mask if self.return_S0_hat: S0_params = np.zeros(data.shape[:-1] + (1,)) S0_params[mask] = model_S0 return TensorFit(self, dti_params, model_S0=S0_params) def predict(self, dti_params, S0=1.): """ Predict a signal for this TensorModel class instance given parameters. Parameters ---------- dti_params : ndarray The last dimension should have 12 tensor parameters: 3 eigenvalues, followed by the 3 eigenvectors S0 : float or ndarray The non diffusion-weighted signal in every voxel, or across all voxels. Default: 1 """ return tensor_prediction(dti_params, self.gtab, S0) class TensorFit(object): def __init__(self, model, model_params, model_S0=None): """ Initialize a TensorFit class instance. """ self.model = model self.model_params = model_params self.model_S0 = model_S0 def __getitem__(self, index): model_params = self.model_params model_S0 = self.model_S0 N = model_params.ndim if type(index) is not tuple: index = (index,) elif len(index) >= model_params.ndim: raise IndexError("IndexError: invalid index") index = index + (slice(None),) * (N - len(index)) if model_S0 is not None: model_S0 = model_S0[index[:-1]] return type(self)(self.model, model_params[index], model_S0=model_S0) @property def S0_hat(self): return self.model_S0 @property def shape(self): return self.model_params.shape[:-1] @property def directions(self): """ For tracking - return the primary direction in each voxel """ return self.evecs[..., None, :, 0] @property def evals(self): """ Returns the eigenvalues of the tensor as an array """ return self.model_params[..., :3] @property def evecs(self): """ Returns the eigenvectors of the tensor as an array, columnwise """ evecs = self.model_params[..., 3:12] return evecs.reshape(self.shape + (3, 3)) @property def quadratic_form(self): """Calculates the 3x3 diffusion tensor for each voxel""" # do `evecs * evals * evecs.T` where * is matrix multiply # einsum does this with: # np.einsum('...ij,...j,...kj->...ik', evecs, evals, evecs) return vec_val_vect(self.evecs, self.evals) def lower_triangular(self, b0=None): return lower_triangular(self.quadratic_form, b0) @auto_attr def fa(self): """Fractional anisotropy (FA) calculated from cached eigenvalues.""" return fractional_anisotropy(self.evals) @auto_attr def color_fa(self): """Color fractional anisotropy of diffusion tensor""" return color_fa(self.fa, self.evecs) @auto_attr def ga(self): """Geodesic anisotropy (GA) calculated from cached eigenvalues.""" return geodesic_anisotropy(self.evals) @auto_attr def mode(self): """ Tensor mode calculated from cached 
eigenvalues. """ return mode(self.quadratic_form) @auto_attr def md(self): r""" Mean diffusivity (MD) calculated from cached eigenvalues. Returns --------- md : array (V, 1) Calculated MD. Notes -------- MD is calculated with the following equation: .. math:: MD = \frac{\lambda_1+\lambda_2+\lambda_3}{3} """ return self.trace / 3.0 @auto_attr def rd(self): r""" Radial diffusivity (RD) calculated from cached eigenvalues. Returns --------- rd : array (V, 1) Calculated RD. Notes -------- RD is calculated with the following equation: .. math:: RD = \frac{\lambda_2 + \lambda_3}{2} """ return radial_diffusivity(self.evals) @auto_attr def ad(self): r""" Axial diffusivity (AD) calculated from cached eigenvalues. Returns --------- ad : array (V, 1) Calculated AD. Notes -------- RD is calculated with the following equation: .. math:: AD = \lambda_1 """ return axial_diffusivity(self.evals) @auto_attr def trace(self): r""" Trace of the tensor calculated from cached eigenvalues. Returns --------- trace : array (V, 1) Calculated trace. Notes -------- The trace is calculated with the following equation: .. math:: trace = \lambda_1 + \lambda_2 + \lambda_3 """ return trace(self.evals) @auto_attr def planarity(self): r""" Returns ------- sphericity : array Calculated sphericity of the diffusion tensor [1]_. Notes -------- Sphericity is calculated with the following equation: .. math:: Sphericity = \frac{2 (\lambda_2 - \lambda_3)}{\lambda_1+\lambda_2+\lambda_3} Notes ----- [1] Westin C.-F., Peled S., Gubjartsson H., Kikinis R., Jolesz F., "Geometrical diffusion measures for MRI from tensor basis analysis" in Proc. 5th Annual ISMRM, 1997. """ return planarity(self.evals) @auto_attr def linearity(self): r""" Returns ------- linearity : array Calculated linearity of the diffusion tensor [1]_. Notes -------- Linearity is calculated with the following equation: .. math:: Linearity = \frac{\lambda_1-\lambda_2}{\lambda_1+\lambda_2+\lambda_3} [1] Westin C.-F., Peled S., Gubjartsson H., Kikinis R., Jolesz F., "Geometrical diffusion measures for MRI from tensor basis analysis" in Proc. 5th Annual ISMRM, 1997. """ return linearity(self.evals) @auto_attr def sphericity(self): r""" Returns ------- sphericity : array Calculated sphericity of the diffusion tensor [1]_. Notes -------- Sphericity is calculated with the following equation: .. math:: Sphericity = \frac{3 \lambda_3}{\lambda_1+\lambda_2+\lambda_3} Notes ----- [1] Westin C.-F., Peled S., Gubjartsson H., Kikinis R., Jolesz F., "Geometrical diffusion measures for MRI from tensor basis analysis" in Proc. 5th Annual ISMRM, 1997. """ return sphericity(self.evals) def odf(self, sphere): """ The diffusion orientation distribution function (dODF). This is an estimate of the diffusion distance in each direction Parameters ---------- sphere : Sphere class instance. The dODF is calculated in the vertices of this input. Returns ------- odf : ndarray The diffusion distance in every direction of the sphere in every voxel in the input data. Notes ----- This is based on equation 3 in [Aganj2010]_. To re-derive it from scratch, follow steps in [Descoteaux2008]_, Section 7.9 Equation 7.24 but with an $r^2$ term in the integral. .. [Aganj2010] Aganj, I., Lenglet, C., Sapiro, G., Yacoub, E., Ugurbil, K., & Harel, N. (2010). Reconstruction of the orientation distribution function in single- and multiple-shell q-ball imaging within constant solid angle. Magnetic Resonance in Medicine, 64(2), 554-566. doi:DOI: 10.1002/mrm.22365 .. [Descoteaux2008] Descoteaux, M. (2008). 
PhD Thesis: High Angular Resolution Diffusion MRI: from Local Estimation to Segmentation and Tractography. ftp://ftp-sop.inria.fr/athena/Publications/PhDs/descoteaux_thesis.pdf """ odf = np.zeros((self.evals.shape[:-1] + (sphere.vertices.shape[0],))) if len(self.evals.shape) > 1: mask = np.where((self.evals[..., 0] > 0) & (self.evals[..., 1] > 0) & (self.evals[..., 2] > 0)) evals = self.evals[mask] evecs = self.evecs[mask] else: evals = self.evals evecs = self.evecs lower = 4 * np.pi * np.sqrt(np.prod(evals, -1)) projection = np.dot(sphere.vertices, evecs) projection /= np.sqrt(evals) result = ((vector_norm(projection) ** -3) / lower).T if len(self.evals.shape) > 1: odf[mask] = result else: odf = result return odf def adc(self, sphere): r""" Calculate the apparent diffusion coefficient (ADC) in each direction on the sphere for each voxel in the data Parameters ---------- sphere : Sphere class instance Returns ------- adc : ndarray The estimates of the apparent diffusion coefficient in every direction on the input sphere Notes ----- The calculation of ADC, relies on the following relationship: .. math :: ADC = \vec{b} Q \vec{b}^T Where Q is the quadratic form of the tensor. """ return apparent_diffusion_coef(self.quadratic_form, sphere) def predict(self, gtab, S0=None, step=None): r""" Given a model fit, predict the signal on the vertices of a sphere Parameters ---------- gtab : a GradientTable class instance This encodes the directions for which a prediction is made S0 : float array The mean non-diffusion weighted signal in each voxel. Default: The fitted S0 value in all voxels if it was fitted. Otherwise 1 in all voxels. step : int The chunk size as a number of voxels. Optional parameter with default value 10,000. In order to increase speed of processing, tensor fitting is done simultaneously over many voxels. This parameter sets the number of voxels that will be fit at once in each iteration. A larger step value should speed things up, but it will also take up more memory. It is advisable to keep an eye on memory consumption as this value is increased. Notes ----- The predicted signal is given by: .. math :: S(\theta, b) = S_0 * e^{-b ADC} Where: .. math :: ADC = \theta Q \theta^T $\theta$ is a unit vector pointing at any direction on the sphere for which a signal is to be predicted and $b$ is the b value provided in the GradientTable input for that direction """ if S0 is None: S0 = self.model_S0 if S0 is None: # if we didn't input or estimate S0 just use 1 S0 = 1. shape = self.model_params.shape[:-1] size = np.prod(shape) if step is None: step = self.model.kwargs.get('step', size) if step >= size: return tensor_prediction(self.model_params[..., 0:12], gtab, S0=S0) params = np.reshape(self.model_params, (-1, self.model_params.shape[-1])) predict = np.empty((size, gtab.bvals.shape[0])) if isinstance(S0, np.ndarray): S0 = S0.ravel() for i in range(0, size, step): if isinstance(S0, np.ndarray): this_S0 = S0[i:i + step] else: this_S0 = S0 predict[i:i + step] = tensor_prediction(params[i:i + step], gtab, S0=this_S0) return predict.reshape(shape + (gtab.bvals.shape[0], )) def iter_fit_tensor(step=1e4): """Wrap a fit_tensor func and iterate over chunks of data with given length Splits data into a number of chunks of specified size and iterates the decorated fit_tensor function over them. This is useful to counteract the temporary but significant memory usage increase in fit_tensor functions that use vectorized operations and need to store large temporary arrays for their vectorized operations. 
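A short sketch of signal prediction from an existing fit, reusing the assumed tenfit and gtab from the earlier sketch; the step argument only controls how many voxels are predicted per chunk:

    pred = tenfit.predict(gtab, S0=100., step=10000)
    # pred has the fitted volume shape plus one value per entry in gtab.bvals
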
Parameters ---------- step : int The chunk size as a number of voxels. Optional parameter with default value 10,000. In order to increase speed of processing, tensor fitting is done simultaneously over many voxels. This parameter sets the number of voxels that will be fit at once in each iteration. A larger step value should speed things up, but it will also take up more memory. It is advisable to keep an eye on memory consumption as this value is increased. """ def iter_decorator(fit_tensor): """Actual iter decorator returned by iter_fit_tensor dec factory Parameters ---------- fit_tensor : callable A tensor fitting callable (most likely a function). The callable has to have the signature: fit_method(design_matrix, data, *args, **kwargs) """ @functools.wraps(fit_tensor) def wrapped_fit_tensor(design_matrix, data, return_S0_hat=False, step=step, *args, **kwargs): """Iterate fit_tensor function over the data chunks Parameters ---------- design_matrix : array (g, 7) Design matrix holding the covariants used to solve for the regression coefficients. data : array ([X, Y, Z, ...], g) Data or response variables holding the data. Note that the last dimension should contain the data. It makes no copies of data. return_S0_hat : bool Boolean to return (True) or not (False) the S0 values for the fit. step : int The chunk size as a number of voxels. Overrides `step` value of `iter_fit_tensor`. args : {list,tuple} Any extra optional positional arguments passed to `fit_tensor`. kwargs : dict Any extra optional keyword arguments passed to `fit_tensor`. """ shape = data.shape[:-1] size = np.prod(shape) step = int(step) or size if step >= size: return fit_tensor(design_matrix, data, return_S0_hat=return_S0_hat, *args, **kwargs) data = data.reshape(-1, data.shape[-1]) dtiparams = np.empty((size, 12), dtype=np.float64) if return_S0_hat: S0params = np.empty(size, dtype=np.float64) for i in range(0, size, step): if return_S0_hat: dtiparams[i:i + step], S0params[i:i + step] \ = fit_tensor(design_matrix, data[i:i + step], return_S0_hat=return_S0_hat, *args, **kwargs) else: dtiparams[i:i + step] = fit_tensor(design_matrix, data[i:i + step], *args, **kwargs) if return_S0_hat: return (dtiparams.reshape(shape + (12, )), S0params.reshape(shape + (1, ))) else: return dtiparams.reshape(shape + (12, )) return wrapped_fit_tensor return iter_decorator @iter_fit_tensor() def wls_fit_tensor(design_matrix, data, return_S0_hat=False): r""" Computes weighted least squares (WLS) fit to calculate self-diffusion tensor using a linear regression model [1]_. Parameters ---------- design_matrix : array (g, 7) Design matrix holding the covariants used to solve for the regression coefficients. data : array ([X, Y, Z, ...], g) Data or response variables holding the data. Note that the last dimension should contain the data. It makes no copies of data. return_S0_hat : bool Boolean to return (True) or not (False) the S0 values for the fit. Returns ------- eigvals : array (..., 3) Eigenvalues from eigen decomposition of the tensor. eigvecs : array (..., 3, 3) Associated eigenvectors from eigen decomposition of the tensor. Eigenvectors are columnar (e.g. eigvecs[:,j] is associated with eigvals[j]) See Also -------- decompose_tensor Notes ----- In Chung, et al. 2006, the regression of the WLS fit needed an unbiased preliminary estimate of the weights and therefore the ordinary least squares (OLS) estimates were used. A "two pass" method was implemented: 1. calculate OLS estimates of the data 2. 
apply the OLS estimates as weights to the WLS fit of the data This ensured heteroscedasticity could be properly modeled for various types of bootstrap resampling (namely residual bootstrap). .. math:: y = \mathrm{data} \\ X = \mathrm{design matrix} \\ \hat{\beta}_\mathrm{WLS} = \mathrm{desired regression coefficients (e.g. tensor)}\\ \\ \hat{\beta}_\mathrm{WLS} = (X^T W X)^{-1} X^T W y \\ \\ W = \mathrm{diag}((X \hat{\beta}_\mathrm{OLS})^2), \mathrm{where} \hat{\beta}_\mathrm{OLS} = (X^T X)^{-1} X^T y References ---------- .. [1] Chung, SW., Lu, Y., Henry, R.G., 2006. Comparison of bootstrap approaches for estimation of uncertainties of DTI parameters. NeuroImage 33, 531-541. """ tol = 1e-6 data = np.asarray(data) ols_fit = _ols_fit_matrix(design_matrix) log_s = np.log(data) w = np.exp(np.einsum('...ij,...j', ols_fit, log_s)) fit_result = np.einsum('...ij,...j', pinv(design_matrix * w[..., None]), w * log_s) if return_S0_hat: return (eig_from_lo_tri(fit_result, min_diffusivity=tol / -design_matrix.min()), np.exp(-fit_result[:, -1])) else: return eig_from_lo_tri(fit_result, min_diffusivity=tol / -design_matrix.min()) @iter_fit_tensor() def ols_fit_tensor(design_matrix, data, return_S0_hat=False): r""" Computes ordinary least squares (OLS) fit to calculate self-diffusion tensor using a linear regression model [1]_. Parameters ---------- design_matrix : array (g, 7) Design matrix holding the covariants used to solve for the regression coefficients. data : array ([X, Y, Z, ...], g) Data or response variables holding the data. Note that the last dimension should contain the data. It makes no copies of data. return_S0_hat : bool Boolean to return (True) or not (False) the S0 values for the fit. Returns ------- eigvals : array (..., 3) Eigenvalues from eigen decomposition of the tensor. eigvecs : array (..., 3, 3) Associated eigenvectors from eigen decomposition of the tensor. Eigenvectors are columnar (e.g. eigvecs[:,j] is associated with eigvals[j]) See Also -------- WLS_fit_tensor, decompose_tensor, design_matrix Notes ----- .. math:: y = \mathrm{data} \\ X = \mathrm{design matrix} \\ \hat{\beta}_\mathrm{OLS} = (X^T X)^{-1} X^T y References ---------- .. [1] Chung, SW., Lu, Y., Henry, R.G., 2006. Comparison of bootstrap approaches for estimation of uncertainties of DTI parameters. NeuroImage 33, 531-541. """ tol = 1e-6 data = np.asarray(data) fit_result = np.einsum('...ij,...j', np.linalg.pinv(design_matrix), np.log(data)) if return_S0_hat: return (eig_from_lo_tri(fit_result, min_diffusivity=tol / -design_matrix.min()), np.exp(-fit_result[:, -1])) else: return eig_from_lo_tri(fit_result, min_diffusivity=tol / -design_matrix.min()) def _ols_fit_matrix(design_matrix): """ Helper function to calculate the ordinary least squares (OLS) fit as a matrix multiplication. Mainly used to calculate WLS weights. Can be used to calculate regression coefficients in OLS but not recommended. See Also: --------- wls_fit_tensor, ols_fit_tensor Example: -------- ols_fit = _ols_fit_matrix(design_mat) ols_data = np.dot(ols_fit, data) """ U, S, V = np.linalg.svd(design_matrix, False) return np.dot(U, U.T) def _nlls_err_func(tensor, design_matrix, data, weighting=None, sigma=None): """ Error function for the non-linear least-squares fit of the tensor. Parameters ---------- tensor : array (3,3) The 3-by-3 tensor matrix design_matrix : array The design matrix data : array The voxel signal in all gradient directions weighting : str (optional). 
Whether to use the Geman-McClure weighting criterion (see [1]_ for details) sigma : float or float array (optional) If 'sigma' weighting is used, we will weight the error function according to the background noise estimated either in aggregate over all directions (when a float is provided), or to an estimate of the noise in each diffusion-weighting direction (if an array is provided). If 'gmm', the Geman-Mclure M-estimator is used for weighting (see Notes). Notes ----- The Geman-McClure M-estimator is described as follows [1]_ (page 1089): "The scale factor C affects the shape of the GMM [Geman-McClure M-estimator] weighting function and represents the expected spread of the residuals (i.e., the SD of the residuals) due to Gaussian distributed noise. The scale factor C can be estimated by many robust scale estimators. We used the median absolute deviation (MAD) estimator because it is very robust to outliers having a 50% breakdown point (6,7). The explicit formula for C using the MAD estimator is: .. math :: C = 1.4826 x MAD = 1.4826 x median{|r1-\hat{r}|,... |r_n-\hat{r}|} where $\hat{r} = median{r_1, r_2, ..., r_3}$ and n is the number of data points. The multiplicative constant 1.4826 makes this an approximately unbiased estimate of scale when the error model is Gaussian." References ---------- [1] Chang, L-C, Jones, DK and Pierpaoli, C (2005). RESTORE: robust estimation of tensors by outlier rejection. MRM, 53: 1088-95. """ # This is the predicted signal given the params: y = np.exp(np.dot(design_matrix, tensor)) # Compute the residuals residuals = data - y # If we don't want to weight the residuals, we are basically done: if weighting is None: # And we return the SSE: return residuals se = residuals ** 2 # If the user provided a sigma (e.g 1.5267 * std(background_noise), as # suggested by Chang et al.) we will use it: if weighting == 'sigma': if sigma is None: e_s = "Must provide sigma value as input to use this weighting" e_s += " method" raise ValueError(e_s) w = 1 / (sigma**2) elif weighting == 'gmm': # We use the Geman-McClure M-estimator to compute the weights on the # residuals: C = 1.4826 * np.median(np.abs(residuals - np.median(residuals))) with warnings.catch_warnings(): warnings.simplefilter("ignore") w = 1 / (se + C**2) # The weights are normalized to the mean weight (see p. 1089): w = w / np.mean(w) # Return the weighted residuals: with warnings.catch_warnings(): warnings.simplefilter("ignore") return np.sqrt(w * se) def _nlls_jacobian_func(tensor, design_matrix, data, *arg, **kwargs): """The Jacobian is the first derivative of the error function [1]_. Notes ----- This is an implementation of equation 14 in [1]_. References ---------- [1] Koay, CG, Chang, L-C, Carew, JD, Pierpaoli, C, Basser PJ (2006). A unifying theoretical and algorithmic framework for least squares methods of estimation in diffusion tensor imaging. MRM 182, 115-25. """ pred = np.exp(np.dot(design_matrix, tensor)) return -pred[:, None] * design_matrix def _decompose_tensor_nan(tensor, tensor_alternative, min_diffusivity=0): """ Helper function that expands the function decompose_tensor to deal with tensor with nan elements. Computes tensor eigen decomposition to calculate eigenvalues and eigenvectors (Basser et al., 1994a). Some fit approaches can produce nan tensor elements in background voxels (particularly non-linear approaches). This function avoids the eigen decomposition errors of nan tensor elements by replacing tensor with nan elements by a given alternative tensor estimate. 
Parameters ---------- tensor : array (3, 3) Hermitian matrix representing a diffusion tensor. tensor_alternative : array (3, 3) Hermitian matrix representing a diffusion tensor obtain from an approach that does not produce nan tensor elements min_diffusivity : float Because negative eigenvalues are not physical and small eigenvalues, much smaller than the diffusion weighting, cause quite a lot of noise in metrics such as fa, diffusivity values smaller than `min_diffusivity` are replaced with `min_diffusivity`. Returns ------- eigvals : array (3) Eigenvalues from eigen decomposition of the tensor. Negative eigenvalues are replaced by zero. Sorted from largest to smallest. eigvecs : array (3, 3) Associated eigenvectors from eigen decomposition of the tensor. Eigenvectors are columnar (e.g. eigvecs[..., :, j] is associated with eigvals[..., j]) """ try: evals, evecs = decompose_tensor(tensor[:6], min_diffusivity=min_diffusivity) except np.linalg.LinAlgError: evals, evecs = decompose_tensor(tensor_alternative[:6], min_diffusivity=min_diffusivity) return evals, evecs def nlls_fit_tensor(design_matrix, data, weighting=None, sigma=None, jac=True, return_S0_hat=False): """ Fit the tensor params using non-linear least-squares. Parameters ---------- design_matrix : array (g, 7) Design matrix holding the covariants used to solve for the regression coefficients. data : array ([X, Y, Z, ...], g) Data or response variables holding the data. Note that the last dimension should contain the data. It makes no copies of data. weighting: str the weighting scheme to use in considering the squared-error. Default behavior is to use uniform weighting. Other options: 'sigma' 'gmm' sigma: float If the 'sigma' weighting scheme is used, a value of sigma needs to be provided here. According to [Chang2005]_, a good value to use is 1.5267 * std(background_noise), where background_noise is estimated from some part of the image known to contain no signal (only noise). jac : bool Use the Jacobian? Default: True return_S0_hat : bool Boolean to return (True) or not (False) the S0 values for the fit. Returns ------- nlls_params: the eigen-values and eigen-vectors of the tensor in each voxel. 
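Examples
--------
A minimal, self-contained sketch; the gradient scheme and tensor values
below are synthetic and purely illustrative::

    import numpy as np
    from dipy.core.gradients import gradient_table
    from dipy.reconst.dti import (design_matrix, lower_triangular,
                                  nlls_fit_tensor)

    s2 = np.sqrt(0.5)
    bvals = np.array([0., 1000., 1000., 1000., 1000., 1000., 1000.])
    bvecs = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.],
                      [0., 0., 1.], [s2, s2, 0.], [s2, 0., s2],
                      [0., s2, s2]])
    gtab = gradient_table(bvals, bvecs)
    X = design_matrix(gtab)
    # Noise-free signal from a diagonal tensor with S0 = 100:
    beta = lower_triangular(np.diag([1.7e-3, 0.3e-3, 0.3e-3]), b0=100.)
    data = np.exp(np.dot(X, beta))
    dti_params = nlls_fit_tensor(X, data[None, :])  # shape (1, 12): evals + evecs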
""" # Flatten for the iteration over voxels: flat_data = data.reshape((-1, data.shape[-1])) # Use the OLS method parameters as the starting point for the optimization: inv_design = np.linalg.pinv(design_matrix) log_s = np.log(flat_data) D = np.dot(inv_design, log_s.T).T # Flatten for the iteration over voxels: ols_params = np.reshape(D, (-1, D.shape[-1])) # 12 parameters per voxel (evals + evecs): dti_params = np.empty((flat_data.shape[0], 12)) if return_S0_hat: model_S0 = np.empty((flat_data.shape[0], 1)) for vox in range(flat_data.shape[0]): if np.all(flat_data[vox] == 0): raise ValueError("The data in this voxel contains only zeros") start_params = ols_params[vox] # Do the optimization in this voxel: if jac: this_tensor, status = opt.leastsq(_nlls_err_func, start_params, args=(design_matrix, flat_data[vox], weighting, sigma), Dfun=_nlls_jacobian_func) else: this_tensor, status = opt.leastsq(_nlls_err_func, start_params, args=(design_matrix, flat_data[vox], weighting, sigma)) # The parameters are the evals and the evecs: try: evals, evecs = decompose_tensor( from_lower_triangular(this_tensor[:6])) dti_params[vox, :3] = evals dti_params[vox, 3:] = evecs.ravel() if return_S0_hat: model_S0[vox] = np.exp(-this_tensor[6]) # If leastsq failed to converge and produced nans, we'll resort to the # OLS solution in this voxel: except np.linalg.LinAlgError: evals, evecs = decompose_tensor( from_lower_triangular(start_params[:6])) dti_params[vox, :3] = evals dti_params[vox, 3:] = evecs.ravel() if return_S0_hat: model_S0[vox] = np.exp(-start_params[6]) dti_params.shape = data.shape[:-1] + (12,) if return_S0_hat: model_S0.shape = data.shape[:-1] + (1,) return (dti_params, model_S0) else: return dti_params def restore_fit_tensor(design_matrix, data, sigma=None, jac=True, return_S0_hat=False): """ Use the RESTORE algorithm [Chang2005]_ to calculate a robust tensor fit Parameters ---------- design_matrix : array of shape (g, 7) Design matrix holding the covariants used to solve for the regression coefficients. data : array of shape ([X, Y, Z, n_directions], g) Data or response variables holding the data. Note that the last dimension should contain the data. It makes no copies of data. sigma : float An estimate of the variance. [Chang2005]_ recommend to use 1.5267 * std(background_noise), where background_noise is estimated from some part of the image known to contain no signal (only noise). jac : bool, optional Whether to use the Jacobian of the tensor to speed the non-linear optimization procedure used to fit the tensor parameters (see also :func:`nlls_fit_tensor`). Default: True return_S0_hat : bool Boolean to return (True) or not (False) the S0 values for the fit. Returns ------- restore_params : an estimate of the tensor parameters in each voxel. Note ---- Chang, L-C, Jones, DK and Pierpaoli, C (2005). RESTORE: robust estimation of tensors by outlier rejection. MRM, 53: 1088-95. 
""" # Flatten for the iteration over voxels: flat_data = data.reshape((-1, data.shape[-1])) # Use the OLS method parameters as the starting point for the optimization: inv_design = np.linalg.pinv(design_matrix) log_s = np.log(flat_data) D = np.dot(inv_design, log_s.T).T ols_params = np.reshape(D, (-1, D.shape[-1])) # 12 parameters per voxel (evals + evecs): dti_params = np.empty((flat_data.shape[0], 12)) if return_S0_hat: model_S0 = np.empty((flat_data.shape[0], 1)) for vox in range(flat_data.shape[0]): if np.all(flat_data[vox] == 0): raise ValueError("The data in this voxel contains only zeros") start_params = ols_params[vox] # Do nlls using sigma weighting in this voxel: if jac: this_tensor, status = opt.leastsq(_nlls_err_func, start_params, args=(design_matrix, flat_data[vox], 'sigma', sigma), Dfun=_nlls_jacobian_func) else: this_tensor, status = opt.leastsq(_nlls_err_func, start_params, args=(design_matrix, flat_data[vox], 'sigma', sigma)) # Get the residuals: pred_sig = np.exp(np.dot(design_matrix, this_tensor)) residuals = flat_data[vox] - pred_sig # If any of the residuals are outliers (using 3 sigma as a criterion # following Chang et al., e.g page 1089): if np.any(np.abs(residuals) > 3 * sigma): # Do nlls with GMM-weighting: if jac: this_tensor, status = opt.leastsq(_nlls_err_func, start_params, args=(design_matrix, flat_data[vox], 'gmm'), Dfun=_nlls_jacobian_func) else: this_tensor, status = opt.leastsq(_nlls_err_func, start_params, args=(design_matrix, flat_data[vox], 'gmm')) # How are you doin' on those residuals? pred_sig = np.exp(np.dot(design_matrix, this_tensor)) residuals = flat_data[vox] - pred_sig if np.any(np.abs(residuals) > 3 * sigma): # If you still have outliers, refit without those outliers: non_outlier_idx = np.where(np.abs(residuals) <= 3 * sigma) clean_design = design_matrix[non_outlier_idx] clean_sig = flat_data[vox][non_outlier_idx] if np.iterable(sigma): this_sigma = sigma[non_outlier_idx] else: this_sigma = sigma if jac: this_tensor, status = opt.leastsq(_nlls_err_func, start_params, args=(clean_design, clean_sig, 'sigma', this_sigma), Dfun=_nlls_jacobian_func) else: this_tensor, status = opt.leastsq(_nlls_err_func, start_params, args=(clean_design, clean_sig, 'sigma', this_sigma)) # The parameters are the evals and the evecs: evals, evecs = _decompose_tensor_nan( from_lower_triangular(this_tensor[:6]), from_lower_triangular(start_params[:6])) dti_params[vox, :3] = evals dti_params[vox, 3:] = evecs.ravel() if return_S0_hat: model_S0[vox] = np.exp(-this_tensor[6]) dti_params.shape = data.shape[:-1] + (12,) restore_params = dti_params if return_S0_hat: model_S0.shape = data.shape[:-1] + (1,) return (restore_params, model_S0) else: return restore_params _lt_indices = np.array([[0, 1, 3], [1, 2, 4], [3, 4, 5]]) def from_lower_triangular(D): """ Returns a tensor given the six unique tensor elements Given the six unique tensor elements (in the order: Dxx, Dxy, Dyy, Dxz, Dyz, Dzz) returns a 3 by 3 tensor. All elements after the sixth are ignored. 
Parameters ----------- D : array_like, (..., >6) Unique elements of the tensors Returns -------- tensor : ndarray (..., 3, 3) 3 by 3 tensors """ return D[..., _lt_indices] _lt_rows = np.array([0, 1, 1, 2, 2, 2]) _lt_cols = np.array([0, 0, 1, 0, 1, 2]) def lower_triangular(tensor, b0=None): """ Returns the six lower triangular values of the tensor and a dummy variable if b0 is not None Parameters ---------- tensor : array_like (..., 3, 3) a collection of 3, 3 diffusion tensors b0 : float if b0 is not none log(b0) is returned as the dummy variable Returns ------- D : ndarray If b0 is none, then the shape will be (..., 6) otherwise (..., 7) """ if tensor.shape[-2:] != (3, 3): raise ValueError("Diffusion tensors should be (..., 3, 3)") if b0 is None: return tensor[..., _lt_rows, _lt_cols] else: D = np.empty(tensor.shape[:-2] + (7,), dtype=tensor.dtype) D[..., 6] = -np.log(b0) D[..., :6] = tensor[..., _lt_rows, _lt_cols] return D def decompose_tensor(tensor, min_diffusivity=0): """ Returns eigenvalues and eigenvectors given a diffusion tensor Computes tensor eigen decomposition to calculate eigenvalues and eigenvectors (Basser et al., 1994a). Parameters ---------- tensor : array (..., 3, 3) Hermitian matrix representing a diffusion tensor. min_diffusivity : float Because negative eigenvalues are not physical and small eigenvalues, much smaller than the diffusion weighting, cause quite a lot of noise in metrics such as fa, diffusivity values smaller than `min_diffusivity` are replaced with `min_diffusivity`. Returns ------- eigvals : array (..., 3) Eigenvalues from eigen decomposition of the tensor. Negative eigenvalues are replaced by zero. Sorted from largest to smallest. eigvecs : array (..., 3, 3) Associated eigenvectors from eigen decomposition of the tensor. Eigenvectors are columnar (e.g. eigvecs[..., :, j] is associated with eigvals[..., j]) """ # outputs multiplicity as well so need to unique eigenvals, eigenvecs = eigh(tensor) # need to sort the eigenvalues and associated eigenvectors if eigenvals.ndim == 1: # this is a lot faster when dealing with a single voxel order = eigenvals.argsort()[::-1] eigenvecs = eigenvecs[:, order] eigenvals = eigenvals[order] else: # temporarily flatten eigenvals and eigenvecs to make sorting easier shape = eigenvals.shape[:-1] eigenvals = eigenvals.reshape(-1, 3) eigenvecs = eigenvecs.reshape(-1, 3, 3) size = eigenvals.shape[0] order = eigenvals.argsort()[:, ::-1] xi, yi = np.ogrid[:size, :3, :3][:2] eigenvecs = eigenvecs[xi, yi, order[:, None, :]] xi = np.ogrid[:size, :3][0] eigenvals = eigenvals[xi, order] eigenvecs = eigenvecs.reshape(shape + (3, 3)) eigenvals = eigenvals.reshape(shape + (3, )) eigenvals = eigenvals.clip(min=min_diffusivity) # eigenvecs: each vector is columnar return eigenvals, eigenvecs def design_matrix(gtab, dtype=None): """ Constructs design matrix for DTI weighted least squares or least squares fitting. (Basser et al., 1994a) Parameters ---------- gtab : A GradientTable class instance dtype : string Parameter to control the dtype of returned designed matrix Returns ------- design_matrix : array (g,7) Design matrix or B matrix assuming Gaussian distributed tensor model design_matrix[j, :] = (Bxx, Byy, Bzz, Bxy, Bxz, Byz, dummy) """ B = np.zeros((gtab.gradients.shape[0], 7)) B[:, 0] = gtab.bvecs[:, 0] * gtab.bvecs[:, 0] * 1. * gtab.bvals # Bxx B[:, 1] = gtab.bvecs[:, 0] * gtab.bvecs[:, 1] * 2. * gtab.bvals # Bxy B[:, 2] = gtab.bvecs[:, 1] * gtab.bvecs[:, 1] * 1. * gtab.bvals # Byy B[:, 3] = gtab.bvecs[:, 0] * gtab.bvecs[:, 2] * 2. 
* gtab.bvals # Bxz B[:, 4] = gtab.bvecs[:, 1] * gtab.bvecs[:, 2] * 2. * gtab.bvals # Byz B[:, 5] = gtab.bvecs[:, 2] * gtab.bvecs[:, 2] * 1. * gtab.bvals # Bzz B[:, 6] = np.ones(gtab.gradients.shape[0]) return -B def quantize_evecs(evecs, odf_vertices=None): """ Find the closest orientation of an evenly distributed sphere Parameters ---------- evecs : ndarray odf_vertices : None or ndarray If None, then set vertices from symmetric362 sphere. Otherwise use passed ndarray as vertices Returns ------- IN : ndarray """ max_evecs = evecs[..., :, 0] if odf_vertices is None: odf_vertices = get_sphere('symmetric362').vertices tup = max_evecs.shape[:-1] mec = max_evecs.reshape(np.prod(np.array(tup)), 3) IN = np.array([np.argmin(np.dot(odf_vertices, m)) for m in mec]) IN = IN.reshape(tup) return IN def eig_from_lo_tri(data, min_diffusivity=0): """ Calculates tensor eigenvalues/eigenvectors from an array containing the lower diagonal form of the six unique tensor elements. Parameters ---------- data : array_like (..., 6) diffusion tensors elements stored in lower triangular order min_diffusivity : float See decompose_tensor() Returns ------- dti_params : array (..., 12) Eigen-values and eigen-vectors of the same array. """ data = np.asarray(data) evals, evecs = decompose_tensor(from_lower_triangular(data), min_diffusivity=min_diffusivity) dti_params = np.concatenate((evals[..., None, :], evecs), axis=-2) return dti_params.reshape(data.shape[:-1] + (12, )) common_fit_methods = {'WLS': wls_fit_tensor, 'LS': ols_fit_tensor, 'OLS': ols_fit_tensor, 'NLLS': nlls_fit_tensor, 'RT': restore_fit_tensor, 'restore': restore_fit_tensor, 'RESTORE': restore_fit_tensor } dipy-0.13.0/dipy/reconst/fwdti.py000066400000000000000000000770251317371701200166660ustar00rootroot00000000000000""" Classes and functions for fitting tensors without free water contamination """ from __future__ import division, print_function, absolute_import import warnings import numpy as np import scipy.optimize as opt from dipy.reconst.base import ReconstModel from dipy.reconst.dti import (TensorFit, design_matrix, decompose_tensor, _decompose_tensor_nan, from_lower_triangular, lower_triangular) from dipy.reconst.dki import _positive_evals from dipy.reconst.vec_val_sum import vec_val_vect from dipy.core.ndindex import ndindex from dipy.reconst.multi_voxel import multi_voxel_fit def fwdti_prediction(params, gtab, S0=1, Diso=3.0e-3): r""" Signal prediction given the free water DTI model parameters. Parameters ---------- params : (..., 13) ndarray Model parameters. The last dimension should have the 12 tensor parameters (3 eigenvalues, followed by the 3 corresponding eigenvectors) and the volume fraction of the free water compartment. gtab : a GradientTable class instance The gradient table for this prediction S0 : float or ndarray The non diffusion-weighted signal in every voxel, or across all voxels. Default: 1 Diso : float, optional Value of the free water isotropic diffusion. Default is set to 3e-3 $mm^{2}.s^{-1}$. Please adjust this value if you are assuming different units of diffusion. 
Returns -------- S : (..., N) ndarray Simulated signal based on the free water DTI model Notes ----- The predicted signal is given by: $S(\theta, b) = S_0 * [(1-f) * e^{-b ADC} + f * e^{-b D_{iso}]$, where $ADC = \theta Q \theta^T$, $\theta$ is a unit vector pointing at any direction on the sphere for which a signal is to be predicted, $b$ is the b value provided in the GradientTable input for that direction, $Q$ is the quadratic form of the tensor determined by the input parameters, $f$ is the free water diffusion compartment, $D_{iso}$ is the free water diffusivity which is equal to $3 * 10^{-3} mm^{2}s^{-1} [1]_. References ---------- .. [1] Hoy, A.R., Koay, C.G., Kecskemeti, S.R., Alexander, A.L., 2014. Optimization of a free water elimination two-compartmental model for diffusion tensor imaging. NeuroImage 103, 323-333. doi: 10.1016/j.neuroimage.2014.09.053 """ evals = params[..., :3] evecs = params[..., 3:-1].reshape(params.shape[:-1] + (3, 3)) f = params[..., 12] qform = vec_val_vect(evecs, evals) lower_dt = lower_triangular(qform, S0) lower_diso = lower_dt.copy() lower_diso[..., 0] = lower_diso[..., 2] = lower_diso[..., 5] = Diso lower_diso[..., 1] = lower_diso[..., 3] = lower_diso[..., 4] = 0 D = design_matrix(gtab) pred_sig = np.zeros(f.shape + (gtab.bvals.shape[0],)) mask = _positive_evals(evals[..., 0], evals[..., 1], evals[..., 2]) index = ndindex(f.shape) for v in index: if mask[v]: pred_sig[v] = (1 - f[v]) * np.exp(np.dot(lower_dt[v], D.T)) + \ f[v] * np.exp(np.dot(lower_diso[v], D.T)) return pred_sig class FreeWaterTensorModel(ReconstModel): """ Class for the Free Water Elimination Diffusion Tensor Model """ def __init__(self, gtab, fit_method="NLS", *args, **kwargs): """ Free Water Diffusion Tensor Model [1]_. Parameters ---------- gtab : GradientTable class instance fit_method : str or callable str can be one of the following: 'WLS' for weighted linear least square fit according to [1]_ :func:`fwdti.wls_iter` 'NLS' for non-linear least square fit according to [1]_ :func:`fwdti.nls_iter` callable has to have the signature: fit_method(design_matrix, data, *args, **kwargs) args, kwargs : arguments and key-word arguments passed to the fit_method. See fwdti.wls_iter, fwdti.nls_iter for details References ---------- .. [1] Hoy, A.R., Koay, C.G., Kecskemeti, S.R., Alexander, A.L., 2014. Optimization of a free water elimination two-compartmental model for diffusion tensor imaging. NeuroImage 103, 323-333. doi: 10.1016/j.neuroimage.2014.09.053 """ ReconstModel.__init__(self, gtab) if not callable(fit_method): try: fit_method = common_fit_methods[fit_method] except KeyError: e_s = '"' + str(fit_method) + '" is not a known fit ' e_s += 'method, the fit method should either be a ' e_s += 'function or one of the common fit methods' raise ValueError(e_s) self.fit_method = fit_method self.design_matrix = design_matrix(self.gtab) self.args = args self.kwargs = kwargs # Check if at least three b-values are given bmag = int(np.log10(self.gtab.bvals.max())) b = self.gtab.bvals.copy() / (10 ** (bmag-1)) # normalize b units b = b.round() uniqueb = np.unique(b) if len(uniqueb) < 3: mes = "fwdti fit requires data for at least 2 non zero b-values" raise ValueError(mes) @multi_voxel_fit def fit(self, data, mask=None): """ Fit method of the free water elimination DTI model class Parameters ---------- data : array The measured signal from one voxel. 
mask : array A boolean array used to mark the coordinates in the data that should be analyzed that has the shape data.shape[:-1] """ S0 = np.mean(data[self.gtab.b0s_mask]) fwdti_params = self.fit_method(self.design_matrix, data, S0, *self.args, **self.kwargs) return FreeWaterTensorFit(self, fwdti_params) def predict(self, fwdti_params, S0=1): """ Predict a signal for this TensorModel class instance given parameters. Parameters ---------- fwdti_params : (..., 13) ndarray The last dimension should have 13 parameters: the 12 tensor parameters (3 eigenvalues, followed by the 3 corresponding eigenvectors) and the free water volume fraction. S0 : float or ndarray The non diffusion-weighted signal in every voxel, or across all voxels. Default: 1 Returns -------- S : (..., N) ndarray Simulated signal based on the free water DTI model """ return fwdti_prediction(fwdti_params, self.gtab, S0=S0) class FreeWaterTensorFit(TensorFit): """ Class for fitting the Free Water Tensor Model """ def __init__(self, model, model_params): """ Initialize a FreeWaterTensorFit class instance. Since the free water tensor model is an extension of DTI, class instance is defined as subclass of the TensorFit from dti.py Parameters ---------- model : FreeWaterTensorModel Class instance Class instance containing the free water tensor model for the fit model_params : ndarray (x, y, z, 13) or (n, 13) All parameters estimated from the free water tensor model. Parameters are ordered as follows: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first, second and third coordinates of the eigenvector 3) The volume fraction of the free water compartment """ TensorFit.__init__(self, model, model_params) @property def f(self): """ Returns the free water diffusion volume fraction f """ return self.model_params[..., 12] def predict(self, gtab, S0=1): r""" Given a free water tensor model fit, predict the signal on the vertices of a gradient table Parameters ---------- gtab : a GradientTable class instance The gradient table for this prediction S0 : float array The mean non-diffusion weighted signal in each voxel. Default: 1 in all voxels. Returns -------- S : (..., N) ndarray Simulated signal based on the free water DTI model """ return fwdti_prediction(self.model_params, gtab, S0=S0) def wls_iter(design_matrix, sig, S0, Diso=3e-3, mdreg=2.7e-3, min_signal=1.0e-6, piterations=3): """ Applies weighted linear least squares fit of the water free elimination model to single voxel signals. Parameters ---------- design_matrix : array (g, 7) Design matrix holding the covariants used to solve for the regression coefficients. sig : array (g, ) Diffusion-weighted signal for a single voxel data. S0 : float Non diffusion weighted signal (i.e. signal for b-value=0). Diso : float, optional Value of the free water isotropic diffusion. Default is set to 3e-3 $mm^{2}.s^{-1}$. Please ajust this value if you are assuming different units of diffusion. mdreg : float, optimal DTI's mean diffusivity regularization threshold. If standard DTI diffusion tensor's mean diffusivity is almost near the free water diffusion value, the diffusion signal is assumed to be only free water diffusion (i.e. volume fraction will be set to 1 and tissue's diffusion parameters are set to zero). Default md_reg is 2.7e-3 $mm^{2}.s^{-1}$ (corresponding to 90% of the free water diffusion value). min_signal : float The minimum signal value. Needs to be a strictly positive number. Default: minimal signal in the data provided to `fit`. 
piterations : inter, optional Number of iterations used to refine the precision of f. Default is set to 3 corresponding to a precision of 0.01. Returns ------- All parameters estimated from the free water tensor model. Parameters are ordered as follows: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first, second and third coordinates of the eigenvector 3) The volume fraction of the free water compartment """ W = design_matrix # DTI ordinary linear least square solution log_s = np.log(np.maximum(sig, min_signal)) # Define weights S2 = np.diag(sig**2) # DTI weighted linear least square solution WTS2 = np.dot(W.T, S2) inv_WT_S2_W = np.linalg.pinv(np.dot(WTS2, W)) invWTS2W_WTS2 = np.dot(inv_WT_S2_W, WTS2) params = np.dot(invWTS2W_WTS2, log_s) md = (params[0] + params[2] + params[5]) / 3 # Process voxel if it has significant signal from tissue if md < mdreg and np.mean(sig) > min_signal and S0 > min_signal: # General free-water signal contribution fwsig = np.exp(np.dot(design_matrix, np.array([Diso, 0, Diso, 0, 0, Diso, 0]))) df = 1 # initialize precision flow = 0 # lower f evaluated fhig = 1 # higher f evaluated ns = 9 # initial number of samples per iteration for p in range(piterations): df = df * 0.1 fs = np.linspace(flow+df, fhig-df, num=ns) # sampling f SFW = np.array([fwsig, ]*ns) # repeat contributions for all values FS, SI = np.meshgrid(fs, sig) SA = SI - FS*S0*SFW.T # SA < 0 means that the signal components from the free water # component is larger than the total fiber. This cases are present # for inapropriate large volume fractions (given the current S0 # value estimated). To overcome this issue negative SA are replaced # by data's min positive signal. SA[SA <= 0] = min_signal y = np.log(SA / (1-FS)) all_new_params = np.dot(invWTS2W_WTS2, y) # Select params for lower F2 SIpred = (1-FS)*np.exp(np.dot(W, all_new_params)) + FS*S0*SFW.T F2 = np.sum(np.square(SI - SIpred), axis=0) Mind = np.argmin(F2) params = all_new_params[:, Mind] f = fs[Mind] # Updated f flow = f - df # refining precision fhig = f + df ns = 19 evals, evecs = decompose_tensor(from_lower_triangular(params)) fw_params = np.concatenate((evals, evecs[0], evecs[1], evecs[2], np.array([f])), axis=0) else: fw_params = np.zeros(13) if md > mdreg: fw_params[12] = 1.0 return fw_params def wls_fit_tensor(gtab, data, Diso=3e-3, mask=None, min_signal=1.0e-6, piterations=3, mdreg=2.7e-3): r""" Computes weighted least squares (WLS) fit to calculate self-diffusion tensor using a linear regression model [1]_. Parameters ---------- gtab : a GradientTable class instance The gradient table containing diffusion acquisition parameters. data : ndarray ([X, Y, Z, ...], g) Data or response variables holding the data. Note that the last dimension should contain the data. It makes no copies of data. Diso : float, optional Value of the free water isotropic diffusion. Default is set to 3e-3 $mm^{2}.s^{-1}$. Please ajust this value if you are assuming different units of diffusion. mask : array, optional A boolean array used to mark the coordinates in the data that should be analyzed that has the shape data.shape[:-1] min_signal : float The minimum signal value. Needs to be a strictly positive number. Default: 1.0e-6. piterations : inter, optional Number of iterations used to refine the precision of f. Default is set to 3 corresponding to a precision of 0.01. mdreg : float, optimal DTI's mean diffusivity regularization threshold. 
If standard DTI diffusion tensor's mean diffusivity is almost near the free water diffusion value, the diffusion signal is assumed to be only free water diffusion (i.e. volume fraction will be set to 1 and tissue's diffusion parameters are set to zero). Default md_reg is 2.7e-3 $mm^{2}.s^{-1}$ (corresponding to 90% of the free water diffusion value). Returns ------- fw_params : ndarray (x, y, z, 13) Matrix containing in the last dimention the free water model parameters in the following order: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first, second and third coordinates of the eigenvector 3) The volume fraction of the free water compartment. References ---------- .. [1] Hoy, A.R., Koay, C.G., Kecskemeti, S.R., Alexander, A.L., 2014. Optimization of a free water elimination two-compartmental model for diffusion tensor imaging. NeuroImage 103, 323-333. doi: 10.1016/j.neuroimage.2014.09.053 """ fw_params = np.zeros(data.shape[:-1] + (13,)) W = design_matrix(gtab) # Prepare mask if mask is None: mask = np.ones(data.shape[:-1], dtype=bool) else: if mask.shape != data.shape[:-1]: raise ValueError("Mask is not the same shape as data.") mask = np.array(mask, dtype=bool, copy=False) # Prepare S0 S0 = np.mean(data[:, :, :, gtab.b0s_mask], axis=-1) index = ndindex(mask.shape) for v in index: if mask[v]: params = wls_iter(W, data[v], S0[v], min_signal=min_signal, Diso=3e-3, piterations=piterations, mdreg=mdreg) fw_params[v] = params return fw_params def _nls_err_func(tensor_elements, design_matrix, data, Diso=3e-3, weighting=None, sigma=None, cholesky=False, f_transform=False): """ Error function for the non-linear least-squares fit of the tensor water elimination model. Parameters ---------- tensor_elements : array (8, ) The six independent elements of the diffusion tensor followed by -log(S0) and the volume fraction f of the water elimination compartment. Note that if cholesky is set to true, tensor elements are assumed to be written as Cholesky's decomposition elements. If f_transform is true, volume fraction f has to be converted to ft = arcsin(2*f - 1) + pi/2 design_matrix : array The design matrix data : array The voxel signal in all gradient directions Diso : float, optional Value of the free water isotropic diffusion. Default is set to 3e-3 $mm^{2}.s^{-1}$. Please ajust this value if you are assuming different units of diffusion. weighting : str (optional). Whether to use the Geman McClure weighting criterion (see [1]_ for details) sigma : float or float array (optional) If 'sigma' weighting is used, we will weight the error function according to the background noise estimated either in aggregate over all directions (when a float is provided), or to an estimate of the noise in each diffusion-weighting direction (if an array is provided). If 'gmm', the Geman-Mclure M-estimator is used for weighting. cholesky : bool, optional If true, the diffusion tensor elements were decomposed using cholesky decomposition. See fwdti.nls_fit_tensor Default: False f_transform : bool, optional If true, the water volume fraction was converted to ft = arcsin(2*f - 1) + pi/2, insuring f estimates between 0 and 1. 
See fwdti.nls_fit_tensor Default: True """ tensor = np.copy(tensor_elements) if cholesky: tensor[:6] = cholesky_to_lower_triangular(tensor[:6]) if f_transform: f = 0.5 * (1 + np.sin(tensor[7] - np.pi/2)) else: f = tensor[7] # This is the predicted signal given the params: y = (1-f) * np.exp(np.dot(design_matrix, tensor[:7])) + \ f * np.exp(np.dot(design_matrix, np.array([Diso, 0, Diso, 0, 0, Diso, tensor[6]]))) # Compute the residuals residuals = data - y # If we don't want to weight the residuals, we are basically done: if weighting is None: # And we return the SSE: return residuals se = residuals ** 2 # If the user provided a sigma (e.g 1.5267 * std(background_noise), as # suggested by Chang et al.) we will use it: if weighting == 'sigma': if sigma is None: e_s = "Must provide sigma value as input to use this weighting" e_s += " method" raise ValueError(e_s) w = 1/(sigma**2) elif weighting == 'gmm': # We use the Geman McClure M-estimator to compute the weights on the # residuals: C = 1.4826 * np.median(np.abs(residuals - np.median(residuals))) with warnings.catch_warnings(): warnings.simplefilter("ignore") w = 1/(se + C**2) # The weights are normalized to the mean weight (see p. 1089): w = w/np.mean(w) # Return the weighted residuals: with warnings.catch_warnings(): warnings.simplefilter("ignore") return np.sqrt(w * se) def _nls_jacobian_func(tensor_elements, design_matrix, data, Diso=3e-3, weighting=None, sigma=None, cholesky=False, f_transform=False): """The Jacobian is the first derivative of the least squares error function. Parameters ---------- tensor_elements : array (8, ) The six independent elements of the diffusion tensor followed by -log(S0) and the volume fraction f of the water elimination compartment. Note that if f_transform is true, volume fraction f is converted to ft = arcsin(2*f - 1) + pi/2 design_matrix : array The design matrix Diso : float, optional Value of the free water isotropic diffusion. Default is set to 3e-3 $mm^{2}.s^{-1}$. Please ajust this value if you are assuming different units of diffusion. f_transform : bool, optional If true, the water volume fraction was converted to ft = arcsin(2*f - 1) + pi/2, insuring f estimates between 0 and 1. See fwdti.nls_fit_tensor Default: True """ tensor = np.copy(tensor_elements) if f_transform: f = 0.5 * (1 + np.sin(tensor[7] - np.pi/2)) else: f = tensor[7] t = np.exp(np.dot(design_matrix, tensor[:7])) s = np.exp(np.dot(design_matrix, np.array([Diso, 0, Diso, 0, 0, Diso, tensor[6]]))) T = (f-1.0) * t[:, None] * design_matrix S = np.zeros(design_matrix.shape) S[:, 6] = f * s if f_transform: df = (t-s) * (0.5*np.cos(tensor[7]-np.pi/2)) else: df = (t-s) return np.concatenate((T - S, df[:, None]), axis=1) def nls_iter(design_matrix, sig, S0, Diso=3e-3, mdreg=2.7e-3, min_signal=1.0e-6, cholesky=False, f_transform=True, jac=False, weighting=None, sigma=None): """ Applies non linear least squares fit of the water free elimination model to single voxel signals. Parameters ---------- design_matrix : array (g, 7) Design matrix holding the covariants used to solve for the regression coefficients. sig : array (g, ) Diffusion-weighted signal for a single voxel data. S0 : float Non diffusion weighted signal (i.e. signal for b-value=0). Diso : float, optional Value of the free water isotropic diffusion. Default is set to 3e-3 $mm^{2}.s^{-1}$. Please ajust this value if you are assuming different units of diffusion. mdreg : float, optimal DTI's mean diffusivity regularization threshold. 
If standard DTI diffusion tensor's mean diffusivity is almost near the free water diffusion value, the diffusion signal is assumed to be only free water diffusion (i.e. volume fraction will be set to 1 and tissue's diffusion parameters are set to zero). Default md_reg is 2.7e-3 $mm^{2}.s^{-1}$ (corresponding to 90% of the free water diffusion value). min_signal : float The minimum signal value. Needs to be a strictly positive number. cholesky : bool, optional If true it uses cholesky decomposition to insure that diffusion tensor is positive define. Default: False f_transform : bool, optional If true, the water volume fractions is converted during the convergence procedure to ft = arcsin(2*f - 1) + pi/2, insuring f estimates between 0 and 1. Default: True jac : bool Use the Jacobian? Default: False weighting: str, optional the weighting scheme to use in considering the squared-error. Default behavior is to use uniform weighting. Other options: 'sigma' 'gmm' sigma: float, optional If the 'sigma' weighting scheme is used, a value of sigma needs to be provided here. According to [Chang2005]_, a good value to use is 1.5267 * std(background_noise), where background_noise is estimated from some part of the image known to contain no signal (only noise). Returns ------- All parameters estimated from the free water tensor model. Parameters are ordered as follows: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first, second and third coordinates of the eigenvector 3) The volume fraction of the free water compartment. """ # Initial guess params = wls_iter(design_matrix, sig, S0, min_signal=min_signal, Diso=Diso, mdreg=mdreg) # Process voxel if it has significant signal from tissue if params[12] < 0.99 and np.mean(sig) > min_signal and S0 > min_signal: # converting evals and evecs to diffusion tensor elements evals = params[:3] evecs = params[3:12].reshape((3, 3)) dt = lower_triangular(vec_val_vect(evecs, evals)) # Cholesky decomposition if requested if cholesky: dt = lower_triangular_to_cholesky(dt) # f transformation if requested if f_transform: f = np.arcsin(2*params[12] - 1) + np.pi/2 else: f = params[12] # Use the Levenberg-Marquardt algorithm wrapped in opt.leastsq start_params = np.concatenate((dt, [-np.log(S0), f]), axis=0) if jac: this_tensor, status = opt.leastsq(_nls_err_func, start_params[:8], args=(design_matrix, sig, Diso, weighting, sigma, cholesky, f_transform), Dfun=_nls_jacobian_func) else: this_tensor, status = opt.leastsq(_nls_err_func, start_params[:8], args=(design_matrix, sig, Diso, weighting, sigma, cholesky, f_transform)) # Process tissue diffusion tensor if cholesky: this_tensor[:6] = cholesky_to_lower_triangular(this_tensor[:6]) evals, evecs = _decompose_tensor_nan( from_lower_triangular(this_tensor[:6]), from_lower_triangular(start_params[:6])) # Process water volume fraction f f = this_tensor[7] if f_transform: f = 0.5 * (1 + np.sin(f - np.pi/2)) params = np.concatenate((evals, evecs[0], evecs[1], evecs[2], np.array([f])), axis=0) return params def nls_fit_tensor(gtab, data, mask=None, Diso=3e-3, mdreg=2.7e-3, min_signal=1.0e-6, f_transform=True, cholesky=False, jac=False, weighting=None, sigma=None): """ Fit the water elimination tensor model using the non-linear least-squares. Parameters ---------- gtab : a GradientTable class instance The gradient table containing diffusion acquisition parameters. data : ndarray ([X, Y, Z, ...], g) Data or response variables holding the data. 
Note that the last dimension should contain the data. It makes no copies of data. mask : array, optional A boolean array used to mark the coordinates in the data that should be analyzed, with the shape data.shape[:-1] Diso : float, optional Value of the free water isotropic diffusion. Default is set to 3e-3 $mm^{2}.s^{-1}$. Please adjust this value if you are assuming different units of diffusion. mdreg : float, optional DTI's mean diffusivity regularization threshold. If standard DTI diffusion tensor's mean diffusivity is almost near the free water diffusion value, the diffusion signal is assumed to be only free water diffusion (i.e. volume fraction will be set to 1 and tissue's diffusion parameters are set to zero). Default md_reg is 2.7e-3 $mm^{2}.s^{-1}$ (corresponding to 90% of the free water diffusion value). min_signal : float The minimum signal value. Needs to be a strictly positive number. Default: 1.0e-6. f_transform : bool, optional If true, the water volume fraction is converted during the convergence procedure to ft = arcsin(2*f - 1) + pi/2, ensuring f estimates between 0 and 1. Default: True cholesky : bool, optional If true it uses Cholesky decomposition to ensure that the diffusion tensor is positive definite. Default: False jac : bool Use the Jacobian? Default: False weighting: str, optional the weighting scheme to use in considering the squared-error. Default behavior is to use uniform weighting. Other options: 'sigma' 'gmm' sigma: float, optional If the 'sigma' weighting scheme is used, a value of sigma needs to be provided here. According to [Chang2005]_, a good value to use is 1.5267 * std(background_noise), where background_noise is estimated from some part of the image known to contain no signal (only noise). Returns ------- fw_params : ndarray (x, y, z, 13) Matrix containing in the last dimension the free water model parameters in the following order: 1) Three diffusion tensor's eigenvalues 2) Three lines of the eigenvector matrix each containing the first, second and third coordinates of the eigenvector 3) The volume fraction of the free water compartment """ fw_params = np.zeros(data.shape[:-1] + (13,)) W = design_matrix(gtab) # Prepare mask if mask is None: mask = np.ones(data.shape[:-1], dtype=bool) else: if mask.shape != data.shape[:-1]: raise ValueError("Mask is not the same shape as data.") mask = np.array(mask, dtype=bool, copy=False) # Prepare S0 S0 = np.mean(data[:, :, :, gtab.b0s_mask], axis=-1) index = ndindex(mask.shape) for v in index: if mask[v]: params = nls_iter(W, data[v], S0[v], Diso=Diso, mdreg=mdreg, min_signal=min_signal, f_transform=f_transform, cholesky=cholesky, jac=jac, weighting=weighting, sigma=sigma) fw_params[v] = params return fw_params def lower_triangular_to_cholesky(tensor_elements): """ Performs Cholesky decomposition of the diffusion tensor Parameters ---------- tensor_elements : array (6,) Array containing the six elements of diffusion tensor's lower triangular. Returns ------- cholesky_elements : array (6,) Array containing the six Cholesky's decomposition elements (R0, R1, R2, R3, R4, R5) [1]_. References ---------- .. [1] Koay, C.G., Carew, J.D., Alexander, A.L., Basser, P.J., Meyerand, M.E., 2006. Investigation of anomalous estimates of tensor-derived quantities in diffusion tensor imaging. Magnetic Resonance in Medicine, 55(4), 930-936. 
doi:10.1002/mrm.20832 """ R0 = np.sqrt(tensor_elements[0]) R3 = tensor_elements[1] / R0 R1 = np.sqrt(tensor_elements[2] - R3**2) R5 = tensor_elements[3] / R0 R4 = (tensor_elements[4] - R3*R5) / R1 R2 = np.sqrt(tensor_elements[5] - R4**2 - R5**2) return np.array([R0, R1, R2, R3, R4, R5]) def cholesky_to_lower_triangular(R): """ Convert Cholesky decompostion elements to the diffusion tensor elements Parameters ---------- R : array (6,) Array containing the six Cholesky's decomposition elements (R0, R1, R2, R3, R4, R5) [1]_. Returns ------- tensor_elements : array (6,) Array containing the six elements of diffusion tensor's lower triangular. References ---------- .. [1] Koay, C.G., Carew, J.D., Alexander, A.L., Basser, P.J., Meyerand, M.E., 2006. Investigation of anomalous estimates of tensor-derived quantities in diffusion tensor imaging. Magnetic Resonance in Medicine, 55(4), 930-936. doi:10.1002/mrm.20832 """ Dxx = R[0]**2 Dxy = R[0]*R[3] Dyy = R[1]**2 + R[3]**2 Dxz = R[0]*R[5] Dyz = R[1]*R[4] + R[3]*R[5] Dzz = R[2]**2 + R[4]**2 + R[5]**2 return np.array([Dxx, Dxy, Dyy, Dxz, Dyz, Dzz]) common_fit_methods = {'WLLS': wls_iter, 'WLS': wls_iter, 'NLLS': nls_iter, 'NLS': nls_iter, } dipy-0.13.0/dipy/reconst/gqi.py000066400000000000000000000215571317371701200163300ustar00rootroot00000000000000""" Classes and functions for generalized q-sampling """ import numpy as np from dipy.reconst.odf import OdfModel, OdfFit, gfa from dipy.reconst.cache import Cache import warnings from dipy.reconst.multi_voxel import multi_voxel_fit from dipy.reconst.recspeed import local_maxima, remove_similar_vertices class GeneralizedQSamplingModel(OdfModel, Cache): def __init__(self, gtab, method='gqi2', sampling_length=1.2, normalize_peaks=False): r""" Generalized Q-Sampling Imaging [1]_ This model has the same assumptions as the DSI method i.e. Cartesian grid sampling in q-space and fast gradient switching. Implements equations 2.14 from [2]_ for standard GQI and equation 2.16 from [2]_ for GQI2. You can think of GQI2 as an analytical solution of the DSI ODF. Parameters ---------- gtab : object, GradientTable method : str, 'standard' or 'gqi2' sampling_length : float, diffusion sampling length (lambda in eq. 2.14 and 2.16) References ---------- .. [1] Yeh F-C et. al, "Generalized Q-Sampling Imaging", IEEE TMI, 2010 .. [2] Garyfallidis E, "Towards an accurate brain tractography", PhD thesis, University of Cambridge, 2012. Notes ----- As of version 0.9, range of the sampling length in GQI2 has changed to match the same scale used in the 'standard' method [1]_. This means that the value of `sampling_length` should be approximately 1 - 1.3 (see [1]_, pg. 1628). Examples -------- Here we create an example where we provide the data, a gradient table and a reconstruction sphere and calculate the ODF for the first voxel in the data. 
>>> from dipy.data import dsi_voxels >>> data, gtab = dsi_voxels() >>> from dipy.core.subdivide_octahedron import create_unit_sphere >>> sphere = create_unit_sphere(5) >>> from dipy.reconst.gqi import GeneralizedQSamplingModel >>> gq = GeneralizedQSamplingModel(gtab, 'gqi2', 1.1) >>> voxel_signal = data[0, 0, 0] >>> odf = gq.fit(voxel_signal).odf(sphere) See Also -------- dipy.reconst.dsi.DiffusionSpectrumModel """ OdfModel.__init__(self, gtab) self.method = method self.Lambda = sampling_length self.normalize_peaks = normalize_peaks # 0.01506 = 6*D where D is the free water diffusion coefficient # l_values sqrt(6 D tau) D free water diffusion coefficient and # tau included in the b-value scaling = np.sqrt(self.gtab.bvals * 0.01506) tmp = np.tile(scaling, (3, 1)) gradsT = self.gtab.bvecs.T b_vector = gradsT * tmp # element-wise product self.b_vector = b_vector.T @multi_voxel_fit def fit(self, data): return GeneralizedQSamplingFit(self, data) class GeneralizedQSamplingFit(OdfFit): def __init__(self, model, data): """ Calculates PDF and ODF for a single voxel Parameters ---------- model : object, DiffusionSpectrumModel data : 1d ndarray, signal values """ OdfFit.__init__(self, model, data) self._gfa = None self.npeaks = 5 self._peak_values = None self._peak_indices = None self._qa = None def odf(self, sphere): """ Calculates the discrete ODF for a given discrete sphere. """ self.gqi_vector = self.model.cache_get('gqi_vector', key=sphere) if self.gqi_vector is None: if self.model.method == 'gqi2': H = squared_radial_component # print self.gqi_vector.shape self.gqi_vector = np.real(H(np.dot( self.model.b_vector, sphere.vertices.T) * self.model.Lambda)) if self.model.method == 'standard': self.gqi_vector = np.real(np.sinc(np.dot( self.model.b_vector, sphere.vertices.T) * self.model.Lambda / np.pi)) self.model.cache_set('gqi_vector', sphere, self.gqi_vector) return np.dot(self.data, self.gqi_vector) def normalize_qa(qa, max_qa=None): """ Normalize quantitative anisotropy. Used mostly with GQI rather than GQI2. Parameters ---------- qa : array, shape (X, Y, Z, N) where N is the maximum number of peaks stored max_qa : float, maximum qa value. Usually found in the CSF (corticospinal fluid). Returns ------- nqa : array, shape (x, Y, Z, N) normalized quantitative anisotropy Notes ----- Normalized quantitative anisotropy has the very useful property to be very small near gray matter and background areas. Therefore, it can be used to mask out white matter areas. """ if max_qa is None: return qa / qa.max() return qa / max_qa def squared_radial_component(x, tol=0.01): """ Part of the GQI2 integral Eq.8 in the referenced paper by Yeh et al. 2010 """ with warnings.catch_warnings(): warnings.simplefilter("ignore") result = (2 * x * np.cos(x) + (x * x - 2) * np.sin(x)) / (x ** 3) x_near_zero = (x < tol) & (x > -tol) return np.where(x_near_zero, 1./3, result) def npa(self, odf, width=5): """ non-parametric anisotropy Nimmo-Smith et. 
al ISMRM 2011 """ # odf = self.odf(s) t0, t1, t2 = triple_odf_maxima(self.odf_vertices, odf, width) psi0 = t0[1] ** 2 psi1 = t1[1] ** 2 psi2 = t2[1] ** 2 npa = (np.sqrt( (psi0 - psi1) ** 2 + (psi1 - psi2) ** 2 + (psi2 - psi0) ** 2) / np.sqrt(2 * (psi0 ** 2 + psi1 ** 2 + psi2 ** 2))) # print 'tom >>>> ',t0,t1,t2,npa return t0, t1, t2, npa def equatorial_zone_vertices(vertices, pole, width=5): """ finds the 'vertices' in the equatorial zone conjugate to 'pole' with width half 'width' degrees """ return [i for i, v in enumerate(vertices) if np.abs(np.dot(v, pole)) < np.abs(np.sin(np.pi * width / 180))] def polar_zone_vertices(vertices, pole, width=5): """ finds the 'vertices' in the equatorial band around the 'pole' of radius 'width' degrees """ return [i for i, v in enumerate(vertices) if np.abs(np.dot(v, pole)) > np.abs(np.cos(np.pi * width / 180))] def upper_hemi_map(v): """ maps a 3-vector into the z-upper hemisphere """ return np.sign(v[2])*v def equatorial_maximum(vertices, odf, pole, width): eqvert = equatorial_zone_vertices(vertices, pole, width) # need to test for whether eqvert is empty or not if len(eqvert) == 0: print('empty equatorial band at %s pole with width %f' % (np.array_str(pole), width)) return None, None eqvals = [odf[i] for i in eqvert] eqargmax = np.argmax(eqvals) eqvertmax = eqvert[eqargmax] eqvalmax = eqvals[eqargmax] return eqvertmax, eqvalmax def patch_vertices(vertices, pole, width): """ find 'vertices' within the cone of 'width' degrees around 'pole' """ return [i for i, v in enumerate(vertices) if np.abs(np.dot(v, pole)) > np.abs(np.cos(np.pi * width / 180))] def patch_maximum(vertices, odf, pole, width): eqvert = patch_vertices(vertices, pole, width) # need to test for whether eqvert is empty or not if len(eqvert) == 0: print('empty cone around pole %s with with width %f' % (np.array_str(pole), width)) return np.Null, np.Null eqvals = [odf[i] for i in eqvert] eqargmax = np.argmax(eqvals) eqvertmax = eqvert[eqargmax] eqvalmax = eqvals[eqargmax] return eqvertmax, eqvalmax def odf_sum(odf): return np.sum(odf) def patch_sum(vertices, odf, pole, width): eqvert = patch_vertices(vertices, pole, width) # need to test for whether eqvert is empty or not if len(eqvert) == 0: print('empty cone around pole %s with with width %f' % (np.array_str(pole), width)) return np.Null return np.sum([odf[i] for i in eqvert]) def triple_odf_maxima(vertices, odf, width): indmax1 = np.argmax([odf[i] for i, v in enumerate(vertices)]) odfmax1 = odf[indmax1] pole = vertices[indmax1] eqvert = equatorial_zone_vertices(vertices, pole, width) indmax2, odfmax2 = equatorial_maximum(vertices, odf, pole, width) indmax3 = eqvert[np.argmin([np.abs(np.dot(vertices[indmax2], vertices[p])) for p in eqvert])] odfmax3 = odf[indmax3] """ cross12 = np.cross(vertices[indmax1],vertices[indmax2]) cross12 = cross12/np.sqrt(np.sum(cross12**2)) indmax3, odfmax3 = patch_maximum(vertices, odf, cross12, 2*width) """ return [(indmax1, odfmax1), (indmax2, odfmax2), (indmax3, odfmax3)] dipy-0.13.0/dipy/reconst/interpolate.py000066400000000000000000000035461317371701200200740ustar00rootroot00000000000000"""Interpolators wrap arrays to allow the array to be indexed in continuous coordinates This module uses the trackvis coordinate system, for more information about this coordinate system please see dipy.tracking.utils The following modules also use this coordinate system: dipy.tracking.utils dipy.tracking.integration dipy.reconst.interpolate """ from numpy import array from dipy.reconst.recspeed import trilinear_interp 
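# Illustrative sketch (not executed): ``dwi_data`` and ``voxel_size`` are
# placeholders for a 4D diffusion volume and its three voxel dimensions.
#
#     interp = TriLinearInterpolator(dwi_data, voxel_size)
#     value = interp[2.5, 3.1, 7.8]   # continuous (trackvis) coordinates
#
# Indexing a position outside the volume raises ``OutsideImage``.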
class OutsideImage(Exception): pass class Interpolator(object): """Class to be subclassed by different interpolator types""" def __init__(self, data, voxel_size): self.data = data self.voxel_size = array(voxel_size, dtype=float, copy=True) class NearestNeighborInterpolator(Interpolator): """Interpolates data using nearest neighbor interpolation""" def __getitem__(self, index): index = tuple(index / self.voxel_size) if min(index) < 0: raise OutsideImage('Negative Index') try: return self.data[tuple(array(index).astype(int))] except IndexError: raise OutsideImage class TriLinearInterpolator(Interpolator): """Interpolates data using trilinear interpolation interpolate 4d diffusion volume using 3 indices, ie data[x, y, z] """ def __init__(self, data, voxel_size): super(TriLinearInterpolator, self).__init__(data, voxel_size) if self.voxel_size.shape != (3,) or self.data.ndim != 4: raise ValueError("Data should be 4d volume of diffusion data and " "voxel_size should have 3 values, ie the size " "of a 3d voxel") def __getitem__(self, index): index = array(index, copy=False, dtype="float") try: return trilinear_interp(self.data, index, self.voxel_size) except IndexError: raise OutsideImage dipy-0.13.0/dipy/reconst/ivim.py000066400000000000000000000474211317371701200165120ustar00rootroot00000000000000""" Classes and functions for fitting ivim model """ from __future__ import division, print_function, absolute_import from distutils.version import LooseVersion import numpy as np import scipy import warnings from dipy.reconst.base import ReconstModel from dipy.reconst.multi_voxel import multi_voxel_fit SCIPY_LESS_0_17 = (LooseVersion(scipy.version.short_version) < LooseVersion('0.17')) if SCIPY_LESS_0_17: from scipy.optimize import leastsq else: from scipy.optimize import least_squares def ivim_prediction(params, gtab, S0=1.): """The Intravoxel incoherent motion (IVIM) model function. Parameters ---------- params : array An array of IVIM parameters - [S0, f, D_star, D]. gtab : GradientTable class instance Gradient directions and bvalues. S0 : float, optional This has been added just for consistency with the existing API. Unlike other models, IVIM predicts S0 and this is over written by the S0 value in params. Returns ------- S : array An array containing the IVIM signal estimated using given parameters. """ S0, f, D_star, D = params b = gtab.bvals S = S0 * (f * np.exp(-b * D_star) + (1 - f) * np.exp(-b * D)) return S def _ivim_error(params, gtab, signal): """Error function to be used in fitting the IVIM model. Parameters ---------- params : array An array of IVIM parameters - [S0, f, D_star, D] gtab : GradientTable class instance Gradient directions and bvalues. signal : array Array containing the actual signal values. Returns ------- residual : array An array containing the difference between actual and estimated signal. """ residual = signal - ivim_prediction(params, gtab) return residual def f_D_star_prediction(params, gtab, S0, D): """Function used to predict IVIM signal when S0 and D are known by considering f and D_star as the unknown parameters. Parameters ---------- params : array The value of f and D_star. gtab : GradientTable class instance Gradient directions and bvalues. S0 : float The parameters S0 obtained from a linear fit. D : float The parameters D obtained from a linear fit. Returns ------- S : array An array containing the IVIM signal estimated using given parameters. 
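# Illustrative sketch (not part of the dipy test suite): evaluating the
# bi-exponential IVIM signal implemented by ``ivim_prediction`` above,
# S(b) = S0 * (f * exp(-b * D_star) + (1 - f) * exp(-b * D)).
# The b-values and parameter values are made up for illustration only.
import numpy as np

from dipy.core.gradients import gradient_table
from dipy.reconst.ivim import ivim_prediction

bvals = np.array([0., 10., 20., 30., 40., 60., 80., 100., 120., 140.,
                  160., 180., 200., 300., 400., 500., 600., 700., 800.,
                  900., 1000.])
bvecs = np.zeros((bvals.shape[0], 3))
bvecs[:, 2] = 1.                      # single gradient direction
gtab = gradient_table(bvals, bvecs, b0_threshold=0)

params = np.array([1.0, 0.1, 0.01, 0.001])   # S0, f, D*, D
signal = ivim_prediction(params, gtab)
print(signal[0], signal[-1])          # S(b=0) equals S0; strong decay at b=1000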
""" f, D_star = params b = gtab.bvals S = S0 * (f * np.exp(-b * D_star) + (1 - f) * np.exp(-b * D)) return S def f_D_star_error(params, gtab, signal, S0, D): """Error function used to fit f and D_star keeping S0 and D fixed Parameters ---------- params : array The value of f and D_star. gtab : GradientTable class instance Gradient directions and bvalues. signal : array Array containing the actual signal values. S0 : float The parameters S0 obtained from a linear fit. D : float The parameters D obtained from a linear fit. Returns ------- residual : array An array containing the difference of actual and estimated signal. """ f, D_star = params return signal - f_D_star_prediction([f, D_star], gtab, S0, D) class IvimModel(ReconstModel): """Ivim model """ def __init__(self, gtab, split_b_D=400.0, split_b_S0=200., bounds=None, two_stage=True, tol=1e-15, x_scale=[1000., 0.1, 0.001, 0.0001], options={'gtol': 1e-15, 'ftol': 1e-15, 'eps': 1e-15, 'maxiter': 1000}): """ Initialize an IVIM model. The IVIM model assumes that biological tissue includes a volume fraction 'f' of water flowing with a pseudo-perfusion coefficient D* and a fraction (1-f) of static (diffusion only), intra and extracellular water, with a diffusion coefficient D. In this model the echo attenuation of a signal in a single voxel can be written as .. math:: S(b) = S_0[f*e^{(-b*D\*)} + (1-f)e^{(-b*D)}] Where: .. math:: S_0, f, D\* and D are the IVIM parameters. Parameters ---------- gtab : GradientTable class instance Gradient directions and bvalues split_b_D : float, optional The b-value to split the data on for two-stage fit. This will be used while estimating the value of D. The assumption is that at higher b values the effects of perfusion is less and hence the signal can be approximated as a mono-exponential decay. default : 400. split_b_S0 : float, optional The b-value to split the data on for two-stage fit for estimation of S0 and initial guess for D_star. The assumption here is that at low bvalues the effects of perfusion are more. default : 200. bounds : tuple of arrays with 4 elements, optional Bounds to constrain the fitted model parameters. This is only supported for Scipy version > 0.17. When using a older Scipy version, this function will raise an error if bounds are different from None. This parameter is also used to fill nan values for out of bounds parameters in the `IvimFit` class using the method fill_na. default : ([0., 0., 0., 0.], [np.inf, .3, 1., 1.]) two_stage : bool Argument to specify whether to perform a non-linear fitting of all parameters after the linear fitting by splitting the data based on bvalues. This gives more accurate parameters but takes more time. The linear fit can be used to get a quick estimation of the parameters. default : False tol : float, optional Tolerance for convergence of minimization. default : 1e-15 x_scale : array, optional Scaling for the parameters. This is passed to `least_squares` which is only available for Scipy version > 0.17. default: [1000, 0.01, 0.001, 0.0001] options : dict, optional Dictionary containing gtol, ftol, eps and maxiter. This is passed to leastsq. default : options={'gtol': 1e-15, 'ftol': 1e-15, 'eps': 1e-15, 'maxiter': 1000} References ---------- .. [1] Le Bihan, Denis, et al. "Separation of diffusion and perfusion in intravoxel incoherent motion MR imaging." Radiology 168.2 (1988): 497-505. .. [2] Federau, Christian, et al. "Quantitative measurement of brain perfusion with intravoxel incoherent motion MR imaging." Radiology 265.3 (2012): 874-881. 
""" if not np.any(gtab.b0s_mask): e_s = "No measured signal at bvalue == 0." e_s += "The IVIM model requires signal measured at 0 bvalue" raise ValueError(e_s) ReconstModel.__init__(self, gtab) self.split_b_D = split_b_D self.split_b_S0 = split_b_S0 self.bounds = bounds self.two_stage = two_stage self.tol = tol self.options = options self.x_scale = x_scale if SCIPY_LESS_0_17 and self.bounds is not None: e_s = "Scipy versions less than 0.17 do not support " e_s += "bounds. Please update to Scipy 0.17 to use bounds" raise ValueError(e_s) elif self.bounds is None: self.bounds = ((0., 0., 0., 0.), (np.inf, .3, 1., 1.)) else: self.bounds = bounds @multi_voxel_fit def fit(self, data, mask=None): """ Fit method of the Ivim model class. The fitting takes place in the following steps: Linear fitting for D (bvals > `split_b_D` (default: 400)) and store S0_prime. Another linear fit for S0 (bvals < split_b_S0 (default: 200)). Estimate f using 1 - S0_prime/S0. Use non-linear least squares to fit D_star and f. We do a final non-linear fitting of all four parameters and select the set of parameters which make sense physically. The criteria for selecting a particular set of parameters is checking the pseudo-perfusion fraction. If the fraction is more than `f_threshold` (default: 25%), we will reject the solution obtained from non-linear least squares fitting and consider only the linear fit. Parameters ---------- data : array The measured signal from one voxel. A multi voxel decorator will be applied to this fit method to scale it and apply it to multiple voxels. mask : array A boolean array used to mark the coordinates in the data that should be analyzed that has the shape data.shape[:-1] Returns ------- IvimFit object """ # Get S0_prime and D - paramters assuming a single exponential decay # for signals for bvals greater than `split_b_D` S0_prime, D = self.estimate_linear_fit( data, self.split_b_D, less_than=False) # Get S0 and D_star_prime - paramters assuming a single exponential # decay for for signals for bvals greater than `split_b_S0`. S0, D_star_prime = self.estimate_linear_fit(data, self.split_b_S0, less_than=True) # Estimate f f_guess = 1 - S0_prime / S0 # Fit f and D_star using leastsq. params_f_D_star = [f_guess, D_star_prime] f, D_star = self.estimate_f_D_star(params_f_D_star, data, S0, D) params_linear = np.array([S0, f, D_star, D]) # Fit parameters again if two_stage flag is set. if self.two_stage: params_two_stage = self._leastsq(data, params_linear) bounds_violated = ~(np.all(params_two_stage >= self.bounds[0]) and (np.all(params_two_stage <= self.bounds[1]))) if bounds_violated: warningMsg = "Bounds are violated for leastsq fitting. " warningMsg += "Returning parameters from linear fit" warnings.warn(warningMsg, UserWarning) return IvimFit(self, params_linear) else: return IvimFit(self, params_two_stage) else: return IvimFit(self, params_linear) def estimate_linear_fit(self, data, split_b, less_than=True): """Estimate a linear fit by taking log of data. Parameters ---------- data : array An array containing the data to be fit split_b : float The b value to split the data less_than : bool If True, splitting occurs for bvalues less than split_b Returns ------- S0 : float The estimated S0 value. (intercept) D : float The estimated value of D. 
""" if less_than: bvals_split = self.gtab.bvals[self.gtab.bvals <= split_b] D, neg_log_S0 = np.polyfit(bvals_split, -np.log(data[self.gtab.bvals <= split_b]), 1) else: bvals_split = self.gtab.bvals[self.gtab.bvals >= split_b] D, neg_log_S0 = np.polyfit(bvals_split, -np.log(data[self.gtab.bvals >= split_b]), 1) S0 = np.exp(-neg_log_S0) return S0, D def estimate_f_D_star(self, params_f_D_star, data, S0, D): """Estimate f and D_star using the values of all the other parameters obtained from a linear fit. Parameters ---------- params_f_D_star: array An array containing the value of f and D_star. data : array Array containing the actual signal values. S0 : float The parameters S0 obtained from a linear fit. D : float The parameters D obtained from a linear fit. Returns ------- f : float Perfusion fraction estimated from the fit. D_star : The value of D_star estimated from the fit. """ gtol = self.options["gtol"] ftol = self.options["ftol"] xtol = self.tol epsfcn = self.options["eps"] maxfev = self.options["maxiter"] if SCIPY_LESS_0_17: try: res = leastsq(f_D_star_error, params_f_D_star, args=(self.gtab, data, S0, D), gtol=gtol, xtol=xtol, ftol=ftol, epsfcn=epsfcn, maxfev=maxfev) f, D_star = res[0] return f, D_star except ValueError: warningMsg = "x0 obtained from linear fitting is not feasibile" warningMsg += " as initial guess for leastsq. Parameters are" warningMsg += " returned only from the linear fit." warnings.warn(warningMsg, UserWarning) f, D_star = params_f_D return f, D_star else: try: res = least_squares(f_D_star_error, params_f_D_star, bounds=((0., 0.), (self.bounds[1][1], self.bounds[1][2])), args=(self.gtab, data, S0, D), ftol=ftol, xtol=xtol, gtol=gtol, max_nfev=maxfev) f, D_star = res.x return f, D_star except ValueError: warningMsg = "x0 obtained from linear fitting is not feasibile" warningMsg += " as initial guess for leastsq while estimating " warningMsg += "f and D_star. Using parameters from the " warningMsg += "linear fit." warnings.warn(warningMsg, UserWarning) f, D_star = params_f_D_star return f, D_star def predict(self, ivim_params, gtab, S0=1.): """ Predict a signal for this IvimModel class instance given parameters. Parameters ---------- ivim_params : array The ivim parameters as an array [S0, f, D_star and D] gtab : GradientTable class instance Gradient directions and bvalues. S0 : float, optional This has been added just for consistency with the existing API. Unlike other models, IVIM predicts S0 and this is over written by the S0 value in params. Returns ------- ivim_signal : array The predicted IVIM signal using given parameters. """ return ivim_prediction(ivim_params, gtab) def _leastsq(self, data, x0): """Use leastsq to find ivim_params Parameters ---------- data : array, (len(bvals)) An array containing the signal from a voxel. If the data was a 3D image of 10x10x10 grid with 21 bvalues, the multi_voxel decorator will run the single voxel fitting on all the 1000 voxels to get the parameters in IvimFit.model_paramters. The shape of the parameter array will be (data[:-1], 4). x0 : array Initial guesses for the parameters S0, f, D_star and D calculated using a linear fitting. Returns ------- x0 : array Estimates of the parameters S0, f, D_star and D. 
""" gtol = self.options["gtol"] ftol = self.options["ftol"] xtol = self.tol epsfcn = self.options["eps"] maxfev = self.options["maxiter"] bounds = self.bounds if SCIPY_LESS_0_17: try: res = leastsq(_ivim_error, x0, args=(self.gtab, data), gtol=gtol, xtol=xtol, ftol=ftol, epsfcn=epsfcn, maxfev=maxfev) ivim_params = res[0] if np.all(np.isnan(ivim_params)): return np.array([-1, -1, -1, -1]) return ivim_params except ValueError: warningMsg = "x0 is unfeasible for leastsq fitting." warningMsg += " Returning x0 values from the linear fit." warnings.warn(warningMsg, UserWarning) return x0 else: try: res = least_squares(_ivim_error, x0, bounds=bounds, ftol=ftol, xtol=xtol, gtol=gtol, max_nfev=maxfev, args=(self.gtab, data), x_scale=self.x_scale) ivim_params = res.x if np.all(np.isnan(ivim_params)): return np.array([-1, -1, -1, -1]) return ivim_params except ValueError: warningMsg = "x0 is unfeasible for leastsq fitting." warningMsg += " Returning x0 values from the linear fit." warnings.warn(warningMsg, UserWarning) return x0 class IvimFit(object): def __init__(self, model, model_params): """ Initialize a IvimFit class instance. Parameters ---------- model : Model class model_params : array The parameters of the model. In this case it is an array of ivim parameters. If the fitting is done for multi_voxel data, the multi_voxel decorator will run the fitting on all the voxels and model_params will be an array of the dimensions (data[:-1], 4), i.e., there will be 4 parameters for each of the voxels. """ self.model = model self.model_params = model_params def __getitem__(self, index): model_params = self.model_params N = model_params.ndim if type(index) is not tuple: index = (index,) elif len(index) >= model_params.ndim: raise IndexError("IndexError: invalid index") index = index + (slice(None),) * (N - len(index)) return type(self)(self.model, model_params[index]) @property def S0_predicted(self): return self.model_params[..., 0] @property def perfusion_fraction(self): return self.model_params[..., 1] @property def D_star(self): return self.model_params[..., 2] @property def D(self): return self.model_params[..., 3] @property def shape(self): return self.model_params.shape[:-1] def predict(self, gtab, S0=1.): """Given a model fit, predict the signal. Parameters ---------- gtab : GradientTable class instance Gradient directions and bvalues S0 : float S0 value here is not necessary and will not be used to predict the signal. It has been added to conform to the structure of the predict method in multi_voxel which requires a keyword argument S0. Returns ------- signal : array The signal values predicted for this model using its parameters. 
""" return ivim_prediction(self.model_params, gtab) dipy-0.13.0/dipy/reconst/mapmri.py000066400000000000000000002232141317371701200170270ustar00rootroot00000000000000# -*- coding: utf-8 -*- import numpy as np from dipy.reconst.multi_voxel import multi_voxel_fit from dipy.reconst.base import ReconstModel, ReconstFit from dipy.reconst.cache import Cache from scipy.special import hermite, gamma, genlaguerre try: # preferred scipy >= 0.14, required scipy >= 1.0 from scipy.special import factorial, factorial2 except ImportError: from scipy.misc import factorial, factorial2 from dipy.core.geometry import cart2sphere from dipy.reconst.shm import real_sph_harm, sph_harm_ind_list import dipy.reconst.dti as dti from warnings import warn from dipy.core.gradients import gradient_table from dipy.utils.optpkg import optional_package from dipy.core.optimize import Optimizer cvxpy, have_cvxpy, _ = optional_package("cvxpy") class MapmriModel(ReconstModel, Cache): r"""Mean Apparent Propagator MRI (MAPMRI) [1]_ of the diffusion signal. The main idea is to model the diffusion signal as a linear combination of the continuous functions presented in [2]_ but extending it in three dimensions. The main difference with the SHORE proposed in [3]_ is that MAPMRI 3D extension is provided using a set of three basis functions for the radial part, one for the signal along x, one for y and one for z, while [3]_ uses one basis function to model the radial part and real Spherical Harmonics to model the angular part. From the MAPMRI coefficients is possible to use the analytical formulae to estimate the ODF. References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. .. [2] Ozarslan E. et. al, "Simple harmonic oscillator based reconstruction and estimation for one-dimensional q-space magnetic resonance 1D-SHORE)", eapoc Intl Soc Mag Reson Med, vol. 16, p. 35., 2008. .. [3] Merlet S. et. al, "Continuous diffusion signal, EAP and ODF estimation via Compressive Sensing in diffusion MRI", Medical Image Analysis, 2013. .. [4] Fick, Rutger HJ, et al. "MAPL: Tissue microstructure estimation using Laplacian-regularized MAP-MRI and its application to HCP data." NeuroImage (2016). .. [5] Cheng, J., 2014. Estimation and Processing of Ensemble Average Propagator and Its Features in Diffusion MRI. Ph.D. Thesis. .. [6] Hosseinbor et al. "Bessel fourier orientation reconstruction (bfor): An analytical diffusion propagator reconstruction for hybrid diffusion imaging and computation of q-space indices". NeuroImage 64, 2013, 650–670. .. [7] Craven et al. "Smoothing Noisy Data with Spline Functions." NUMER MATH 31.4 (1978): 377-403. .. [8] Avram et al. "Clinical feasibility of using mean apparent propagator (MAP) MRI to characterize brain tissue microstructure". NeuroImage 2015, in press. """ def __init__(self, gtab, radial_order=6, laplacian_regularization=True, laplacian_weighting=0.2, positivity_constraint=False, pos_grid=15, pos_radius='adaptive', anisotropic_scaling=True, eigenvalue_threshold=1e-04, bval_threshold=np.inf, dti_scale_estimation=True, static_diffusivity=0.7e-3, cvxpy_solver=None): r""" Analytical and continuous modeling of the diffusion signal with respect to the MAPMRI basis [1]_. The main idea is to model the diffusion signal as a linear combination of the continuous functions presented in [2]_ but extending it in three dimensions. 
The main difference with the SHORE proposed in [3]_ is that MAPMRI 3D extension is provided using a set of three basis functions for the radial part, one for the signal along x, one for y and one for z, while [3]_ uses one basis function to model the radial part and real Spherical Harmonics to model the angular part. From the MAPMRI coefficients it is possible to estimate various q-space indices, the PDF and the ODF. The fitting procedure can be constrained using the positivity constraint proposed in [1]_ and/or the laplacian regularization proposed in [4]_. For the estimation of q-space indices we recommend using the 'regular' anisotropic implementation of MAPMRI. However, it has been shown that the ODF estimation in this implementation has a bias which 'squeezes together' the ODF peaks when there is a crossing at an angle smaller than 90 degrees [4]_. When you want to estimate ODFs for tractography we therefore recommend using the isotropic implementation (which is equivalent to [3]_). The switch between isotropic and anisotropic can be easily made through the anisotropic_scaling option. Parameters ---------- gtab : GradientTable, gradient directions and bvalues container class. the gradient table has to include b0-images. radial_order : unsigned int, an even integer that represent the order of the basis laplacian_regularization: bool, Regularize using the Laplacian of the MAP-MRI basis. laplacian_weighting: string or scalar, The string 'GCV' makes it use generalized cross-validation to find the regularization weight [4]. A scalar sets the regularization weight to that value and an array will make it selected the optimal weight from the values in the array. positivity_constraint : bool, Constrain the propagator to be positive. pos_grid : integer, The number of points in the grid that is used in the positivity constraint. pos_radius : float or string, If set to a float, the maximum distance the the positivity constraint constrains to posivity is that value. If set to `adaptive', the maximum distance is dependent on the estimated tissue diffusivity. anisotropic_scaling : bool, If True, uses the standard anisotropic MAP-MRI basis. If False, uses the isotropic MAP-MRI basis (equal to 3D-SHORE). eigenvalue_threshold : float, Sets the minimum of the tensor eigenvalues in order to avoid stability problem. bval_threshold : float, Sets the b-value threshold to be used in the scale factor estimation. In order for the estimated non-Gaussianity to have meaning this value should set to a lower value (b<2000 s/mm^2) such that the scale factors are estimated on signal points that reasonably represent the spins at Gaussian diffusion. dti_scale_estimation : bool, Whether or not DTI fitting is used to estimate the isotropic scale factor for isotropic MAP-MRI. When set to False the algorithm presets the isotropic tissue diffusivity to static_diffusivity. This vastly increases fitting speed but at the cost of slightly reduced fitting quality. Can still be used in combination with regularization and constraints. static_diffusivity : float, the tissue diffusivity that is used when dti_scale_estimation is set to False. The default is that of typical white matter D=0.7e-3 _[5]. cvxpy_solver : str, optional cvxpy solver name. Optionally optimize the positivity constraint with a particular cvxpy solver. See http://www.cvxpy.org/ for details. Default: None (cvxpy chooses its own solver) References ---------- .. [1] Ozarslan E. et. 
al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. .. [2] Ozarslan E. et. al, "Simple harmonic oscillator based reconstruction and estimation for one-dimensional q-space magnetic resonance 1D-SHORE)", eapoc Intl Soc Mag Reson Med, vol. 16, p. 35., 2008. .. [3] Ozarslan E. et. al, "Simple harmonic oscillator based reconstruction and estimation for three-dimensional q-space mri", ISMRM 2009. .. [4] Fick, Rutger HJ, et al. "MAPL: Tissue microstructure estimation using Laplacian-regularized MAP-MRI and its application to HCP data." NeuroImage (2016). .. [5] Merlet S. et. al, "Continuous diffusion signal, EAP and ODF estimation via Compressive Sensing in diffusion MRI", Medical Image Analysis, 2013. Examples -------- In this example, where the data, gradient table and sphere tessellation used for reconstruction are provided, we model the diffusion signal with respect to the SHORE basis and compute the real and analytical ODF. >>> from dipy.data import dsi_voxels, get_sphere >>> from dipy.core.gradients import gradient_table >>> data, gtab_ = dsi_voxels() >>> gtab = gradient_table(gtab_.bvals, gtab_.bvecs, ... b0_threshold=gtab_.bvals.min()) >>> from dipy.sims.voxel import SticksAndBall >>> data, golden_directions = SticksAndBall( ... gtab, d=0.0015, ... S0=1, angles=[(0, 0), (90, 0)], ... fractions=[50, 50], snr=None) >>> from dipy.reconst.mapmri import MapmriModel >>> radial_order = 4 >>> map_model = MapmriModel(gtab, radial_order=radial_order) >>> mapfit = map_model.fit(data) >>> sphere = get_sphere('symmetric724') >>> odf = mapfit.odf(sphere) """ if np.sum(gtab.b0s_mask) == 0: msg = "gtab does not have any b0s, check in the gradient_table " msg += "if b0_threshold needs to be increased." raise ValueError(msg) self.gtab = gtab if radial_order < 0 or radial_order % 2: msg = "radial_order must be a positive, even number." raise ValueError(msg) self.radial_order = radial_order self.bval_threshold = bval_threshold self.dti_scale_estimation = dti_scale_estimation self.laplacian_regularization = laplacian_regularization if self.laplacian_regularization: msg = "Laplacian Regularization weighting must be 'GCV', " msg += "a positive float or an array of positive floats." if isinstance(laplacian_weighting, str): if laplacian_weighting is not 'GCV': raise ValueError(msg) elif (isinstance(laplacian_weighting, float) or isinstance(laplacian_weighting, np.ndarray)): if np.sum(laplacian_weighting < 0) > 0: raise ValueError(msg) self.laplacian_weighting = laplacian_weighting self.positivity_constraint = positivity_constraint if self.positivity_constraint: if not have_cvxpy: raise ValueError( 'CVXPY package needed to enforce constraints') msg = "pos_radius must be 'adaptive' or a positive float" if cvxpy_solver is not None: if cvxpy_solver not in cvxpy.installed_solvers(): msg = "Input `cvxpy_solver` was set to %s." % cvxpy_solver msg += " One of %s" % ', '.join(cvxpy.installed_solvers()) msg += " was expected." 
                    raise ValueError(msg)
            if isinstance(pos_radius, str):
                if pos_radius != 'adaptive':
                    raise ValueError(msg)
            elif isinstance(pos_radius, float) or isinstance(pos_radius, int):
                if pos_radius <= 0:
                    raise ValueError(msg)
                self.constraint_grid = create_rspace(pos_grid, pos_radius)
                if not anisotropic_scaling:
                    self.pos_K_independent = mapmri_isotropic_K_mu_independent(
                        radial_order, self.constraint_grid)
            else:
                raise ValueError(msg)
        self.pos_grid = pos_grid
        self.pos_radius = pos_radius
        self.anisotropic_scaling = anisotropic_scaling

        if (gtab.big_delta is None) or (gtab.small_delta is None):
            self.tau = 1 / (4 * np.pi ** 2)
        else:
            self.tau = gtab.big_delta - gtab.small_delta / 3.0

        self.eigenvalue_threshold = eigenvalue_threshold
        self.cvxpy_solver = cvxpy_solver

        self.cutoff = gtab.bvals < self.bval_threshold
        gtab_cutoff = gradient_table(bvals=self.gtab.bvals[self.cutoff],
                                     bvecs=self.gtab.bvecs[self.cutoff])
        self.tenmodel = dti.TensorModel(gtab_cutoff)

        if self.anisotropic_scaling:
            self.ind_mat = mapmri_index_matrix(self.radial_order)
            self.Bm = b_mat(self.ind_mat)
            self.S_mat, self.T_mat, self.U_mat = mapmri_STU_reg_matrices(
                radial_order)
        else:
            self.ind_mat = mapmri_isotropic_index_matrix(self.radial_order)
            self.Bm = b_mat_isotropic(self.ind_mat)
            self.laplacian_matrix = mapmri_isotropic_laplacian_reg_matrix(
                radial_order, 1.)
            qvals = np.sqrt(self.gtab.bvals / self.tau) / (2 * np.pi)
            q = gtab.bvecs * qvals[:, None]
            if self.dti_scale_estimation:
                self.M_mu_independent = mapmri_isotropic_M_mu_independent(
                    self.radial_order, q)
            else:
                D = static_diffusivity
                mumean = np.sqrt(2 * D * self.tau)
                self.mu = np.array([mumean, mumean, mumean])
                self.M = mapmri_isotropic_phi_matrix(radial_order, mumean, q)
                if (self.laplacian_regularization and
                        isinstance(laplacian_weighting, float) and
                        not positivity_constraint):
                    MMt = (np.dot(self.M.T, self.M) +
                           laplacian_weighting * mumean *
                           self.laplacian_matrix)
                    self.MMt_inv_Mt = np.dot(np.linalg.pinv(MMt), self.M.T)

    @multi_voxel_fit
    def fit(self, data):
        errorcode = 0
        tenfit = self.tenmodel.fit(data[self.cutoff])
        evals = tenfit.evals
        R = tenfit.evecs
        evals = np.clip(evals, self.eigenvalue_threshold, evals.max())
        qvals = np.sqrt(self.gtab.bvals / self.tau) / (2 * np.pi)
        mu_max = max(np.sqrt(evals * 2 * self.tau))  # used for constraint

        if self.anisotropic_scaling:
            mu = np.sqrt(evals * 2 * self.tau)
            qvecs = np.dot(self.gtab.bvecs, R)
            q = qvecs * qvals[:, None]
            M = mapmri_phi_matrix(self.radial_order, mu, q)
        else:
            try:
                self.MMt_inv_Mt
                lopt = self.laplacian_weighting
                coef = np.dot(self.MMt_inv_Mt, data)
                coef = coef / sum(coef * self.Bm)
                return MapmriFit(self, coef, self.mu, R, lopt, errorcode)
            except AttributeError:
                try:
                    M = self.M
                    mu = self.mu
                except AttributeError:
                    u0 = isotropic_scale_factor(evals * 2 * self.tau)
                    mu = np.array([u0, u0, u0])
                    q = self.gtab.bvecs * qvals[:, None]
                    M_mu_dependent = mapmri_isotropic_M_mu_dependent(
                        self.radial_order, mu[0], qvals)
                    M = M_mu_dependent * self.M_mu_independent

        if self.laplacian_regularization:
            if self.anisotropic_scaling:
                laplacian_matrix = mapmri_laplacian_reg_matrix(
                    self.ind_mat, mu, self.S_mat, self.T_mat, self.U_mat)
            else:
                laplacian_matrix = self.laplacian_matrix * mu[0]
            if self.laplacian_weighting == 'GCV':
                try:
                    lopt = generalized_crossvalidation(data, M,
                                                       laplacian_matrix)
                except np.linalg.linalg.LinAlgError:
                    lopt = 0.05
                    errorcode = 1
            elif np.isscalar(self.laplacian_weighting):
                lopt = self.laplacian_weighting
            else:
                lopt = generalized_crossvalidation_array(
                    data, M, laplacian_matrix, self.laplacian_weighting)
        else:
            lopt = 0.
laplacian_matrix = np.ones((self.ind_mat.shape[0], self.ind_mat.shape[0])) if self.positivity_constraint: if self.pos_radius == 'adaptive': # custom constraint grid based on scale factor [Avram2015] constraint_grid = create_rspace(self.pos_grid, np.sqrt(5) * mu_max) else: constraint_grid = self.constraint_grid if self.anisotropic_scaling: K = mapmri_psi_matrix(self.radial_order, mu, constraint_grid) else: if self.pos_radius == 'adaptive': # grid changes per voxel. Recompute entire K matrix. K = mapmri_isotropic_psi_matrix(self.radial_order, mu[0], constraint_grid) else: # grid is static. Only compute mu-dependent part of K. K_dependent = mapmri_isotropic_K_mu_dependent( self.radial_order, mu[0], constraint_grid) K = K_dependent * self.pos_K_independent data_norm = np.asarray(data / data[self.gtab.b0s_mask].mean()) c = cvxpy.Variable(M.shape[1]) design_matrix = cvxpy.Constant(M) objective = cvxpy.Minimize( cvxpy.sum_squares(design_matrix * c - data_norm) + lopt * cvxpy.quad_form(c, laplacian_matrix) ) M0 = M[self.gtab.b0s_mask, :] constraints = [M0[0] * c == 1, K * c > -.1] prob = cvxpy.Problem(objective, constraints) try: prob.solve(solver=self.cvxpy_solver) coef = np.asarray(c.value).squeeze() except: errorcode = 2 warn('Optimization did not find a solution') try: coef = np.dot(np.linalg.pinv(M), data) # least squares except np.linalg.linalg.LinAlgError: errorcode = 3 coef = np.zeros(M.shape[1]) return MapmriFit(self, coef, mu, R, lopt, errorcode) else: try: pseudoInv = np.dot( np.linalg.inv(np.dot(M.T, M) + lopt * laplacian_matrix), M.T) coef = np.dot(pseudoInv, data) except np.linalg.linalg.LinAlgError: errorcode = 1 coef = np.zeros(M.shape[1]) return MapmriFit(self, coef, mu, R, lopt, errorcode) coef = coef / sum(coef * self.Bm) return MapmriFit(self, coef, mu, R, lopt, errorcode) class MapmriFit(ReconstFit): def __init__(self, model, mapmri_coef, mu, R, lopt, errorcode=0): """ Calculates diffusion properties for a single voxel Parameters ---------- model : object, AnalyticalModel mapmri_coef : 1d ndarray, mapmri coefficients mu : array, shape (3,) scale parameters vector for x, y and z R : array, shape (3,3) rotation matrix lopt : float, regularization weight used for laplacian regularization errorcode : int provides information on whether errors occurred in the fitting of each voxel. 0 means no problem, 1 means a LinAlgError occurred when trying to invert the design matrix. 2 means the positivity constraint was unable to solve the problem. 3 means that after positivity constraint failed, also matrix inversion failed. """ self.model = model self._mapmri_coef = mapmri_coef self.gtab = model.gtab self.radial_order = model.radial_order self.mu = mu self.R = R self.lopt = lopt self.errorcode = errorcode @property def mapmri_mu(self): """The MAPMRI scale factors """ return self.mu @property def mapmri_R(self): """The MAPMRI rotation matrix """ return self.R @property def mapmri_coeff(self): """The MAPMRI coefficients """ return self._mapmri_coef def odf(self, sphere, s=2): r""" Calculates the analytical Orientation Distribution Function (ODF) from the signal [1]_ Eq. (32). Parameters ---------- s : unsigned int radial moment of the ODF References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. 
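# Illustrative sketch (not part of the dipy API): the unconstrained,
# Laplacian-regularized solve used by ``MapmriModel.fit`` above reduces to
# ridge-like normal equations.  With design matrix M, regularization matrix L
# and weight lopt, the coefficients are pinv(M^T M + lopt * L) M^T y, which is
# also the least-squares solution of an augmented design.  The matrices below
# are random stand-ins, not actual MAP-MRI matrices.
import numpy as np

rng = np.random.RandomState(0)
n_samples, n_coef = 60, 22
M = rng.randn(n_samples, n_coef)
L = np.eye(n_coef)                      # stand-in for the Laplacian matrix
y = rng.randn(n_samples)
lopt = 0.2

coef = np.dot(np.linalg.pinv(np.dot(M.T, M) + lopt * L), np.dot(M.T, y))

# equivalent formulation as ordinary least squares on an augmented system
M_aug = np.vstack([M, np.sqrt(lopt) * np.linalg.cholesky(L).T])
y_aug = np.concatenate([y, np.zeros(n_coef)])
coef_aug = np.dot(np.linalg.pinv(M_aug), y_aug)
print(np.allclose(coef, coef_aug))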
""" if self.model.anisotropic_scaling: v_ = sphere.vertices v = np.dot(v_, self.R) I_s = mapmri_odf_matrix(self.radial_order, self.mu, s, v) odf = np.dot(I_s, self._mapmri_coef) else: I = self.model.cache_get('ODF_matrix', key=(sphere, s)) if I is None: I = mapmri_isotropic_odf_matrix(self.radial_order, 1, s, sphere.vertices) self.model.cache_set('ODF_matrix', (sphere, s), I) odf = self.mu[0] ** s * np.dot(I, self._mapmri_coef) return odf def odf_sh(self, s=2): r""" Calculates the real analytical odf for a given discrete sphere. Computes the design matrix of the ODF for the given sphere vertices and radial moment [1]_ eq. (32). The radial moment s acts as a sharpening method. The analytical equation for the spherical ODF basis is given in [2]_ eq. (C8). References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. .. [1]_ Fick, Rutger HJ, et al. "MAPL: Tissue microstructure estimation using Laplacian-regularized MAP-MRI and its application to HCP data." NeuroImage (2016). """ if self.model.anisotropic_scaling: msg = 'odf in spherical harmonics not yet implemented for ' msg += 'anisotropic implementation' raise ValueError(msg) I = self.model.cache_get('ODF_sh_matrix', key=(self.radial_order, s)) if I is None: I = mapmri_isotropic_odf_sh_matrix(self.radial_order, 1, s) self.model.cache_set('ODF_sh_matrix', (self.radial_order, s), I) odf = self.mu[0] ** s * np.dot(I, self._mapmri_coef) return odf def rtpp(self): r""" Calculates the analytical return to the plane probability (RTPP) [1]_ eq. (42). The analytical formula for the isotropic MAP-MRI basis was derived in [2]_ eq. (C11). References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. .. [2]_ Fick, Rutger HJ, et al. "MAPL: Tissue microstructure estimation using Laplacian-regularized MAP-MRI and its application to HCP data." NeuroImage (2016). """ Bm = self.model.Bm ind_mat = self.model.ind_mat if self.model.anisotropic_scaling: sel = Bm > 0. # select only relevant coefficients const = 1 / (np.sqrt(2 * np.pi) * self.mu[0]) ind_sum = (-1.0) ** (ind_mat[sel, 0] / 2.0) rtpp_vec = const * Bm[sel] * ind_sum * self._mapmri_coef[sel] rtpp = rtpp_vec.sum() return rtpp else: rtpp_vec = np.zeros((ind_mat.shape[0])) count = 0 for n in range(0, self.model.radial_order + 1, 2): for j in range(1, 2 + n // 2): l = n + 2 - 2 * j const = (-1/2.0) ** (l/2) / np.sqrt(np.pi) matsum = 0 for k in range(0, j): matsum += (-1) ** k * \ binomialfloat(j + l - 0.5, j - k - 1) *\ gamma(l / 2 + k + 1 / 2.0) /\ (factorial(k) * 0.5 ** (l / 2 + 1 / 2.0 + k)) for m in range(-l, l + 1): rtpp_vec[count] = const * matsum count += 1 direction = np.array(self.R[:, 0], ndmin=2) r, theta, phi = cart2sphere(direction[:, 0], direction[:, 1], direction[:, 2]) rtpp = self._mapmri_coef * (1 / self.mu[0]) *\ rtpp_vec * real_sph_harm(ind_mat[:, 2], ind_mat[:, 1], theta, phi) return rtpp.sum() def rtap(self): r""" Calculates the analytical return to the axis probability (RTAP) [1]_ eq. (40, 44a). The analytical formula for the isotropic MAP-MRI basis was derived in [2]_ eq. (C11). References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. .. [2]_ Fick, Rutger HJ, et al. 
"MAPL: Tissue microstructure estimation using Laplacian-regularized MAP-MRI and its application to HCP data." NeuroImage (2016). """ Bm = self.model.Bm ind_mat = self.model.ind_mat if self.model.anisotropic_scaling: sel = Bm > 0. # select only relevant coefficients const = 1 / (2 * np.pi * np.prod(self.mu[1:])) ind_sum = (-1.0) ** ((np.sum(ind_mat[sel, 1:], axis=1) / 2.0)) rtap_vec = const * Bm[sel] * ind_sum * self._mapmri_coef[sel] rtap = np.sum(rtap_vec) else: rtap_vec = np.zeros((ind_mat.shape[0])) count = 0 for n in range(0, self.model.radial_order + 1, 2): for j in range(1, 2 + n // 2): l = n + 2 - 2 * j kappa = ((-1) ** (j - 1) * 2 ** (-(l + 3) / 2.0)) / np.pi matsum = 0 for k in range(0, j): matsum += ((-1) ** k * binomialfloat(j + l - 0.5, j - k - 1) * gamma((l + 1) / 2.0 + k)) /\ (factorial(k) * 0.5 ** ((l + 1) / 2.0 + k)) for m in range(-l, l + 1): rtap_vec[count] = kappa * matsum count += 1 rtap_vec *= 2 direction = np.array(self.R[:, 0], ndmin=2) r, theta, phi = cart2sphere(direction[:, 0], direction[:, 1], direction[:, 2]) rtap_vec = self._mapmri_coef * (1 / self.mu[0] ** 2) *\ rtap_vec * real_sph_harm(ind_mat[:, 2], ind_mat[:, 1], theta, phi) rtap = rtap_vec.sum() return rtap def rtop(self): r""" Calculates the analytical return to the origin probability (RTOP) [1]_ eq. (36, 43). The analytical formula for the isotropic MAP-MRI basis was derived in [2]_ eq. (C11). References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. .. [2]_ Fick, Rutger HJ, et al. "MAPL: Tissue microstructure estimation using Laplacian-regularized MAP-MRI and its application to HCP data." NeuroImage (2016). """ Bm = self.model.Bm if self.model.anisotropic_scaling: const = 1 / (np.sqrt(8 * np.pi ** 3) * np.prod(self.mu)) ind_sum = (-1.0) ** (np.sum(self.model.ind_mat, axis=1) / 2) rtop_vec = const * ind_sum * Bm * self._mapmri_coef rtop = rtop_vec.sum() else: const = 1 / (2 * np.sqrt(2.0) * np.pi ** (3 / 2.0)) rtop_vec = const * (-1.0) ** (self.model.ind_mat[:, 0] - 1) * Bm rtop = (1 / self.mu[0] ** 3) * rtop_vec * self._mapmri_coef rtop = rtop.sum() return rtop def msd(self): r""" Calculates the analytical Mean Squared Displacement (MSD). It is defined as the Laplacian of the origin of the estimated signal [1]_. The analytical formula for the MAP-MRI basis was derived in [2]_ eq. (C13, D1). References ---------- .. [1] Cheng, J., 2014. Estimation and Processing of Ensemble Average Propagator and Its Features in Diffusion MRI. Ph.D. Thesis. .. [2]_ Fick, Rutger HJ, et al. "MAPL: Tissue microstructure estimation using Laplacian-regularized MAP-MRI and its application to HCP data." NeuroImage (2016). """ mu = self.mu ind_mat = self.model.ind_mat Bm = self.model.Bm sel = self.model.Bm > 0. # select only relevant coefficients mapmri_coef = self._mapmri_coef[sel] if self.model.anisotropic_scaling: ind_sum = np.sum(ind_mat[sel], axis=1) nx, ny, nz = ind_mat[sel].T numerator = (-1) ** (0.5 * (-ind_sum)) * np.pi ** (3 / 2.0) *\ ((1 + 2 * nx) * mu[0] ** 2 + (1 + 2 * ny) * mu[1] ** 2 + (1 + 2 * nz) * mu[2] ** 2) denominator = np.sqrt(2. 
** (-ind_sum) * factorial(nx) * factorial(ny) * factorial(nz)) *\ gamma(0.5 - 0.5 * nx) * gamma(0.5 - 0.5 * ny) *\ gamma(0.5 - 0.5 * nz) msd_vec = self._mapmri_coef[sel] * (numerator / denominator) msd = msd_vec.sum() else: msd_vec = (4 * ind_mat[sel, 0] - 1) * Bm[sel] msd = self.mu[0] ** 2 * msd_vec * mapmri_coef msd = msd.sum() return msd def qiv(self): r""" Calculates the analytical Q-space Inverse Variance (QIV). It is defined as the inverse of the Laplacian of the origin of the estimated propagator [1]_ eq. (22). The analytical formula for the MAP-MRI basis was derived in [2]_ eq. (C14, D2). References ---------- .. [1] Hosseinbor et al. "Bessel fourier orientation reconstruction (bfor): An analytical diffusion propagator reconstruction for hybrid diffusion imaging and computation of q-space indices. NeuroImage 64, 2013, 650–670. .. [2]_ Fick, Rutger HJ, et al. "MAPL: Tissue microstructure estimation using Laplacian-regularized MAP-MRI and its application to HCP data." NeuroImage (2016). """ ux, uy, uz = self.mu ind_mat = self.model.ind_mat if self.model.anisotropic_scaling: sel = self.model.Bm > 0 # select only relevant coefficients nx, ny, nz = ind_mat[sel].T numerator = 8 * np.pi ** 2 * (ux * uy * uz) ** 3 *\ np.sqrt(factorial(nx) * factorial(ny) * factorial(nz)) *\ gamma(0.5 - 0.5 * nx) * gamma(0.5 - 0.5 * ny) * \ gamma(0.5 - 0.5 * nz) denominator = np.sqrt(2. ** (-1 + nx + ny + nz)) *\ ((1 + 2 * nx) * uy ** 2 * uz ** 2 + ux ** 2 * ((1 + 2 * nz) * uy ** 2 + (1 + 2 * ny) * uz ** 2)) qiv_vec = self._mapmri_coef[sel] * (numerator / denominator) qiv = qiv_vec.sum() else: sel = self.model.Bm > 0. # select only relevant coefficients j = ind_mat[sel, 0] qiv_vec = ((8 * (-1.0) ** (1 - j) * np.sqrt(2) * np.pi ** (7 / 2.)) / ((4.0 * j - 1) * self.model.Bm[sel])) qiv = ux ** 5 * qiv_vec * self._mapmri_coef[sel] qiv = qiv.sum() return qiv def ng(self): r""" Calculates the analytical non-Gaussiannity (NG) [1]_. For the NG to be meaningful the mapmri scale factors must be estimated only on data representing Gaussian diffusion of spins, i.e., bvals smaller than about 2000 s/mm^2 [2]_. References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. .. [2] Avram et al. "Clinical feasibility of using mean apparent propagator (MAP) MRI to characterize brain tissue microstructure". NeuroImage 2015, in press. """ if self.model.bval_threshold > 2000.: msg = 'model bval_threshold must be lower than 2000 for the ' msg += 'non_Gaussianity to be physically meaningful [2].' warn(msg) if not self.model.anisotropic_scaling: msg = 'Parallel non-Gaussianity is not defined using ' msg += 'isotropic scaling.' raise ValueError(msg) coef = self._mapmri_coef return np.sqrt(1 - coef[0] ** 2 / np.sum(coef ** 2)) def ng_parallel(self): r""" Calculates the analytical parallel non-Gaussiannity (NG) [1]_. For the NG to be meaningful the mapmri scale factors must be estimated only on data representing Gaussian diffusion of spins, i.e., bvals smaller than about 2000 s/mm^2 [2]_. References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. .. [2] Avram et al. "Clinical feasibility of using mean apparent propagator (MAP) MRI to characterize brain tissue microstructure". NeuroImage 2015, in press. 
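# Illustrative sketch (not part of the dipy API): closed-form reference values
# that the q-space indices computed above (rtop, rtap, rtpp, msd) take for
# free, isotropic Gaussian diffusion with diffusivity D and effective
# diffusion time tau (big_delta - small_delta / 3).  These are handy sanity
# checks for fitted MAP-MRI indices; the numbers below are typical but
# arbitrary choices.
import numpy as np

D = 0.7e-3        # mm^2 / s, typical white matter diffusivity
tau = 0.025       # s, effective diffusion time

rtop_gauss = (4 * np.pi * D * tau) ** (-3 / 2.)   # 1 / mm^3
rtap_gauss = (4 * np.pi * D * tau) ** (-1.)       # 1 / mm^2
rtpp_gauss = (4 * np.pi * D * tau) ** (-1 / 2.)   # 1 / mm
msd_gauss = 6 * D * tau                           # mm^2
print(rtop_gauss, rtap_gauss, rtpp_gauss, msd_gauss)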
""" if self.model.bval_threshold > 2000.: msg = 'Model bval_threshold must be lower than 2000 for the ' msg += 'non_Gaussianity to be physically meaningful [2].' warn(msg) if not self.model.anisotropic_scaling: msg = 'Parallel non-Gaussianity is not defined using ' msg += 'isotropic scaling.' raise ValueError(msg) ind_mat = self.model.ind_mat coef = self._mapmri_coef a_par = np.zeros_like(coef) a0 = np.zeros_like(coef) for i in range(coef.shape[0]): n1, n2, n3 = ind_mat[i] if (n2 % 2 + n3 % 2) == 0: a_par[i] = coef[i] * (-1) ** ((n2 + n3) / 2) *\ np.sqrt(factorial(n2) * factorial(n3)) /\ (factorial2(n2) * factorial2(n3)) if n1 == 0: a0[i] = a_par[i] return np.sqrt(1 - np.sum(a0 ** 2) / np.sum(a_par ** 2)) def ng_perpendicular(self): r""" Calculates the analytical perpendicular non-Gaussiannity (NG) [1]_. For the NG to be meaningful the mapmri scale factors must be estimated only on data representing Gaussian diffusion of spins, i.e., bvals smaller than about 2000 s/mm^2 [2]_. References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. .. [2] Avram et al. "Clinical feasibility of using mean apparent propagator (MAP) MRI to characterize brain tissue microstructure". NeuroImage 2015, in press. """ if self.model.bval_threshold > 2000.: msg = 'model bval_threshold must be lower than 2000 for the ' msg += 'non_Gaussianity to be physically meaningful [2].' warn(msg) if not self.model.anisotropic_scaling: msg = 'Parallel non-Gaussianity is not defined using ' msg += 'isotropic scaling.' raise ValueError(msg) ind_mat = self.model.ind_mat coef = self._mapmri_coef a_perp = np.zeros_like(coef) a00 = np.zeros_like(coef) for i in range(coef.shape[0]): n1, n2, n3 = ind_mat[i] if n1 % 2 == 0: if n2 % 2 == 0 and n3 % 2 == 0: a_perp[i] = coef[i] * (-1) ** (n1 / 2) *\ np.sqrt(factorial(n1)) / factorial2(n1) if n2 == 0 and n3 == 0: a00[i] = a_perp[i] return np.sqrt(1 - np.sum(a00 ** 2) / np.sum(a_perp ** 2)) def norm_of_laplacian_signal(self): """ Calculates the norm of the laplacian of the fitted signal [1]_. This information could be useful to assess if the extrapolation of the fitted signal contains spurious oscillations. A high laplacian may indicate that these are present, and any q-space indices that use integrals of the signal may be corrupted (e.g. RTOP, RTAP, RTPP, QIV). References ---------- .. [1]_ Fick, Rutger HJ, et al. "MAPL: Tissue microstructure estimation using Laplacian-regularized MAP-MRI and its application to HCP data." NeuroImage (2016). """ if self.model.anisotropic_scaling: laplacian_matrix = mapmri_laplacian_reg_matrix( self.model.ind_mat, self.mu, self.model.S_mat, self.model.T_mat, self.model.U_mat) else: laplacian_matrix = self.mu[0] * self.model.laplacian_matrix norm_of_laplacian = np.dot(np.dot(self._mapmri_coef, laplacian_matrix), self._mapmri_coef) return norm_of_laplacian def fitted_signal(self, gtab=None): """ Recovers the fitted signal for the given gradient table. If no gradient table is given it recovers the signal for the gtab of the model object. """ if gtab is None: E = self.predict(self.model.gtab, S0=1.) else: E = self.predict(gtab, S0=1.) return E def predict(self, qvals_or_gtab, S0=100.): r"""Recovers the reconstructed signal for any qvalue array or gradient table. 
""" if isinstance(qvals_or_gtab, np.ndarray): q = qvals_or_gtab qvals = np.linalg.norm(q, axis=1) else: gtab = qvals_or_gtab qvals = np.sqrt(gtab.bvals / self.model.tau) / (2 * np.pi) q = qvals[:, None] * gtab.bvecs if self.model.anisotropic_scaling: q_rot = np.dot(q, self.R) M = mapmri_phi_matrix(self.radial_order, self.mu, q_rot) else: M = mapmri_isotropic_phi_matrix(self.radial_order, self.mu[0], q) E = S0 * np.dot(M, self._mapmri_coef) return E def pdf(self, r_points): """ Diffusion propagator on a given set of real points. if the array r_points is non writeable, then intermediate results are cached for faster recalculation """ if self.model.anisotropic_scaling: r_point_rotated = np.dot(r_points, self.R) K = mapmri_psi_matrix(self.radial_order, self.mu, r_point_rotated) EAP = np.dot(K, self._mapmri_coef) else: if not r_points.flags.writeable: K_independent = self.model.cache_get( 'mapmri_matrix_pdf_independent', key=hash(r_points.data)) if K_independent is None: K_independent = mapmri_isotropic_K_mu_independent( self.radial_order, r_points) self.model.cache_set('mapmri_matrix_pdf_independent', hash(r_points.data), K_independent) K_dependent = mapmri_isotropic_K_mu_dependent( self.radial_order, self.mu[0], r_points) K = K_dependent * K_independent else: K = mapmri_isotropic_psi_matrix( self.radial_order, self.mu[0], r_points) EAP = np.dot(K, self._mapmri_coef) return EAP def isotropic_scale_factor(mu_squared): r"""Estimated isotropic scaling factor _[1] Eq. (49). Parameters ---------- mu_squared : array, shape (N,3) squared scale factors of mapmri basis in x, y, z Returns ------- u0 : float closest isotropic scale factor for the isotropic basis References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. """ X, Y, Z = mu_squared coef_array = np.array([-3, -(X + Y + Z), (X * Y + X * Z + Y * Z), 3 * X * Y * Z]) # take the real, positive root of the problem. u0 = np.sqrt(np.real(np.roots(coef_array).max())) return u0 def mapmri_index_matrix(radial_order): r""" Calculates the indices for the MAPMRI [1]_ basis in x, y and z. Parameters ---------- radial_order : unsigned int radial order of MAPMRI basis Returns ------- index_matrix : array, shape (N,3) ordering of the basis in x, y, z References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. """ index_matrix = [] for n in range(0, radial_order + 1, 2): for i in range(0, n + 1): for j in range(0, n - i + 1): index_matrix.append([n - i - j, j, i]) return np.array(index_matrix) def b_mat(index_matrix): r""" Calculates the B coefficients from [1]_ Eq. (27). Parameters ---------- index_matrix : array, shape (N,3) ordering of the basis in x, y, z Returns ------- B : array, shape (N,) B coefficients for the basis References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. """ B = np.zeros(index_matrix.shape[0]) for i in range(index_matrix.shape[0]): n1, n2, n3 = index_matrix[i] K = int(not(n1 % 2) and not(n2 % 2) and not(n3 % 2)) B[i] = ( K * np.sqrt(factorial(n1) * factorial(n2) * factorial(n3)) / (factorial2(n1) * factorial2(n2) * factorial2(n3)) ) return B def b_mat_isotropic(index_matrix): r""" Calculates the isotropic B coefficients from [1]_ Fig 8. 
Parameters ---------- index_matrix : array, shape (N,3) ordering of the isotropic basis in j, l, m Returns ------- B : array, shape (N,) B coefficients for the isotropic basis References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. """ B = np.zeros((index_matrix.shape[0])) for i in range(index_matrix.shape[0]): if index_matrix[i, 1] == 0: B[i] = genlaguerre(index_matrix[i, 0] - 1, 0.5)(0) return B def mapmri_phi_1d(n, q, mu): r""" One dimensional MAPMRI basis function from [1]_ Eq. (4). Parameters ------- n : unsigned int order of the basis q : array, shape (N,) points in the q-space in which evaluate the basis mu : float scale factor of the basis References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. """ qn = 2 * np.pi * mu * q H = hermite(n)(qn) i = np.complex(0, 1) f = factorial(n) k = i ** (-n) / np.sqrt(2 ** (n) * f) phi = k * np.exp(- qn ** 2 / 2) * H return phi def mapmri_phi_matrix(radial_order, mu, q_gradients): r"""Compute the MAPMRI phi matrix for the signal [1]_ eq. (23). Parameters ---------- radial_order : unsigned int, an even integer that represent the order of the basis mu : array, shape (3,) scale factors of the basis for x, y, z q_gradients : array, shape (N,3) points in the q-space in which evaluate the basis References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. """ ind_mat = mapmri_index_matrix(radial_order) n_elem = ind_mat.shape[0] n_qgrad = q_gradients.shape[0] qx, qy, qz = q_gradients.T mux, muy, muz = mu Mx_storage = np.array(np.zeros((n_qgrad, radial_order + 1)), dtype=complex) My_storage = np.array(np.zeros((n_qgrad, radial_order + 1)), dtype=complex) Mz_storage = np.array(np.zeros((n_qgrad, radial_order + 1)), dtype=complex) M = np.zeros((n_qgrad, n_elem)) for n in range(radial_order + 1): Mx_storage[:, n] = mapmri_phi_1d(n, qx, mux) My_storage[:, n] = mapmri_phi_1d(n, qy, muy) Mz_storage[:, n] = mapmri_phi_1d(n, qz, muz) counter = 0 for nx, ny, nz in ind_mat: M[:, counter] = ( np.real(Mx_storage[:, nx] * My_storage[:, ny] * Mz_storage[:, nz]) ) counter += 1 return M def mapmri_psi_1d(n, x, mu): r""" One dimensional MAPMRI propagator basis function from [1]_ Eq. (10). Parameters ---------- n : unsigned int order of the basis x : array, shape (N,) points in the r-space in which evaluate the basis mu : float scale factor of the basis References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. """ H = hermite(n)(x / mu) f = factorial(n) k = 1 / (np.sqrt(2 ** (n + 1) * np.pi * f) * mu) psi = k * np.exp(- x ** 2 / (2 * mu ** 2)) * H return psi def mapmri_psi_matrix(radial_order, mu, rgrad): r"""Compute the MAPMRI psi matrix for the propagator [1]_ eq. (22). Parameters ---------- radial_order : unsigned int, an even integer that represent the order of the basis mu : array, shape (3,) scale factors of the basis for x, y, z rgrad : array, shape (N,3) points in the r-space in which evaluate the EAP References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. 
""" ind_mat = mapmri_index_matrix(radial_order) n_elem = ind_mat.shape[0] n_qgrad = rgrad.shape[0] rx, ry, rz = rgrad.T mux, muy, muz = mu Kx_storage = np.zeros((n_qgrad, radial_order + 1)) Ky_storage = np.zeros((n_qgrad, radial_order + 1)) Kz_storage = np.zeros((n_qgrad, radial_order + 1)) K = np.zeros((n_qgrad, n_elem)) for n in range(radial_order + 1): Kx_storage[:, n] = mapmri_psi_1d(n, rx, mux) Ky_storage[:, n] = mapmri_psi_1d(n, ry, muy) Kz_storage[:, n] = mapmri_psi_1d(n, rz, muz) counter = 0 for nx, ny, nz in ind_mat: K[:, counter] = ( Kx_storage[:, nx] * Ky_storage[:, ny] * Kz_storage[:, nz] ) counter += 1 return K def mapmri_odf_matrix(radial_order, mu, s, vertices): r"""Compute the MAPMRI ODF matrix [1]_ Eq. (33). Parameters ---------- radial_order : unsigned int, an even integer that represent the order of the basis mu : array, shape (3,) scale factors of the basis for x, y, z s : unsigned int radial moment of the ODF vertices : array, shape (N,3) points of the sphere shell in the r-space in which evaluate the ODF References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. """ ind_mat = mapmri_index_matrix(radial_order) n_vert = vertices.shape[0] n_elem = ind_mat.shape[0] odf_mat = np.zeros((n_vert, n_elem)) mux, muy, muz = mu # Eq, 35a rho = 1.0 / np.sqrt((vertices[:, 0] / mux) ** 2 + (vertices[:, 1] / muy) ** 2 + (vertices[:, 2] / muz) ** 2) # Eq, 35b alpha = 2 * rho * (vertices[:, 0] / mux) # Eq, 35c beta = 2 * rho * (vertices[:, 1] / muy) # Eq, 35d gamma = 2 * rho * (vertices[:, 2] / muz) const = rho ** (3 + s) / np.sqrt(2 ** (2 - s) * np.pi ** 3 * (mux ** 2 * muy ** 2 * muz ** 2)) for j in range(n_elem): n1, n2, n3 = ind_mat[j] f = np.sqrt(factorial(n1) * factorial(n2) * factorial(n3)) odf_mat[:, j] = const * f * \ _odf_cfunc(n1, n2, n3, alpha, beta, gamma, s) return odf_mat def _odf_cfunc(n1, n2, n3, a, b, g, s): r"""Compute the MAPMRI ODF function from [1]_ Eq. (34). References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. """ f = factorial f2 = factorial2 sumc = 0 for i in range(0, n1 + 1, 2): for j in range(0, n2 + 1, 2): for k in range(0, n3 + 1, 2): nn = n1 + n2 + n3 - i - j - k gam = (-1) ** ((i + j + k) / 2.0) * gamma((3 + s + nn) / 2.0) num1 = a ** (n1 - i) num2 = b ** (n2 - j) num3 = g ** (n3 - k) num = gam * num1 * num2 * num3 denom = f(n1 - i) * f(n2 - j) * f( n3 - k) * f2(i) * f2(j) * f2(k) sumc += num / denom return sumc def mapmri_isotropic_phi_matrix(radial_order, mu, q): r""" Three dimensional isotropic MAPMRI signal basis function from [1]_ Eq. (61). Parameters ---------- radial_order : unsigned int, radial order of the mapmri basis. mu : float, positive isotropic scale factor of the basis q : array, shape (N,3) points in the q-space in which evaluate the basis References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. 
""" qval, theta, phi = cart2sphere(q[:, 0], q[:, 1], q[:, 2]) theta[np.isnan(theta)] = 0 ind_mat = mapmri_isotropic_index_matrix(radial_order) n_elem = ind_mat.shape[0] n_qgrad = q.shape[0] M = np.zeros((n_qgrad, n_elem)) counter = 0 for n in range(0, radial_order + 1, 2): for j in range(1, 2 + n // 2): l = n + 2 - 2 * j const = mapmri_isotropic_radial_signal_basis(j, l, mu, qval) for m in range(-l, l+1): M[:, counter] = const * real_sph_harm(m, l, theta, phi) counter += 1 return M def mapmri_isotropic_radial_signal_basis(j, l, mu, qval): r"""Radial part of the isotropic 1D-SHORE signal basis [1]_ eq. (61). Parameters ---------- j : unsigned int, a positive integer related to the radial order l : unsigned int, the spherical harmonic order mu : float, isotropic scale factor of the basis qval : float, points in the q-space in which evaluate the basis References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. """ pi2_mu2_q2 = 2 * np.pi ** 2 * mu ** 2 * qval ** 2 const = ( (-1) ** (l / 2) * np.sqrt(4.0 * np.pi) * (pi2_mu2_q2) ** (l / 2) * np.exp(-pi2_mu2_q2) * genlaguerre(j - 1, l + 0.5)(2 * pi2_mu2_q2) ) return const def mapmri_isotropic_M_mu_independent(radial_order, q): r"""Computed the mu independent part of the signal design matrix. """ ind_mat = mapmri_isotropic_index_matrix(radial_order) qval, theta, phi = cart2sphere(q[:, 0], q[:, 1], q[:, 2]) theta[np.isnan(theta)] = 0 n_elem = ind_mat.shape[0] n_rgrad = theta.shape[0] Q_mu_independent = np.zeros((n_rgrad, n_elem)) counter = 0 for n in range(0, radial_order + 1, 2): for j in range(1, 2 + n // 2): l = n + 2 - 2 * j const = np.sqrt(4 * np.pi) * (-1) ** (-l / 2) * \ (2 * np.pi ** 2 * qval ** 2) ** (l / 2) for m in range(-1 * (n + 2 - 2 * j), (n + 3 - 2 * j)): Q_mu_independent[:, counter] = const * \ real_sph_harm(m, l, theta, phi) counter += 1 return Q_mu_independent def mapmri_isotropic_M_mu_dependent(radial_order, mu, qval): '''Computed the mu dependent part of the signal design matrix. ''' ind_mat = mapmri_isotropic_index_matrix(radial_order) n_elem = ind_mat.shape[0] n_qgrad = qval.shape[0] Q_u0_dependent = np.zeros((n_qgrad, n_elem)) pi2q2mu2 = 2 * np.pi ** 2 * mu ** 2 * qval ** 2 counter = 0 for n in range(0, radial_order + 1, 2): for j in range(1, 2 + n // 2): l = n + 2 - 2 * j const = mu ** l * np.exp(-pi2q2mu2) *\ genlaguerre(j - 1, l + 0.5)(2 * pi2q2mu2) for m in range(-l, l + 1): Q_u0_dependent[:, counter] = const counter += 1 return Q_u0_dependent def mapmri_isotropic_psi_matrix(radial_order, mu, rgrad): r""" Three dimensional isotropic MAPMRI propagator basis function from [1]_ Eq. (61). Parameters ---------- radial_order : unsigned int, radial order of the mapmri basis. mu : float, positive isotropic scale factor of the basis rgrad : array, shape (N,3) points in the r-space in which evaluate the basis References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. 
""" r, theta, phi = cart2sphere(rgrad[:, 0], rgrad[:, 1], rgrad[:, 2]) theta[np.isnan(theta)] = 0 ind_mat = mapmri_isotropic_index_matrix(radial_order) n_elem = ind_mat.shape[0] n_rgrad = rgrad.shape[0] K = np.zeros((n_rgrad, n_elem)) counter = 0 for n in range(0, radial_order + 1, 2): for j in range(1, 2 + n // 2): l = n + 2 - 2 * j const = mapmri_isotropic_radial_pdf_basis(j, l, mu, r) for m in range(-l, l + 1): K[:, counter] = const * real_sph_harm(m, l, theta, phi) counter += 1 return K def mapmri_isotropic_radial_pdf_basis(j, l, mu, r): r"""Radial part of the isotropic 1D-SHORE propagator basis [1]_ eq. (61). Parameters ---------- j : unsigned int, a positive integer related to the radial order l : unsigned int, the spherical harmonic order mu : float, isotropic scale factor of the basis r : float, points in the r-space in which evaluate the basis References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. """ r2u2 = r ** 2 / (2 * mu ** 2) const = ( (-1) ** (j - 1) / (np.sqrt(2) * np.pi * mu ** 3) * r2u2 ** (l / 2) * np.exp(-r2u2) * genlaguerre(j - 1, l + 0.5)(2 * r2u2) ) return const def mapmri_isotropic_K_mu_independent(radial_order, rgrad): '''Computes mu independent part of K. Same trick as with M. ''' r, theta, phi = cart2sphere(rgrad[:, 0], rgrad[:, 1], rgrad[:, 2]) theta[np.isnan(theta)] = 0 ind_mat = mapmri_isotropic_index_matrix(radial_order) n_elem = ind_mat.shape[0] n_rgrad = rgrad.shape[0] K = np.zeros((n_rgrad, n_elem)) counter = 0 for n in range(0, radial_order + 1, 2): for j in range(1, 2 + n // 2): l = n + 2 - 2 * j const = (-1) ** (j - 1) *\ (np.sqrt(2) * np.pi) ** (-1) *\ (r ** 2 / 2) ** (l / 2) for m in range(-l, l+1): K[:, counter] = const * real_sph_harm(m, l, theta, phi) counter += 1 return K def mapmri_isotropic_K_mu_dependent(radial_order, mu, rgrad): '''Computes mu dependent part of M. Same trick as with M. ''' r, theta, phi = cart2sphere(rgrad[:, 0], rgrad[:, 1], rgrad[:, 2]) theta[np.isnan(theta)] = 0 ind_mat = mapmri_isotropic_index_matrix(radial_order) n_elem = ind_mat.shape[0] n_rgrad = rgrad.shape[0] K = np.zeros((n_rgrad, n_elem)) r2mu2 = r ** 2 / (2 * mu ** 2) counter = 0 for n in range(0, radial_order + 1, 2): for j in range(1, 2 + n // 2): l = n + 2 - 2 * j const = (mu ** 3) ** (-1) * mu ** (-l) *\ np.exp(-r2mu2) * genlaguerre(j - 1, l + 0.5)(2 * r2mu2) for m in range(-l, l + 1): K[:, counter] = const counter += 1 return K def binomialfloat(n, k): """Custom Binomial function """ return factorial(n) / (factorial(n - k) * factorial(k)) def mapmri_isotropic_odf_matrix(radial_order, mu, s, vertices): r"""Compute the isotropic MAPMRI ODF matrix [1]_ Eq. 32 but for the isotropic propagator in [1]_ eq. (60). Analytical derivation in [2]_ eq. (C8). Parameters ---------- radial_order : unsigned int, an even integer that represent the order of the basis mu : float, isotropic scale factor of the isotropic MAP-MRI basis s : unsigned int radial moment of the ODF vertices : array, shape (N,3) points of the sphere shell in the r-space in which evaluate the ODF Returns ------- odf_mat : Matrix, shape (N_vertices, N_mapmri_coef) ODF design matrix to discrete sphere function References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. .. [2]_ Fick, Rutger HJ, et al. 
"MAPL: Tissue microstructure estimation using Laplacian-regularized MAP-MRI and its application to HCP data." NeuroImage (2016). """ r, theta, phi = cart2sphere(vertices[:, 0], vertices[:, 1], vertices[:, 2]) theta[np.isnan(theta)] = 0 ind_mat = mapmri_isotropic_index_matrix(radial_order) n_vert = vertices.shape[0] n_elem = ind_mat.shape[0] odf_mat = np.zeros((n_vert, n_elem)) counter = 0 for n in range(0, radial_order + 1, 2): for j in range(1, 2 + n // 2): l = n + 2 - 2 * j kappa = ((-1) ** (j - 1) * 2 ** (-(l + 3) / 2.0) * mu ** s) / np.pi matsum = 0 for k in range(0, j): matsum += ((-1) ** k * binomialfloat(j + l - 0.5, j - k - 1) * gamma((l + s + 3) / 2.0 + k)) /\ (factorial(k) * 0.5 ** ((l + s + 3) / 2.0 + k)) for m in range(-l, l + 1): odf_mat[:, counter] = kappa * matsum *\ real_sph_harm(m, l, theta, phi) counter += 1 return odf_mat def mapmri_isotropic_odf_sh_matrix(radial_order, mu, s): r"""Compute the isotropic MAPMRI ODF matrix [1]_ Eq. 32 for the isotropic propagator in [1]_ eq. (60). Here we do not compute the sphere function but the spherical harmonics by only integrating the radial part of the propagator. We use the same derivation of the ODF in the isotropic implementation as in [2]_ eq. (C8). Parameters ---------- radial_order : unsigned int, an even integer that represent the order of the basis mu : float, isotropic scale factor of the isotropic MAP-MRI basis s : unsigned int radial moment of the ODF Returns ------- odf_sh_mat : Matrix, shape (N_sh_coef, N_mapmri_coef) ODF design matrix to spherical harmonics References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. .. [2]_ Fick, Rutger HJ, et al. "MAPL: Tissue microstructure estimation using Laplacian-regularized MAP-MRI and its application to HCP data." NeuroImage (2016). """ sh_mat = sph_harm_ind_list(radial_order) ind_mat = mapmri_isotropic_index_matrix(radial_order) n_elem_shore = ind_mat.shape[0] n_elem_sh = sh_mat[0].shape[0] odf_sh_mat = np.zeros((n_elem_sh, n_elem_shore)) counter = 0 for n in range(0, radial_order + 1, 2): for j in range(1, 2 + n // 2): l = n + 2 - 2 * j kappa = ((-1) ** (j - 1) * 2 ** (-(l + 3) / 2.0) * mu ** s) / np.pi matsum = 0 for k in range(0, j): matsum += ((-1) ** k * binomialfloat(j + l - 0.5, j - k - 1) * gamma((l + s + 3) / 2.0 + k)) /\ (factorial(k) * 0.5 ** ((l + s + 3) / 2.0 + k)) for m in range(-l, l + 1): index_overlap = np.all([l == sh_mat[1], m == sh_mat[0]], 0) odf_sh_mat[:, counter] = kappa * matsum * index_overlap counter += 1 return odf_sh_mat def mapmri_isotropic_laplacian_reg_matrix(radial_order, mu): r''' Computes the Laplacian regularization matrix for MAP-MRI's isotropic implementation [1]_ eq. (C7). Parameters ---------- radial_order : unsigned int, an even integer that represent the order of the basis mu : float, isotropic scale factor of the isotropic MAP-MRI basis Returns ------- LR : Matrix, shape (N_coef, N_coef) Laplacian regularization matrix References ---------- .. [1]_ Fick, Rutger HJ, et al. "MAPL: Tissue microstructure estimation using Laplacian-regularized MAP-MRI and its application to HCP data." NeuroImage (2016). 
''' ind_mat = mapmri_isotropic_index_matrix(radial_order) n_elem = ind_mat.shape[0] LR = np.zeros((n_elem, n_elem)) for i in range(n_elem): for k in range(i, n_elem): if ind_mat[i, 1] == ind_mat[k, 1] and \ ind_mat[i, 2] == ind_mat[k, 2]: ji = ind_mat[i, 0] jk = ind_mat[k, 0] l = ind_mat[i, 1] if ji == (jk + 2): LR[i, k] = LR[k, i] = 2.0 ** (2 - l) * np.pi ** 2 * mu *\ gamma(5 / 2.0 + jk + l) / gamma(jk) elif ji == (jk + 1): LR[i, k] = LR[k, i] = 2.0 ** (2 - l) * np.pi ** 2 * mu *\ (-3 + 4 * ji + 2 * l) * gamma(3 / 2.0 + jk + l) /\ gamma(jk) elif ji == jk: LR[i, k] = 2.0 ** (-l) * np.pi ** 2 * mu *\ (3 + 24 * ji ** 2 + 4 * (-2 + l) * l + 12 * ji * (-1 + 2 * l)) *\ gamma(1 / 2.0 + ji + l) / gamma(ji) elif ji == (jk - 1): LR[i, k] = LR[k, i] = 2.0 ** (2 - l) * np.pi ** 2 * mu *\ (-3 + 4 * jk + 2 * l) * gamma(3 / 2.0 + ji + l) /\ gamma(ji) elif ji == (jk - 2): LR[i, k] = LR[k, i] = 2.0 ** (2 - l) * np.pi ** 2 * mu *\ gamma(5 / 2.0 + ji + l) / gamma(ji) return LR def mapmri_isotropic_index_matrix(radial_order): r""" Calculates the indices for the isotropic MAPMRI basis [1]_ Fig 8. Parameters ---------- radial_order : unsigned int radial order of isotropic MAPMRI basis Returns ------- index_matrix : array, shape (N,3) ordering of the basis in x, y, z References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. """ index_matrix = [] for n in range(0, radial_order + 1, 2): for j in range(1, 2 + n // 2): for m in range(-1 * (n + 2 - 2 * j), (n + 3 - 2 * j)): index_matrix.append([j, n + 2 - 2 * j, m]) return np.array(index_matrix) def create_rspace(gridsize, radius_max): """ Create the real space table, that contains the points in which to compute the pdf. Parameters ---------- gridsize : unsigned int dimension of the propagator grid radius_max : float maximal radius in which compute the propagator Returns ------- tab : array, shape (N,3) real space points in which calculates the pdf """ radius = gridsize // 2 vecs = [] for i in range(-radius, radius + 1): for j in range(-radius, radius + 1): for k in range(0, radius + 1): vecs.append([i, j, k]) vecs = np.array(vecs, dtype=np.float32) # there are points in the corners farther than sphere radius points_inside_sphere = np.sqrt(np.einsum('ij,ij->i', vecs, vecs)) <= radius vecs_inside_sphere = vecs[points_inside_sphere] tab = vecs_inside_sphere / radius tab = tab * radius_max return tab def delta(n, m): if n == m: return 1 return 0 def map_laplace_u(n, m): """ S(n, m) static matrix for Laplacian regularization [1]_ eq. (13). Parameters ---------- n, m : unsigned int basis order of the MAP-MRI basis in different directions Returns ------- U : float, Analytical integral of $\phi_n(q) * \phi_m(q)$ References ---------- .. [1]_ Fick, Rutger HJ, et al. "MAPL: Tissue microstructure estimation using Laplacian-regularized MAP-MRI and its application to HCP data." NeuroImage (2016). """ return (-1) ** n * delta(n, m) / (2 * np.sqrt(np.pi)) def map_laplace_t(n, m): """ L(m, n) static matrix for Laplacian regularization [1]_ eq. (12). Parameters ---------- n, m : unsigned int basis order of the MAP-MRI basis in different directions Returns ------- T : float Analytical integral of $\phi_n(q) * \phi_m''(q)$ References ---------- .. [1]_ Fick, Rutger HJ, et al. "MAPL: Tissue microstructure estimation using Laplacian-regularized MAP-MRI and its application to HCP data." NeuroImage (2016). 
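Examples
--------
Because of the Kronecker deltas in eq. (12), an entry is non-zero only
when ``m == n`` or ``abs(m - n) == 2``; for instance, evaluating the
formula gives::

    map_laplace_t(0, 0)   # == -np.pi ** (3 / 2.)
    map_laplace_t(2, 0)   # == -np.sqrt(2) * np.pi ** (3 / 2.)
    map_laplace_t(3, 0)   # == 0.0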
""" a = np.sqrt((m - 1) * m) * delta(m - 2, n) b = np.sqrt((n - 1) * n) * delta(n - 2, m) c = (2 * n + 1) * delta(m, n) return np.pi ** (3 / 2.) * (-1) ** (n + 1) * (a + b + c) def map_laplace_s(n, m): """ R(m,n) static matrix for Laplacian regularization [1]_ eq. (11). Parameters ---------- n, m : unsigned int basis order of the MAP-MRI basis in different directions Returns ------- S : float Analytical integral of $\phi_n''(q) * \phi_m''(q)$ References ---------- .. [1]_ Fick, Rutger HJ, et al. "MAPL: Tissue microstructure estimation using Laplacian-regularized MAP-MRI and its application to HCP data." NeuroImage (2016). """ k = 2 * np.pi ** (7 / 2.) * (-1) ** (n) a0 = 3 * (2 * n ** 2 + 2 * n + 1) * delta(n, m) sqmn = np.sqrt(gamma(m + 1) / gamma(n + 1)) sqnm = 1 / sqmn an2 = 2 * (2 * n + 3) * sqmn * delta(m, n + 2) an4 = sqmn * delta(m, n + 4) am2 = 2 * (2 * m + 3) * sqnm * delta(m + 2, n) am4 = sqnm * delta(m + 4, n) return k * (a0 + an2 + an4 + am2 + am4) def mapmri_STU_reg_matrices(radial_order): """ Generates the static portions of the Laplacian regularization matrix according to [1]_ eq. (11, 12, 13). Parameters ---------- radial_order : unsigned int, an even integer that represent the order of the basis Returns ------- S, T, U : Matrices, shape (N_coef,N_coef) Regularization submatrices References ---------- .. [1]_ Fick, Rutger HJ, et al. "MAPL: Tissue microstructure estimation using Laplacian-regularized MAP-MRI and its application to HCP data." NeuroImage (2016). """ S = np.zeros((radial_order + 1, radial_order + 1)) for i in range(radial_order + 1): for j in range(radial_order + 1): S[i, j] = map_laplace_s(i, j) T = np.zeros((radial_order + 1, radial_order + 1)) for i in range(radial_order + 1): for j in range(radial_order + 1): T[i, j] = map_laplace_t(i, j) U = np.zeros((radial_order + 1, radial_order + 1)) for i in range(radial_order + 1): for j in range(radial_order + 1): U[i, j] = map_laplace_u(i, j) return S, T, U def mapmri_laplacian_reg_matrix(ind_mat, mu, S_mat, T_mat, U_mat): """ Puts the Laplacian regularization matrix together [1]_ eq. (10). The static parts in S, T and U are multiplied and divided by the voxel-specific scale factors. Parameters ---------- ind_mat : matrix (N_coef, 3), Basis order matrix mu : array, shape (3,) scale factors of the basis for x, y, z S, T, U : matrices, shape (N_coef,N_coef) Regularization submatrices Returns ------- LR : matrix (N_coef, N_coef), Voxel-specific Laplacian regularization matrix References ---------- .. [1]_ Fick, Rutger HJ, et al. "MAPL: Tissue microstructure estimation using Laplacian-regularized MAP-MRI and its application to HCP data." NeuroImage (2016). 
""" ux, uy, uz = mu x, y, z = ind_mat.T n_elem = ind_mat.shape[0] LR = np.zeros((n_elem, n_elem)) for i in range(n_elem): for j in range(i, n_elem): if ( (x[i] - x[j]) % 2 == 0 and (y[i] - y[j]) % 2 == 0 and (z[i] - z[j]) % 2 == 0 ): LR[i, j] = LR[j, i] = \ (ux ** 3 / (uy * uz)) *\ S_mat[x[i], x[j]] * U_mat[y[i], y[j]] * U_mat[z[i], z[j]] +\ (uy ** 3 / (ux * uz)) *\ S_mat[y[i], y[j]] * U_mat[z[i], z[j]] * U_mat[x[i], x[j]] +\ (uz ** 3 / (ux * uy)) *\ S_mat[z[i], z[j]] * U_mat[x[i], x[j]] * U_mat[y[i], y[j]] +\ 2 * ((ux * uy) / uz) *\ T_mat[x[i], x[j]] * T_mat[y[i], y[j]] * U_mat[z[i], z[j]] +\ 2 * ((ux * uz) / uy) *\ T_mat[x[i], x[j]] * T_mat[z[i], z[j]] * U_mat[y[i], y[j]] +\ 2 * ((uz * uy) / ux) *\ T_mat[z[i], z[j]] * T_mat[y[i], y[j]] * U_mat[x[i], x[j]] return LR def generalized_crossvalidation_array(data, M, LR, weights_array=None): """Generalized Cross Validation Function [1]_ eq. (15). Here weights_array is a numpy array with all values that should be considered in the GCV. It will run through the weights until the cost function starts to increase, then stop and take the last value as the optimum weight. Parameters ---------- data : array (N), Basis order matrix M : matrix, shape (N, Ncoef) mapmri observation matrix LR : matrix, shape (N_coef, N_coef) regularization matrix weights_array : array (N_of_weights) array of optional regularization weights """ if weights_array is None: lrange = np.linspace(0.05, 1, 20) # reasonably fast standard range else: lrange = weights_array samples = lrange.shape[0] MMt = np.dot(M.T, M) K = len(data) gcvold = gcvnew = 10e10 # set initialization gcv threshold very high i = -1 while gcvold >= gcvnew and i < samples - 2: gcvold = gcvnew i = i + 1 S = np.dot(np.dot(M, np.linalg.pinv(MMt + lrange[i] * LR)), M.T) trS = np.matrix.trace(S) normyytilde = np.linalg.norm(data - np.dot(S, data), 2) gcvnew = normyytilde / (K - trS) lopt = lrange[i - 1] return lopt def generalized_crossvalidation(data, M, LR, gcv_startpoint=5e-2): """Generalized Cross Validation Function [1]_ eq. (15). Finds optimal regularization weight based on generalized cross-validation. Parameters ---------- data : array (N), data array M : matrix, shape (N, Ncoef) mapmri observation matrix LR : matrix, shape (N_coef, N_coef) regularization matrix gcv_startpoint : float startpoint for the gcv optimization Returns ------- optimal_lambda : float, optimal regularization weight References ---------- .. [1]_ Craven et al. "Smoothing Noisy Data with Spline Functions." NUMER MATH 31.4 (1978): 377-403. 
""" MMt = np.dot(M.T, M) K = len(data) bounds = ((1e-5, 10),) solver = Optimizer(fun=gcv_cost_function, x0=(gcv_startpoint,), args=((data, M, MMt, K, LR),), bounds=bounds) optimal_lambda = solver.xopt return optimal_lambda def gcv_cost_function(weight, args): """The GCV cost function that is iterated [4] """ data, M, MMt, K, LR = args S = np.dot(np.dot(M, np.linalg.pinv(MMt + weight * LR)), M.T) trS = np.matrix.trace(S) normyytilde = np.linalg.norm(data - np.dot(S, data), 2) gcv_value = normyytilde / (K - trS) return gcv_value dipy-0.13.0/dipy/reconst/multi_voxel.py000066400000000000000000000073721317371701200201160ustar00rootroot00000000000000"""Tools to easily make multi voxel models""" import numpy as np from numpy.lib.stride_tricks import as_strided from dipy.core.ndindex import ndindex from dipy.reconst.quick_squash import quick_squash as _squash from dipy.reconst.base import ReconstFit def multi_voxel_fit(single_voxel_fit): """Method decorator to turn a single voxel model fit definition into a multi voxel model fit definition """ def new_fit(self, data, mask=None): """Fit method for every voxel in data""" # If only one voxel just return a normal fit if data.ndim == 1: return single_voxel_fit(self, data) # Make a mask if mask is None if mask is None: shape = data.shape[:-1] strides = (0,) * len(shape) mask = as_strided(np.array(True), shape=shape, strides=strides) # Check the shape of the mask if mask is not None elif mask.shape != data.shape[:-1]: raise ValueError("mask and data shape do not match") # Fit data where mask is True fit_array = np.empty(data.shape[:-1], dtype=object) for ijk in ndindex(data.shape[:-1]): if mask[ijk]: fit_array[ijk] = single_voxel_fit(self, data[ijk]) return MultiVoxelFit(self, fit_array, mask) return new_fit class MultiVoxelFit(ReconstFit): """Holds an array of fits and allows access to their attributes and methods""" def __init__(self, model, fit_array, mask): self.model = model self.fit_array = fit_array self.mask = mask @property def shape(self): return self.fit_array.shape def __getattr__(self, attr): result = CallableArray(self.fit_array.shape, dtype=object) for ijk in ndindex(result.shape): if self.mask[ijk]: result[ijk] = getattr(self.fit_array[ijk], attr) return _squash(result, self.mask) def __getitem__(self, index): item = self.fit_array[index] if isinstance(item, np.ndarray): return MultiVoxelFit(self.model, item, self.mask[index]) else: return item def predict(self, *args, **kwargs): """ Predict for the multi-voxel object using each single-object's prediction API, with S0 provided from an array. 
""" S0 = kwargs.get('S0', np.ones(self.fit_array.shape)) idx = ndindex(self.fit_array.shape) ijk = next(idx) def gimme_S0(S0, ijk): if isinstance(S0, np.ndarray): return S0[ijk] else: return S0 kwargs['S0'] = gimme_S0(S0, ijk) # If we have a mask, we might have some Nones up front, skip those: while self.fit_array[ijk] is None: ijk = next(idx) if not hasattr(self.fit_array[ijk], 'predict'): msg = "This model does not have prediction implemented yet" raise NotImplementedError(msg) first_pred = self.fit_array[ijk].predict(*args, **kwargs) result = np.zeros(self.fit_array.shape + (first_pred.shape[-1],)) result[ijk] = first_pred for ijk in idx: kwargs['S0'] = gimme_S0(S0, ijk) # If it's masked, we predict a 0: if self.fit_array[ijk] is None: result[ijk] *= 0 else: result[ijk] = self.fit_array[ijk].predict(*args, **kwargs) return result class CallableArray(np.ndarray): """An array which can be called like a function""" def __call__(self, *args, **kwargs): result = np.empty(self.shape, dtype=object) for ijk in ndindex(self.shape): item = self[ijk] if item is not None: result[ijk] = item(*args, **kwargs) return _squash(result) dipy-0.13.0/dipy/reconst/odf.py000066400000000000000000000057211317371701200163130ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import from dipy.reconst.base import ReconstModel, ReconstFit import numpy as np # Classes OdfModel and OdfFit are using API ReconstModel and ReconstFit from # .base class OdfModel(ReconstModel): """An abstract class to be sub-classed by specific odf models All odf models should provide a fit method which may take data as it's first and only argument. """ def __init__(self, gtab): ReconstModel.__init__(self, gtab) def fit(self, data): """To be implemented by specific odf models""" raise NotImplementedError("To be implemented in sub classes") class OdfFit(ReconstFit): def odf(self, sphere): """To be implemented but specific odf models""" raise NotImplementedError("To be implemented in sub classes") def gfa(samples): r"""The general fractional anisotropy of a function evaluated on the unit sphere Parameters ---------- samples : ndarray Values of data on the unit sphere. Returns ------- gfa : ndarray GFA evaluated in each entry of the array, along the last dimension. An `np.nan` is returned for coordinates that contain all-zeros in `samples`. Notes ----- The GFA is defined as [1]_ :: \sqrt{\frac{n \sum_i{(\Psi_i - <\Psi>)^2}}{(n-1) \sum{\Psi_i ^ 2}}} Where $\Psi$ is an orientation distribution function sampled discretely on the unit sphere and angle brackets denote average over the samples on the sphere. .. [1] Quality assessment of High Angular Resolution Diffusion Imaging data using bootstrap on Q-ball reconstruction. J. Cohen Adad, M. Descoteaux, L.L. Wald. JMRI 33: 1194-1208. """ diff = samples - samples.mean(-1)[..., None] n = samples.shape[-1] numer = np.array([n * (diff ** 2).sum(-1)]) denom = np.array([(n - 1) * (samples ** 2).sum(-1)]) result = np.ones_like(denom) * np.nan idx = np.where(denom > 0) result[idx] = np.sqrt(numer[idx] / denom[idx]) return result.squeeze() def minmax_normalize(samples, out=None): """Min-max normalization of a function evaluated on the unit sphere Normalizes samples to ``(samples - min(samples)) / (max(samples) - min(samples))`` for each unit sphere. Parameters ---------- samples : ndarray (..., N) N samples on a unit sphere for each point, stored along the last axis of the array. out : ndrray (..., N), optional An array to store the normalized samples. 
Returns ------- out : ndarray, (..., N) Normalized samples. """ if out is None: dtype = np.common_type(np.empty(0, 'float32'), samples) out = np.array(samples, dtype=dtype, copy=True) else: out[:] = samples sample_mins = np.min(samples, -1)[..., None] sample_maxes = np.max(samples, -1)[..., None] out -= sample_mins out /= (sample_maxes - sample_mins) return out dipy-0.13.0/dipy/reconst/peak_direction_getter.pyx000066400000000000000000000104221317371701200222570ustar00rootroot00000000000000cimport cython cimport numpy as np import numpy as np from dipy.tracking.propspeed cimport _propagation_direction from dipy.tracking.local.direction_getter cimport DirectionGetter cdef extern from "dpy_math.h" nogil: double dpy_rint(double x) def make_nd(array, N): """Makes an array that's less than Nd - Nd We need this because numpy 1.6 does not return a "c contiguous array" when you call ``array(a, order='c', ndmin=N)`` """ if array.ndim > N: raise ValueError() new_shape = (1,) * (N - array.ndim) + array.shape return array.reshape(new_shape) cdef class PeaksAndMetricsDirectionGetter(DirectionGetter): """Deterministic Direction Getter based on peak directions. This class contains the cython portion of the code for PeaksAndMetrics and is not meant to be used on its own. """ cdef: public double qa_thr, ang_thr, total_weight public double[:, :, :, ::1] _qa, _ind public double[:, ::1] _odf_vertices int initialized def __cinit__(self): initialized = False self.qa_thr = 0.0239 self.ang_thr = 60 self.total_weight = .5 def _initialize(self): """First time that a PAM instance is used as a direction getter, initialize all the memoryviews. """ if self.peak_values.shape != self.peak_indices.shape: msg = "shapes of peak_values and peak_indices do not match" raise ValueError(msg) self._qa = make_nd(np.array(self.peak_values, copy=False, dtype='double', order='C'), 4) self._ind = make_nd(np.array(self.peak_indices, copy=False, dtype='double', order='C'), 4) self._odf_vertices = np.array(self.sphere.vertices, copy=False, dtype='double', order='C') self.initialized = True def initial_direction(self, double[::1] point): """The best starting directions for fiber tracking from point All the valid peaks in the voxel closest to point are returned as initial directions. """ if not self.initialized: self._initialize() cdef: np.npy_intp numpeaks, i np.npy_intp ijk[3] # ijk is the closest voxel to point for i in range(3): ijk[i] = dpy_rint(point[i]) if ijk[i] < 0 or ijk[i] >= self._ind.shape[i]: raise IndexError("point outside data") # Check to see how many peaks were found in the voxel for numpeaks in range(self._ind.shape[3]): if self._ind[ijk[0], ijk[1], ijk[2], numpeaks] < 0: break # Create directions array and copy peak directions from vertices res = np.empty((numpeaks, 3)) for i in range(numpeaks): peak_index = self._ind[ijk[0], ijk[1], ijk[2], i] res[i, :] = self._odf_vertices[ peak_index, :] return res @cython.initializedcheck(False) @cython.boundscheck(False) @cython.wraparound(False) cpdef int get_direction(self, double[::1] point, double[::1] direction) except -1: """Interpolate closest peaks to direction from voxels neighboring point Update direction and return 0 if successful. If no tracking direction could be found, return 1. 
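Notes
-----
``direction`` is updated in place, which is what the local tracking
machinery expects. A sketch with a placeholder direction getter ``dg``
and C-contiguous float64 arrays ``point`` and ``direction`` of length 3::

    status = dg.get_direction(point, direction)
    if status == 0:
        # `direction` now holds the next tracking direction
        pass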
""" if not self.initialized: self._initialize() cdef: np.npy_intp s double newdirection[3] np.npy_intp qa_shape[4] np.npy_intp qa_strides[4] for i in range(4): qa_shape[i] = self._qa.shape[i] qa_strides[i] = self._qa.strides[i] s = _propagation_direction(&point[0], &direction[0], &self._qa[0, 0, 0, 0], &self._ind[0, 0, 0, 0], &self._odf_vertices[0, 0], self.qa_thr, self.ang_thr, qa_shape, qa_strides, newdirection, self.total_weight) if s: for i in range(3): direction[i] = newdirection[i] return 0 else: return 1 dipy-0.13.0/dipy/reconst/peaks.py000066400000000000000000000003221317371701200166360ustar00rootroot00000000000000import warnings w_s = "The module 'dipy.reconst.peaks' is deprecated." w_s += " Please use the module 'dipy.direction.peaks' instead" warnings.warn(w_s, DeprecationWarning) from dipy.direction.peaks import * dipy-0.13.0/dipy/reconst/quick_squash.pyx000066400000000000000000000100771317371701200204330ustar00rootroot00000000000000""" Detect common dtype across object array """ from functools import reduce cimport numpy as cnp cimport cython import numpy as np cdef enum: SCALAR, ARRAY SCALAR_TYPES = np.ScalarType @cython.boundscheck(False) @cython.wraparound(False) def quick_squash(obj_arr, mask=None, fill=0): """Try and make a standard array from an object array This function takes an object array and attempts to convert it to a more useful dtype. If array can be converted to a better dtype, Nones are replaced by `fill`. To make the behaviour of this function more clear, here are the most common cases: 1. `obj_arr` is an array of scalars of type `T`. Returns an array like `obj_arr.astype(T)` 2. `obj_arr` is an array of arrays. All items in `obj_arr` have the same shape ``S``. Returns an array with shape ``obj_arr.shape + S`` 3. `obj_arr` is an array of arrays of different shapes. Returns `obj_arr`. 4. Items in `obj_arr` are not ndarrays or scalars. Returns `obj_arr`. Parameters ---------- obj_arr : array, dtype=object The array to be converted. mask : array, dtype=bool, optional mask is nonzero where `obj_arr` has Nones. fill : number, optional Nones are replaced by `fill`. 
Returns ------- result : array Examples -------- >>> arr = np.empty(3, dtype=object) >>> arr.fill(2) >>> quick_squash(arr) array([2, 2, 2]) >>> arr[0] = None >>> quick_squash(arr) array([0, 2, 2]) >>> arr.fill(np.ones(2)) >>> r = quick_squash(arr) >>> r.shape (3, 2) >>> r.dtype dtype('float64') """ cdef: cnp.npy_intp i, j, N, dtypes_i object [:] flat_obj char [:] flat_mask cnp.dtype [:] dtypes int have_mask = not mask is None int search_for cnp.ndarray result cnp.dtype dtype, last_dtype object common_shape if have_mask: flat_mask = np.array(mask.reshape(-1), dtype=np.int8) N = obj_arr.size dtypes = np.empty((N,), dtype=object) flat_obj = obj_arr.reshape((-1)) # Find first valid value for i in range(N): e = flat_obj[i] if ((have_mask and flat_mask[i] == 0) or (not have_mask and e is None)): continue t = type(e) if issubclass(t, np.generic) or t in SCALAR_TYPES: search_for = SCALAR common_shape = () dtype = np.dtype(t) break elif t == cnp.ndarray: search_for = ARRAY common_shape = e.shape dtype = e.dtype break else: # something other than scalar or array return obj_arr else: # Nothing outside mask / all None return obj_arr # Check rest of values to confirm common type / shape, and collect dtypes last_dtype = dtype dtypes[0] = dtype dtypes_i = 1 for j in range(i+1, N): e = flat_obj[j] if ((have_mask and flat_mask[j] == 0) or (not have_mask and e is None)): continue t = type(e) if search_for == SCALAR: if not issubclass(t, np.generic) and not t in SCALAR_TYPES: return obj_arr dtype = np.dtype(t) else: # search_for == ARRAY: if not t == cnp.ndarray: return obj_arr if not e.shape == common_shape: return obj_arr dtype = e.dtype if dtype != last_dtype: last_dtype = dtype dtypes[dtypes_i] = dtype dtypes_i += 1 # Find common dtype unique_dtypes = set(dtypes[:dtypes_i]) tiny_arrs = [np.zeros((1,), dtype=dt) for dt in unique_dtypes] dtype = reduce(np.add, tiny_arrs).dtype # Create and fill output array result = np.empty((N,) + common_shape, dtype=dtype) for i in range(N): e = flat_obj[i] if ((have_mask and flat_mask[i] == 0) or (not have_mask and e is None)): result[i] = fill else: result[i] = e return result.reshape(obj_arr.shape + common_shape) dipy-0.13.0/dipy/reconst/recspeed.pyx000066400000000000000000000472671317371701200175400ustar00rootroot00000000000000# Emacs should think this is a -*- python -*- file """ Optimized routines for creating voxel diffusion models """ # cython: profile=True # cython: embedsignature=True cimport cython import numpy as np cimport numpy as cnp from libc.stdlib cimport malloc, free from libc.string cimport memcpy cdef extern from "dpy_math.h" nogil: double floor(double x) double fabs(double x) double cos(double x) double sin(double x) float acos(float x ) double sqrt(double x) double DPY_PI # initialize numpy runtime cnp.import_array() #numpy pointers cdef inline float* asfp(cnp.ndarray pt): return pt.data cdef inline double* asdp(cnp.ndarray pt): return pt.data cdef void splitoffset(float *offset, size_t *index, size_t shape) nogil: """Splits a global offset into an integer index and a relative offset""" offset[0] -= .5 if offset[0] <= 0: index[0] = 0 offset[0] = 0. elif offset[0] >= (shape - 1): index[0] = shape - 2 offset[0] = 1. 
else: index[0] = offset[0] offset[0] = offset[0] - index[0] @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) def trilinear_interp(cnp.ndarray[cnp.float32_t, ndim=4, mode='strided'] data, cnp.ndarray[cnp.float_t, ndim=1, mode='strided'] index, cnp.ndarray[cnp.float_t, ndim=1, mode='c'] voxel_size): """Interpolates vector from 4D `data` at 3D point given by `index` Interpolates a vector of length T from a 4D volume of shape (I, J, K, T), given point (x, y, z) where (x, y, z) are the coordinates of the point in real units (not yet adjusted for voxel size). """ cdef: float x = index[0] / voxel_size[0] float y = index[1] / voxel_size[1] float z = index[2] / voxel_size[2] float weight size_t x_ind, y_ind, z_ind, ii, jj, kk, LL size_t last_d = data.shape[3] bint bounds_check cnp.ndarray[cnp.float32_t, ndim=1, mode='c'] result bounds_check = (x < 0 or y < 0 or z < 0 or x > data.shape[0] or y > data.shape[1] or z > data.shape[2]) if bounds_check: raise IndexError splitoffset(&x, &x_ind, data.shape[0]) splitoffset(&y, &y_ind, data.shape[1]) splitoffset(&z, &z_ind, data.shape[2]) result = np.zeros(last_d, dtype='float32') for ii from 0 <= ii <= 1: for jj from 0 <= jj <= 1: for kk from 0 <= kk <= 1: weight = wght(ii, x)*wght(jj, y)*wght(kk, z) for LL from 0 <= LL < last_d: result[LL] += data[x_ind+ii,y_ind+jj,z_ind+kk,LL]*weight return result @cython.profile(False) cdef float wght(int i, float r) nogil: if i: return r else: return 1.-r @cython.boundscheck(False) @cython.wraparound(False) def remove_similar_vertices( cnp.ndarray[cnp.float_t, ndim=2, mode='strided'] vertices, double theta, bint return_mapping=False, bint return_index=False): """Remove vertices that are less than `theta` degrees from any other Returns vertices that are at least theta degrees from any other vertex. Vertex v and -v are considered the same so if v and -v are both in `vertices` only one is kept. Also if v and w are both in vertices, w must be separated by theta degrees from both v and -v to be unique. Parameters ---------- vertices : (N, 3) ndarray N unit vectors. theta : float The minimum separation between vertices in degrees. return_mapping : {False, True}, optional If True, return `mapping` as well as `vertices` and maybe `indices` (see below). return_indices : {False, True}, optional If True, return `indices` as well as `vertices` and maybe `mapping` (see below). Returns ------- unique_vertices : (M, 3) ndarray Vertices sufficiently separated from one another. mapping : (N,) ndarray For each element ``vertices[i]`` ($i \in 0..N-1$), the index $j$ to a vertex in `unique_vertices` that is less than `theta` degrees from ``vertices[i]``. Only returned if `return_mapping` is True. indices : (N,) ndarray `indices` gives the reverse of `mapping`. For each element ``unique_vertices[j]`` ($j \in 0..M-1$), the index $i$ to a vertex in `vertices` that is less than `theta` degrees from ``unique_vertices[j]``. If there is more than one element of `vertices` that is less than theta degrees from `unique_vertices[j]`, return the first (lowest index) matching value. Only return if `return_indices` is True. 
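Examples
--------
A small illustration: two of the three vertices below are antipodal, so
they are closer than ``theta`` degrees and only two unique vertices are
returned::

    import numpy as np
    verts = np.array([[1., 0., 0.],
                      [-1., 0., 0.],
                      [0., 1., 0.]])
    unique = remove_similar_vertices(verts, theta=10)
    # unique.shape == (2, 3)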
""" if vertices.shape[1] != 3: raise ValueError('Vertices should be 2D with second dim length 3') cdef: cnp.ndarray[cnp.float_t, ndim=2, mode='c'] unique_vertices cnp.ndarray[cnp.uint16_t, ndim=1, mode='c'] mapping cnp.ndarray[cnp.uint16_t, ndim=1, mode='c'] index char pass_all # Variable has to be large enough for all valid sizes of vertices cnp.npy_int32 i, j cnp.npy_int32 n_unique = 0 # Large enough for all possible sizes of vertices cnp.npy_intp n = vertices.shape[0] double a, b, c, sim double cos_similarity = cos(DPY_PI/180 * theta) if n >= 2**16: # constrained by input data type raise ValueError("too many vertices") unique_vertices = np.empty((n, 3), dtype=np.float) if return_mapping: mapping = np.empty(n, dtype=np.uint16) if return_index: index = np.empty(n, dtype=np.uint16) for i in range(n): pass_all = 1 a = vertices[i, 0] b = vertices[i, 1] c = vertices[i, 2] # Check all other accepted vertices for similarity to this one for j in range(n_unique): sim = fabs(a * unique_vertices[j, 0] + b * unique_vertices[j, 1] + c * unique_vertices[j, 2]) if sim > cos_similarity: # too similar, drop pass_all = 0 if return_mapping: mapping[i] = j # This point unique_vertices[j] already has an entry in index, # so we do not need to update. break if pass_all: # none similar, keep unique_vertices[n_unique, 0] = a unique_vertices[n_unique, 1] = b unique_vertices[n_unique, 2] = c if return_mapping: mapping[i] = n_unique if return_index: index[n_unique] = i n_unique += 1 verts = unique_vertices[:n_unique].copy() if not return_mapping and not return_index: return verts out = [verts] if return_mapping: out.append(mapping) if return_index: out.append(index[:n_unique].copy()) return out @cython.boundscheck(False) @cython.wraparound(False) def search_descending(cnp.ndarray[cnp.float_t, ndim=1, mode='c'] a, double relative_threshold): """`i` in descending array `a` so `a[i] < a[0] * relative_threshold` Call ``T = a[0] * relative_threshold``. Return value `i` will be the smallest index in the descending array `a` such that ``a[i] < T``. Equivalently, `i` will be the largest index such that ``all(a[:i] >= T)``. If all values in `a` are >= T, return the length of array `a`. Parameters ---------- a : ndarray, ndim=1, c-contiguous Array to be searched. We assume `a` is in descending order. relative_threshold : float Applied threshold will be ``T`` with ``T = a[0] * relative_threshold``. Returns ------- i : np.intp If ``T = a[0] * relative_threshold`` then `i` will be the largest index such that ``all(a[:i] >= T)``. If all values in `a` are >= T then `i` will be `len(a)`. Examples -------- >>> a = np.arange(10, 0, -1, dtype=float) >>> a array([ 10., 9., 8., 7., 6., 5., 4., 3., 2., 1.]) >>> search_descending(a, 0.5) 6 >>> a < 10 * 0.5 array([False, False, False, False, False, False, True, True, True, True], dtype=bool) >>> search_descending(a, 1) 1 >>> search_descending(a, 2) 0 >>> search_descending(a, 0) 10 """ if a.shape[0] == 0: return 0 cdef: cnp.npy_intp left = 0 cnp.npy_intp right = a.shape[0] cnp.npy_intp mid double threshold = relative_threshold * a[0] while left != right: mid = (left + right) // 2 if a[mid] >= threshold: left = mid + 1 else: right = mid return left @cython.wraparound(False) @cython.boundscheck(False) @cython.profile(True) def local_maxima(cnp.ndarray odf, cnp.ndarray edges): """Local maxima of a function evaluated on a discrete set of points. If a function is evaluated on some set of points where each pair of neighboring points is an edge in edges, find the local maxima. 
Parameters ---------- odf : array, 1d, dtype=double The function evaluated on a set of discrete points. edges : array (N, 2) The set of neighbor relations between the points. Every edge, ie `edges[i, :]`, is a pair of neighboring points. Returns ------- peak_values : ndarray Value of odf at a maximum point. Peak values is sorted in descending order. peak_indices : ndarray Indices of maximum points. Sorted in the same order as `peak_values` so `odf[peak_indices[i]] == peak_values[i]`. Note ---- A point is a local maximum if it is > at least one neighbor and >= all neighbors. If no points meet the above criteria, 1 maximum is returned such that `odf[maximum] == max(odf)`. See Also -------- dipy.core.sphere """ cdef: cnp.ndarray[cnp.npy_intp] wpeak wpeak = np.zeros((odf.shape[0],), dtype=np.intp) count = _compare_neighbors(odf, edges, &wpeak[0]) if count == -1: raise IndexError("Values in edges must be < len(odf)") elif count == -2: raise ValueError("odf can not have nans") indices = wpeak[:count].copy() # Get peak values return values = odf.take(indices) # Sort both values and indices _cosort(values, indices) return values, indices @cython.wraparound(False) @cython.boundscheck(False) cdef void _cosort(double[::1] A, cnp.npy_intp[::1] B) nogil: """Sorts `A` in-place and applies the same reordering to `B`""" cdef: size_t n = A.shape[0] size_t hole double insert_A long insert_B for i in range(1, n): insert_A = A[i] insert_B = B[i] hole = i while hole > 0 and insert_A > A[hole -1]: A[hole] = A[hole - 1] B[hole] = B[hole - 1] hole -= 1 A[hole] = insert_A B[hole] = insert_B @cython.wraparound(False) @cython.boundscheck(False) cdef long _compare_neighbors(double[:] odf, cnp.uint16_t[:, :] edges, cnp.npy_intp *wpeak_ptr) nogil: """Compares every pair of points in edges Parameters ---------- odf : array of double values of points on sphere. edges : array of uint16 neighbor relationships on sphere. Every set of neighbors on the sphere should be an edge. wpeak_ptr : pointer pointer to a block of memory which will be updated with the result of the comparisons. This block of memory must be large enough to hold len(odf) longs. The first `count` elements of wpeak will be updated with the indices of the peaks. Returns ------- count : long Number of maxima in odf. A value < 0 indicates an error: -1 : value in edges too large, >= than len(odf) -2 : odf contains nans """ cdef: size_t lenedges = edges.shape[0] size_t lenodf = odf.shape[0] size_t i cnp.uint16_t find0, find1 double odf0, odf1 long count = 0 for i in range(lenedges): find0 = edges[i, 0] find1 = edges[i, 1] if find0 >= lenodf or find1 >= lenodf: count = -1 break odf0 = odf[find0] odf1 = odf[find1] """ Here `wpeak_ptr` is used as an indicator array that can take one of three values. If `wpeak_ptr[i]` is: * -1 : point i of the sphere is smaller than at least one neighbor. * 0 : point i is equal to all its neighbors. * 1 : point i is > at least one neighbor and >= all its neighbors. Each iteration of the loop is a comparison between neighboring points (the two point of an edge). At each iteration we update wpeak_ptr in the following way:: wpeak_ptr[smaller_point] = -1 if wpeak_ptr[larger_point] == 0: wpeak_ptr[larger_point] = 1 If the two points are equal, wpeak is left unchanged. 
""" if odf0 < odf1: wpeak_ptr[find0] = -1 wpeak_ptr[find1] |= 1 elif odf0 > odf1: wpeak_ptr[find0] |= 1 wpeak_ptr[find1] = -1 elif (odf0 != odf0) or (odf1 != odf1): count = -2 break if count < 0: return count # Count the number of peaks and use first count elements of wpeak_ptr to # hold indices of those peaks for i in range(lenodf): if wpeak_ptr[i] > 0: wpeak_ptr[count] = i count += 1 return count @cython.boundscheck(False) @cython.wraparound(False) def le_to_odf(cnp.ndarray[double, ndim=1] odf, \ cnp.ndarray[double, ndim=1] LEs,\ cnp.ndarray[double, ndim=1] radius,\ int odfn,\ int radiusn,\ int anglesn): """odf for interpolated Laplacian normalized signal """ cdef int m, i, j with nogil: for m in range(odfn): for i in range(radiusn): for j in range(anglesn): odf[m]=odf[m]-LEs[(m*radiusn+i)*anglesn+j]*radius[i] return @cython.boundscheck(False) @cython.wraparound(False) def sum_on_blocks_1d(cnp.ndarray[double, ndim=1] arr,\ cnp.ndarray[long, ndim=1] blocks,\ cnp.ndarray[double, ndim=1] out,int outn): """Summations on blocks of 1d array """ cdef: int m,i,j double sum with nogil: j=0 for m in range(outn): sum=0 for i in range(j,j+blocks[m]): sum+=arr[i] out[m]=sum j+=blocks[m] return def argmax_from_adj(vals, vertex_inds, adj_inds): """Indices of local maxima from `vals` given adjacent points Parameters ---------- vals : (N,) array, dtype np.float64 values at all vertices referred to in either of `vertex_inds` or `adj_inds`' vertex_inds : (V,) array indices into `vals` giving vertices that may be local maxima. adj_inds : sequence For every vertex in ``vertex_inds``, the indices (into `vals`) of the neighboring points Returns ------- inds : (M,) array Indices into `vals` giving local maxima of vals, given topology from `adj_inds`, and restrictions from `vertex_inds`. Inds are returned sorted by value at that index - i.e. smallest value (at index) first. """ cvals, cvertinds = proc_reco_args(vals, vertex_inds) cadj_counts, cadj_inds = adj_to_countarrs(adj_inds) return argmax_from_countarrs(cvals, cvertinds, cadj_counts, cadj_inds) def proc_reco_args(vals, vertinds): vals = np.ascontiguousarray(vals.astype(np.float)) vertinds = np.ascontiguousarray(vertinds.astype(np.uint32)) return vals, vertinds def adj_to_countarrs(adj_inds): """Convert adjacency sequence to counts and flattened indices We use this to provide expected input to ``argmax_from_countarrs`` Parameters ------------ adj_indices : sequence length V sequence of sequences, where sequence ``i`` contains the neighbors of a particular vertex. Returns --------- counts : (V,) array Number of neighbors for each vertex adj_inds : (n,) array flat array containing `adj_indices` unrolled as a vector """ counts = [] all_inds = [] for verts in adj_inds: v = list(verts) all_inds += v counts.append(len(v)) adj_inds = np.array(all_inds, dtype=np.uint32) counts = np.array(counts, dtype=np.uint32) return counts, adj_inds # prefetch argsort for small speedup cdef object argsort = np.argsort def argmax_from_countarrs(cnp.ndarray vals, cnp.ndarray vertinds, cnp.ndarray adj_counts, cnp.ndarray adj_inds): """Indices of local maxima from `vals` from count, array neighbors Parameters ---------- vals : (N,) array, dtype float values at all vertices referred to in either of `vertex_inds` or `adj_inds`' vertinds : (V,) array, dtype uint32 indices into `vals` giving vertices that may be local maxima. 
adj_counts : (V,) array, dtype uint32 For every vertex ``i`` in ``vertex_inds``, the number of neighbors for vertex ``i`` adj_inds : (P,) array, dtype uint32 Indices for neighbors for each point. ``P=sum(adj_counts)`` Returns ------- inds : (M,) array Indices into `vals` giving local maxima of vals, given topology from `adj_counts` and `adj_inds`, and restrictions from `vertex_inds`. Inds are returned sorted by value at that index - i.e. smallest value (at index) first. """ cdef: cnp.ndarray[cnp.float64_t, ndim=1] cvals = vals cnp.ndarray[cnp.uint32_t, ndim=1] cvertinds = vertinds cnp.ndarray[cnp.uint32_t, ndim=1] cadj_counts = adj_counts cnp.ndarray[cnp.uint32_t, ndim=1] cadj_inds = adj_inds # temporary arrays for storing maxes cnp.ndarray[cnp.float64_t, ndim=1] maxes = vals.copy() cnp.ndarray[cnp.uint32_t, ndim=1] maxinds = vertinds.copy() cnp.npy_intp i, j, V, C, n_maxes=0, adj_size, adj_pos=0 int is_max cnp.float64_t *vals_ptr double val cnp.uint32_t vert_ind, *vertinds_ptr, *counts_ptr, *adj_ptr, ind cnp.uint32_t vals_size, vert_size if not (cnp.PyArray_ISCONTIGUOUS(cvals) and cnp.PyArray_ISCONTIGUOUS(cvertinds) and cnp.PyArray_ISCONTIGUOUS(cadj_counts) and cnp.PyArray_ISCONTIGUOUS(cadj_inds)): raise ValueError('Need contiguous arrays as input') vals_size = cvals.shape[0] vals_ptr = cvals.data vertinds_ptr = cvertinds.data adj_ptr = cadj_inds.data counts_ptr = cadj_counts.data V = cadj_counts.shape[0] adj_size = cadj_inds.shape[0] if cvertinds.shape[0] < V: raise ValueError('Too few indices for adj arrays') for i in range(V): vert_ind = vertinds_ptr[i] if vert_ind >= vals_size: raise IndexError('Overshoot on vals') val = vals_ptr[vert_ind] C = counts_ptr[i] # check for overshoot adj_pos += C if adj_pos > adj_size: raise IndexError('Overshoot on adj_inds array') is_max = 1 for j in range(C): ind = adj_ptr[j] if ind >= vals_size: raise IndexError('Overshoot on vals') if val <= vals_ptr[ind]: is_max = 0 break if is_max: maxinds[n_maxes] = vert_ind maxes[n_maxes] = val n_maxes +=1 adj_ptr += C if n_maxes == 0: return np.array([]) # fancy indexing always produces a copy return maxinds[argsort(maxes[:n_maxes])] dipy-0.13.0/dipy/reconst/sfm.py000066400000000000000000000500231317371701200163230ustar00rootroot00000000000000""" The Sparse Fascicle Model. This is an implementation of the sparse fascicle model described in [Rokem2015]_. The multi b-value version of this model is described in [Rokem2014]_. .. [Rokem2015] Ariel Rokem, Jason D. Yeatman, Franco Pestilli, Kendrick N. Kay, Aviv Mezer, Stefan van der Walt, Brian A. Wandell (2015). Evaluating the accuracy of diffusion MRI models in white matter. PLoS ONE 10(4): e0123272. doi:10.1371/journal.pone.0123272 .. [Rokem2014] Ariel Rokem, Kimberly L. Chan, Jason D. Yeatman, Franco Pestilli, Brian A. Wandell (2014). Evaluating the accuracy of diffusion models at multiple b-values with cross-validation. ISMRM 2014. 
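A minimal, illustrative sketch of using the model (this assumes
scikit-learn is available for the default ElasticNet solver; otherwise
``solver='NNLS'`` can be passed)::

    import dipy.data as dpd
    from dipy.reconst.sfm import SparseFascicleModel

    data, gtab = dpd.dsi_voxels()
    sf_model = SparseFascicleModel(gtab)
    sf_fit = sf_model.fit(data)
    odf = sf_fit.odf(dpd.get_sphere())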
""" import warnings import numpy as np try: from numpy import nanmean except ImportError: from scipy.stats import nanmean from dipy.utils.optpkg import optional_package import dipy.core.geometry as geo import dipy.core.gradients as grad import dipy.core.optimize as opt import dipy.sims.voxel as sims import dipy.reconst.dti as dti import dipy.data as dpd from dipy.reconst.base import ReconstModel, ReconstFit from dipy.reconst.cache import Cache from dipy.core.onetime import auto_attr lm, has_sklearn, _ = optional_package('sklearn.linear_model') # If sklearn is unavailable, we can fall back on nnls (but we also warn the # user that we are about to do that): if not has_sklearn: w = "sklearn is not available, you can use 'nnls' method to fit" w += " the SparseFascicleModel" warnings.warn(w) # Isotropic signal models: these are models of the part of the signal that # changes with b-value, but does not change with direction. This collection is # extensible, by inheriting from IsotropicModel/IsotropicFit below: # First, a helper function to derive the fit signal for these models: def _to_fit_iso(data, gtab): data_no_b0 = data[..., ~gtab.b0s_mask] nzb0 = data_no_b0 > 0 nzb0_idx = np.where(nzb0) zb0_idx = np.where(~nzb0) if np.sum(gtab.b0s_mask) > 0: s0 = np.mean(data[..., gtab.b0s_mask], -1) to_fit = np.empty(data_no_b0.shape) to_fit[nzb0_idx] = data_no_b0[nzb0_idx] / s0[nzb0_idx[0]] to_fit[zb0_idx] = 0 else: to_fit = data_no_b0 return to_fit class IsotropicModel(ReconstModel): """ A base-class for the representation of isotropic signals. The default behavior, suitable for single b-value data is to calculate the mean in each voxel as an estimate of the signal that does not depend on direction. """ def __init__(self, gtab): """ Initialize an IsotropicModel. Parameters ---------- gtab : a GradientTable class instance """ ReconstModel.__init__(self, gtab) def fit(self, data): """ Fit an IsotropicModel. This boils down to finding the mean diffusion-weighted signal in each voxel Parameters ---------- data : ndarray Returns ------- IsotropicFit class instance. """ to_fit = _to_fit_iso(data, self.gtab) params = np.mean(np.reshape(to_fit, (-1, to_fit.shape[-1])), -1) return IsotropicFit(self, params) class IsotropicFit(ReconstFit): """ A fit object for representing the isotropic signal as the mean of the diffusion-weighted signal. """ def __init__(self, model, params): """ Initialize an IsotropicFit object. Parameters ---------- model : IsotropicModel class instance params : ndarray The mean isotropic model parameters (the mean diffusion-weighted signal in each voxel). n_vox : int The number of voxels for which the fit was done. """ self.model = model self.params = params def predict(self, gtab=None): """ Predict the isotropic signal. Based on a gradient table. In this case, the (naive!) prediction will be the mean of the diffusion-weighted signal in the voxels. Parameters ---------- gtab : a GradientTable class instance (optional) Defaults to use the gtab from the IsotropicModel from which this fit was derived. """ if gtab is None: gtab = self.model.gtab return self.params[..., np.newaxis] + np.zeros((self.params.shape[0], np.sum(~gtab.b0s_mask)) ) class ExponentialIsotropicModel(IsotropicModel): """ Representing the isotropic signal as a fit to an exponential decay function with b-values """ def fit(self, data): """ Parameters ---------- data : ndarray Returns ------- ExponentialIsotropicFit class instance. 
""" to_fit = _to_fit_iso(data, self.gtab) # Fitting to the log-transformed relative data is much faster: nz_idx = to_fit > 0 to_fit[nz_idx] = np.log(to_fit[nz_idx]) to_fit[~nz_idx] = -np.inf p = nanmean(to_fit / self.gtab.bvals[~self.gtab.b0s_mask], -1) params = -p return ExponentialIsotropicFit(self, params) class ExponentialIsotropicFit(IsotropicFit): """ A fit to the ExponentialIsotropicModel object, based on data. """ def predict(self, gtab=None): """ Predict the isotropic signal, based on a gradient table. In this case, the prediction will be for an exponential decay with the mean diffusivity derived from the data that was fit. Parameters ---------- gtab : a GradientTable class instance (optional) Defaults to use the gtab from the IsotropicModel from which this fit was derived. """ if gtab is None: gtab = self.model.gtab return np.exp(-gtab.bvals[~gtab.b0s_mask] * (np.zeros((self.params.shape[0], np.sum(~gtab.b0s_mask))) + self.params[..., np.newaxis])) def sfm_design_matrix(gtab, sphere, response, mode='signal'): """ Construct the SFM design matrix Parameters ---------- gtab : GradientTable or Sphere Sets the rows of the matrix, if the mode is 'signal', this should be a GradientTable. If mode is 'odf' this should be a Sphere sphere : Sphere Sets the columns of the matrix response : list of 3 elements The eigenvalues of a tensor which will serve as a kernel function. mode : str {'signal' | 'odf'}, optional Choose the (default) 'signal' for a design matrix containing predicted signal in the measurements defined by the gradient table for putative fascicles oriented along the vertices of the sphere. Otherwise, choose 'odf' for an odf convolution matrix, with values of the odf calculated from a tensor with the provided response eigenvalues, evaluated at the b-vectors in the gradient table, for the tensors with prinicipal diffusion directions along the vertices of the sphere. Returns ------- mat : ndarray A design matrix that can be used for one of the following operations: when the 'signal' mode is used, each column contains the putative signal in each of the bvectors of the `gtab` if a fascicle is oriented in the direction encoded by the sphere vertex corresponding to this column. This is used for deconvolution with a measured DWI signal. If the 'odf' mode is chosen, each column instead contains the values of the tensor ODF for a tensor with a principal diffusion direction corresponding to this vertex. This is used to generate odfs from the fits of the SFM for the purpose of tracking. Examples -------- >>> import dipy.data as dpd >>> data, gtab = dpd.dsi_voxels() >>> sphere = dpd.get_sphere() >>> from dipy.reconst.sfm import sfm_design_matrix A canonical tensor approximating corpus-callosum voxels [Rokem2014]_: >>> tensor_matrix = sfm_design_matrix(gtab, sphere, ... [0.0015, 0.0005, 0.0005]) A 'stick' function ([Behrens2007]_): >>> stick_matrix = sfm_design_matrix(gtab, sphere, [0.001, 0, 0]) Notes ----- .. [Rokem2015] Ariel Rokem, Jason D. Yeatman, Franco Pestilli, Kendrick N. Kay, Aviv Mezer, Stefan van der Walt, Brian A. Wandell (2015). Evaluating the accuracy of diffusion MRI models in white matter. PLoS ONE 10(4): e0123272. doi:10.1371/journal.pone.0123272 .. [Rokem2014] Ariel Rokem, Kimberly L. Chan, Jason D. Yeatman, Franco Pestilli, Brian A. Wandell (2014). Evaluating the accuracy of diffusion models at multiple b-values with cross-validation. ISMRM 2014. .. 
[Behrens2007] Behrens TEJ, Berg HJ, Jbabdi S, Rushworth MFS, Woolrich MW (2007): Probabilistic diffusion tractography with multiple fibre orientations: What can we gain? Neuroimage 34:144-55. """ if mode == 'signal': mat_gtab = grad.gradient_table(gtab.bvals[~gtab.b0s_mask], gtab.bvecs[~gtab.b0s_mask]) # Preallocate: mat = np.empty((np.sum(~gtab.b0s_mask), sphere.vertices.shape[0])) elif mode == 'odf': mat = np.empty((gtab.x.shape[0], sphere.vertices.shape[0])) # Calculate column-wise: for ii, this_dir in enumerate(sphere.vertices): # Rotate the canonical tensor towards this vertex and calculate the # signal you would have gotten in the direction evecs = sims.all_tensor_evecs(this_dir) if mode == 'signal': sig = sims.single_tensor(mat_gtab, evals=response, evecs=evecs) # For regressors based on the single tensor, remove $e^{-bD}$ iso_sig = np.exp(-mat_gtab.bvals * np.mean(response)) mat[:, ii] = sig - iso_sig elif mode == 'odf': # Stick function if response[1] == 0 or response[2] == 0: jj = sphere.find_closest(evecs[0]) mat[jj, ii] = 1 else: odf = sims.single_tensor_odf(gtab.vertices, evals=response, evecs=evecs) mat[:, ii] = odf return mat class SparseFascicleModel(ReconstModel, Cache): def __init__(self, gtab, sphere=None, response=[0.0015, 0.0005, 0.0005], solver='ElasticNet', l1_ratio=0.5, alpha=0.001, isotropic=None): """ Initialize a Sparse Fascicle Model Parameters ---------- gtab : GradientTable class instance sphere : Sphere class instance, optional A sphere on which coefficients will be estimated. Default: symmetric sphere with 362 points (from :mod:`dipy.data`). response : (3,) array-like, optional The eigenvalues of a canonical tensor to be used as the response function of single-fascicle signals. Default:[0.0015, 0.0005, 0.0005] solver : string, dipy.core.optimize.SKLearnLinearSolver object, or sklearn.linear_model.base.LinearModel object, optional. This will determine the algorithm used to solve the set of linear equations underlying this model. If it is a string it needs to be one of the following: {'ElasticNet', 'NNLS'}. Otherwise, it can be an object that inherits from `dipy.optimize.SKLearnLinearSolver`. Default: 'ElasticNet'. l1_ratio : float, optional Sets the balance betwee L1 and L2 regularization in ElasticNet [Zou2005]_. Default: 0.5 alpha : float, optional Sets the balance between least-squares error and L1/L2 regularization in ElasticNet [Zou2005]_. Default: 0.001 isotropic : IsotropicModel class instance This is a class that implements the function that calculates the value of the isotropic signal. This is a value of the signal that is independent of direction, and therefore removed from both sides of the SFM equation. The default is an instance of IsotropicModel, but other functions can be inherited from IsotropicModel to implement other fits to the aspects of the data that depend on b-value, but not on direction. Notes ----- This is an implementation of the SFM, described in [Rokem2015]_. .. [Rokem2014] Ariel Rokem, Jason D. Yeatman, Franco Pestilli, Kendrick N. Kay, Aviv Mezer, Stefan van der Walt, Brian A. Wandell (2014). Evaluating the accuracy of diffusion MRI models in white matter. PLoS ONE 10(4): e0123272. doi:10.1371/journal.pone.0123272 .. [Zou2005] Zou H, Hastie T (2005). Regularization and variable selection via the elastic net. 
J R Stat Soc B:301-320 """ ReconstModel.__init__(self, gtab) if sphere is None: sphere = dpd.get_sphere() self.sphere = sphere self.response = np.asarray(response) if isotropic is None: isotropic = IsotropicModel self.isotropic = isotropic if solver == 'ElasticNet': self.solver = lm.ElasticNet(l1_ratio=l1_ratio, alpha=alpha, positive=True, warm_start=True) elif solver == 'NNLS' or solver == 'nnls': self.solver = opt.NonNegativeLeastSquares() elif (isinstance(solver, opt.SKLearnLinearSolver) or has_sklearn and isinstance(solver, lm.base.LinearModel)): self.solver = solver else: e_s = "The `solver` key-word argument needs to be: " e_s += "'ElasticNet', 'NNLS', or a " e_s += "`dipy.optimize.SKLearnLinearSolver` object" raise ValueError(e_s) @auto_attr def design_matrix(self): """ The design matrix for a SFM. Returns ------- ndarray The design matrix, where each column is a rotated version of the response function. """ return sfm_design_matrix(self.gtab, self.sphere, self.response, 'signal') def fit(self, data, mask=None): """ Fit the SparseFascicleModel object to data. Parameters ---------- data : array The measured signal. mask : array, optional A boolean array used to mark the coordinates in the data that should be analyzed. Has the shape `data.shape[:-1]`. Default: None, which implies that all points should be analyzed. Returns ------- SparseFascicleFit object """ if mask is None: # Flatten it to 2D either way: data_in_mask = np.reshape(data, (-1, data.shape[-1])) else: # Check for valid shape of the mask if mask.shape != data.shape[:-1]: raise ValueError("Mask is not the same shape as data.") mask = np.array(mask, dtype=bool, copy=False) data_in_mask = np.reshape(data[mask], (-1, data.shape[-1])) # Fitting is done on the relative signal (S/S0): flat_S0 = np.mean(data_in_mask[..., self.gtab.b0s_mask], -1) flat_S = (data_in_mask[..., ~self.gtab.b0s_mask] / flat_S0[..., None]) isotropic = self.isotropic(self.gtab).fit(data_in_mask) flat_params = np.zeros((data_in_mask.shape[0], self.design_matrix.shape[-1])) isopredict = isotropic.predict() for vox, vox_data in enumerate(flat_S): # In voxels in which S0 is 0, we just want to keep the # parameters at all-zeros, and avoid nasty sklearn errors: if not (np.any(~np.isfinite(vox_data)) or np.all(vox_data == 0)): fit_it = vox_data - isopredict[vox] with warnings.catch_warnings(): warnings.simplefilter("ignore") flat_params[vox] = self.solver.fit(self.design_matrix, fit_it).coef_ if mask is None: out_shape = data.shape[:-1] + (-1, ) beta = flat_params.reshape(out_shape) S0 = flat_S0.reshape(data.shape[:-1]) else: beta = np.zeros(data.shape[:-1] + (self.design_matrix.shape[-1],)) beta[mask, :] = flat_params S0 = np.zeros(data.shape[:-1]) S0[mask] = flat_S0 return SparseFascicleFit(self, beta, S0, isotropic) class SparseFascicleFit(ReconstFit): def __init__(self, model, beta, S0, iso): """ Initalize a SparseFascicleFit class instance Parameters ---------- model : a SparseFascicleModel object. beta : ndarray The parameters of fit to data. S0 : ndarray The mean non-diffusion-weighted signal. iso : IsotropicFit class instance A representation of the isotropic signal, together with parameters of the isotropic signal in each voxel, that is capable of deriving/predicting an isotropic signal, based on a gradient-table. 
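Notes
-----
Instances of this class are normally produced by
``SparseFascicleModel.fit`` rather than constructed directly. For
example (``gtab``, ``data`` and ``sphere`` are placeholder names)::

    sf_fit = SparseFascicleModel(gtab).fit(data)
    odf = sf_fit.odf(sphere)      # fascicle ODF sampled on `sphere`
    pred = sf_fit.predict(gtab)   # predicted signal for the same gtab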
""" self.model = model self.beta = beta self.S0 = S0 self.iso = iso def odf(self, sphere): """ The orientation distribution function of the SFM Parameters ---------- sphere : Sphere The points in which the ODF is evaluated Returns ------- odf : ndarray of shape (x, y, z, sphere.vertices.shape[0]) """ odf_matrix = self.model.cache_get('odf_matrix', key=sphere) if odf_matrix is None: odf_matrix = sfm_design_matrix(sphere, self.model.sphere, self.model.response, mode='odf') self.model.cache_set('odf_matrix', key=sphere, value=odf_matrix) flat_beta = self.beta.reshape(-1, self.beta.shape[-1]) flat_odf = np.dot(odf_matrix, flat_beta.T) return flat_odf.T.reshape(self.beta.shape[:-1] + (odf_matrix.shape[0], )) def predict(self, gtab=None, response=None, S0=None): """ Predict the signal based on the SFM parameters Parameters ---------- gtab : GradientTable, optional The bvecs/bvals to predict the signal on. Default: the gtab from the model object. response : list of 3 elements, optional The eigenvalues of a tensor which will serve as a kernel function. Default: the response of the model object. Default to use `model.response`. S0 : float or array, optional The non-diffusion-weighted signal. Default: use the S0 of the data Returns ------- pred_sig : ndarray The signal predicted in each voxel/direction """ if response is None: response = self.model.response if gtab is None: _matrix = self.model.design_matrix gtab = self.model.gtab # The only thing we can't change at this point is the sphere we use # (which sets the width of our design matrix): else: _matrix = sfm_design_matrix(gtab, self.model.sphere, response) # Get them all at once: beta_all = self.beta.reshape(-1, self.beta.shape[-1]) pred_weighted = np.dot(_matrix, beta_all.T).T pred_weighted = pred_weighted.reshape(self.beta.shape[:-1] + (_matrix.shape[0],)) if S0 is None: S0 = self.S0 if isinstance(S0, np.ndarray): S0 = S0[..., None] iso_signal = self.iso.predict(gtab) pre_pred_sig = S0 * (pred_weighted + iso_signal.reshape(pred_weighted.shape)) pred_sig = np.zeros(pre_pred_sig.shape[:-1] + (gtab.bvals.shape[0],)) pred_sig[..., ~gtab.b0s_mask] = pre_pred_sig pred_sig[..., gtab.b0s_mask] = S0 return pred_sig.squeeze() dipy-0.13.0/dipy/reconst/shm.py000077500000000000000000001034451317371701200163370ustar00rootroot00000000000000""" Tools for using spherical harmonic models to fit diffusion data References ---------- Aganj, I., et. al. 2009. ODF Reconstruction in Q-Ball Imaging With Solid Angle Consideration. Descoteaux, M., et. al. 2007. Regularized, fast, and robust analytical Q-ball imaging. Tristan-Vega, A., et. al. 2010. A new methodology for estimation of fiber populations in white matter of the brain with Funk-Radon transform. Tristan-Vega, A., et. al. 2009. Estimation of fiber orientation probability density functions in high angular resolution diffusion imaging. Note about the Transpose: In the literature the matrix representation of these methods is often written as Y = Bx where B is some design matrix and Y and x are column vectors. In our case the input data, a dwi stored as a nifti file for example, is stored as row vectors (ndarrays) of the form (x, y, z, n), where n is the number of diffusion directions. We could transpose and reshape the data to be (n, x*y*z), so that we could directly plug it into the above equation. However, I have chosen to keep the data as is and implement the relevant equations rewritten in the following form: Y.T = x.T B.T, or in python syntax data = np.dot(sh_coef, B.T) where data is Y.T and sh_coef is x.T. 
""" import numpy as np from numpy import concatenate, diag, diff, empty, eye, sqrt, unique, dot from numpy.linalg import pinv, svd from numpy.random import randint from dipy.reconst.odf import OdfModel, OdfFit from dipy.core.geometry import cart2sphere from dipy.core.onetime import auto_attr from dipy.reconst.cache import Cache from distutils.version import LooseVersion import scipy from scipy.special import lpn, lpmv, gammaln if LooseVersion(scipy.version.short_version) >= LooseVersion('0.15.0'): SCIPY_15_PLUS = True import scipy.special as sps else: SCIPY_15_PLUS = False def _copydoc(obj): def bandit(f): f.__doc__ = obj.__doc__ return f return bandit def forward_sdeconv_mat(r_rh, n): """ Build forward spherical deconvolution matrix Parameters ---------- r_rh : ndarray Rotational harmonics coefficients for the single fiber response function. Each element `rh[i]` is associated with spherical harmonics of degree `2*i`. n : ndarray The degree of spherical harmonic function associated with each row of the deconvolution matrix. Only even degrees are allowed Returns ------- R : ndarray (N, N) Deconvolution matrix with shape (N, N) """ if np.any(n % 2): raise ValueError("n has odd degrees, expecting only even degrees") return np.diag(r_rh[n // 2]) def sh_to_rh(r_sh, m, n): """ Spherical harmonics (SH) to rotational harmonics (RH) Calculate the rotational harmonic decomposition up to harmonic order `m`, degree `n` for an axially and antipodally symmetric function. Note that all ``m != 0`` coefficients will be ignored as axial symmetry is assumed. Hence, there will be ``(sh_order/2 + 1)`` non-zero coefficients. Parameters ---------- r_sh : ndarray (N,) ndarray of SH coefficients for the single fiber response function. These coefficients must correspond to the real spherical harmonic functions produced by `shm.real_sph_harm`. m : ndarray (N,) The order of the spherical harmonic function associated with each coefficient. n : ndarray (N,) The degree of the spherical harmonic function associated with each coefficient. Returns ------- r_rh : ndarray (``(sh_order + 1)*(sh_order + 2)/2``,) Rotational harmonics coefficients representing the input `r_sh` See Also -------- shm.real_sph_harm, shm.real_sym_sh_basis References ---------- .. [1] Tournier, J.D., et al. NeuroImage 2007. Robust determination of the fibre orientation distribution in diffusion MRI: Non-negativity constrained super-resolved spherical deconvolution """ mask = m == 0 # The delta function at theta = phi = 0 is known to have zero coefficients # where m != 0, therefore we need only compute the coefficients at m=0. dirac_sh = gen_dirac(0, n[mask], 0, 0) r_rh = r_sh[mask] / dirac_sh return r_rh def gen_dirac(m, n, theta, phi): """ Generate Dirac delta function orientated in (theta, phi) on the sphere The spherical harmonics (SH) representation of this Dirac is returned as coefficients to spherical harmonic functions produced by `shm.real_sph_harm`. Parameters ---------- m : ndarray (N,) The order of the spherical harmonic function associated with each coefficient. n : ndarray (N,) The degree of the spherical harmonic function associated with each coefficient. theta : float [0, 2*pi] The azimuthal (longitudinal) coordinate. phi : float [0, pi] The polar (colatitudinal) coordinate. See Also -------- shm.real_sph_harm, shm.real_sym_sh_basis Returns ------- dirac : ndarray SH coefficients representing the Dirac function. The shape of this is `(m + 2) * (m + 1) / 2`. 
""" return real_sph_harm(m, n, theta, phi) def spherical_harmonics(m, n, theta, phi): x = np.cos(phi) val = lpmv(m, n, x).astype(complex) val *= np.sqrt((2 * n + 1) / 4.0 / np.pi) val *= np.exp(0.5 * (gammaln(n - m + 1) - gammaln(n + m + 1))) val = val * np.exp(1j * m * theta) return val if SCIPY_15_PLUS: def spherical_harmonics(m, n, theta, phi): return sps.sph_harm(m, n, theta, phi, dtype=complex) spherical_harmonics.__doc__ = r""" Compute spherical harmonics This may take scalar or array arguments. The inputs will be broadcasted against each other. Parameters ---------- m : int ``|m| <= n`` The order of the harmonic. n : int ``>= 0`` The degree of the harmonic. theta : float [0, 2*pi] The azimuthal (longitudinal) coordinate. phi : float [0, pi] The polar (colatitudinal) coordinate. Returns ------- y_mn : complex float The harmonic $Y^m_n$ sampled at `theta` and `phi`. Notes ----- This is a faster implementation of scipy.special.sph_harm for scipy version < 0.15.0. For scipy 0.15 and onwards, we use the scipy implementation of the function """ def real_sph_harm(m, n, theta, phi): r""" Compute real spherical harmonics. Where the real harmonic $Y^m_n$ is defined to be: Imag($Y^m_n$) * sqrt(2) if m > 0 $Y^0_n$ if m = 0 Real($Y^|m|_n$) * sqrt(2) if m < 0 This may take scalar or array arguments. The inputs will be broadcasted against each other. Parameters ---------- m : int ``|m| <= n`` The order of the harmonic. n : int ``>= 0`` The degree of the harmonic. theta : float [0, 2*pi] The azimuthal (longitudinal) coordinate. phi : float [0, pi] The polar (colatitudinal) coordinate. Returns -------- y_mn : real float The real harmonic $Y^m_n$ sampled at `theta` and `phi`. See Also -------- scipy.special.sph_harm """ # dipy uses a convention for theta and phi that is reversed with respect to # function signature of scipy.special.sph_harm sh = spherical_harmonics(np.abs(m), n, phi, theta) real_sh = np.where(m > 0, sh.imag, sh.real) real_sh *= np.where(m == 0, 1., np.sqrt(2)) return real_sh def real_sym_sh_mrtrix(sh_order, theta, phi): """ Compute real spherical harmonics as in mrtrix, where the real harmonic $Y^m_n$ is defined to be:: Real($Y^m_n$) if m > 0 $Y^0_n$ if m = 0 Imag($Y^|m|_n$) if m < 0 This may take scalar or array arguments. The inputs will be broadcasted against each other. Parameters ----------- sh_order : int The maximum degree or the spherical harmonic basis. theta : float [0, pi] The polar (colatitudinal) coordinate. phi : float [0, 2*pi] The azimuthal (longitudinal) coordinate. Returns -------- y_mn : real float The real harmonic $Y^m_n$ sampled at `theta` and `phi` as implemented in mrtrix. Warning: the basis is Tournier et al 2004 and 2007 is slightly different. m : array The order of the harmonics. n : array The degree of the harmonics. """ m, n = sph_harm_ind_list(sh_order) phi = np.reshape(phi, [-1, 1]) theta = np.reshape(theta, [-1, 1]) m = -m real_sh = real_sph_harm(m, n, theta, phi) real_sh /= np.where(m == 0, 1., np.sqrt(2)) return real_sh, m, n def real_sym_sh_basis(sh_order, theta, phi): """Samples a real symmetric spherical harmonic basis at point on the sphere Samples the basis functions up to order `sh_order` at points on the sphere given by `theta` and `phi`. The basis functions are defined here the same way as in fibernavigator [1]_ where the real harmonic $Y^m_n$ is defined to be: Imag($Y^m_n$) * sqrt(2) if m > 0 $Y^0_n$ if m = 0 Real($Y^|m|_n$) * sqrt(2) if m < 0 This may take scalar or array arguments. The inputs will be broadcasted against each other. 
Parameters ----------- sh_order : int even int > 0, max spherical harmonic degree theta : float [0, 2*pi] The azimuthal (longitudinal) coordinate. phi : float [0, pi] The polar (colatitudinal) coordinate. Returns -------- y_mn : real float The real harmonic $Y^m_n$ sampled at `theta` and `phi` m : array The order of the harmonics. n : array The degree of the harmonics. References ---------- .. [1] https://github.com/scilus/fibernavigator """ m, n = sph_harm_ind_list(sh_order) phi = np.reshape(phi, [-1, 1]) theta = np.reshape(theta, [-1, 1]) real_sh = real_sph_harm(m, n, theta, phi) return real_sh, m, n sph_harm_lookup = {None: real_sym_sh_basis, "mrtrix": real_sym_sh_mrtrix, "fibernav": real_sym_sh_basis} def sph_harm_ind_list(sh_order): """ Returns the degree (n) and order (m) of all the symmetric spherical harmonics of degree less then or equal to `sh_order`. The results, `m_list` and `n_list` are kx1 arrays, where k depends on sh_order. They can be passed to :func:`real_sph_harm`. Parameters ---------- sh_order : int even int > 0, max degree to return Returns ------- m_list : array orders of even spherical harmonics n_list : array degrees of even spherical harmonics See also -------- real_sph_harm """ if sh_order % 2 != 0: raise ValueError('sh_order must be an even integer >= 0') n_range = np.arange(0, sh_order + 1, 2, dtype=int) n_list = np.repeat(n_range, n_range * 2 + 1) ncoef = int((sh_order + 2) * (sh_order + 1) // 2) offset = 0 m_list = empty(ncoef, 'int') for ii in n_range: m_list[offset:offset + 2 * ii + 1] = np.arange(-ii, ii + 1) offset = offset + 2 * ii + 1 # makes the arrays ncoef by 1, allows for easy broadcasting later in code return (m_list, n_list) def order_from_ncoef(ncoef): """ Given a number n of coefficients, calculate back the sh_order """ # Solve the quadratic equation derived from : # ncoef = (sh_order + 2) * (sh_order + 1) / 2 return int(-3 + np.sqrt(9 - 4 * (2-2*ncoef)))/2 def smooth_pinv(B, L): """Regularized pseudo-inverse Computes a regularized least square inverse of B Parameters ---------- B : array_like (n, m) Matrix to be inverted L : array_like (n,) Returns ------- inv : ndarray (m, n) regularized least square inverse of B Notes ----- In the literature this inverse is often written $(B^{T}B+L^{2})^{-1}B^{T}$. However here this inverse is implemented using the pseudo-inverse because it is more numerically stable than the direct implementation of the matrix product. """ L = diag(L) inv = pinv(concatenate((B, L))) return inv[:, :len(B)] def lazy_index(index): """Produces a lazy index Returns a slice that can be used for indexing an array, if no slice can be made index is returned as is. """ index = np.array(index) assert index.ndim == 1 if index.dtype.kind == 'b': index = index.nonzero()[0] if len(index) == 1: return slice(index[0], index[0] + 1) step = unique(diff(index)) if len(step) != 1 or step[0] == 0: return index else: return slice(index[0], index[-1] + 1, step[0]) def _gfa_sh(coef, sh0_index=0): """The gfa of the odf, computed from the spherical harmonic coefficients This is a private function because it only works for coefficients of normalized sh bases. Parameters ---------- coef : array The coefficients, using a normalized sh basis, that represent each odf. sh0_index : int The index of the coefficient associated with the 0th order sh harmonic. Returns ------- gfa_values : array The gfa of each odf. 
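    Notes
    -----
    In terms of the coefficients this is simply (a restatement of the code
    below, not an additional definition):

        gfa = np.sqrt(1. - coef[..., sh0_index] ** 2 / np.sum(coef ** 2, -1))

    with the 0/0 case (an all-zero odf) mapped to gfa = 0.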
""" coef_sq = coef**2 numer = coef_sq[..., sh0_index] denom = (coef_sq).sum(-1) # The sum of the square of the coefficients being zero is the same as all # the coefficients being zero allzero = denom == 0 # By adding 1 to numer and denom where both and are 0, we prevent 0/0 numer = numer + allzero denom = denom + allzero return np.sqrt(1. - (numer / denom)) class SphHarmModel(OdfModel, Cache): """To be subclassed by all models that return a SphHarmFit when fit.""" def sampling_matrix(self, sphere): """The matrix needed to sample ODFs from coefficients of the model. Parameters ---------- sphere : Sphere Points used to sample ODF. Returns ------- sampling_matrix : array The size of the matrix will be (N, M) where N is the number of vertices on sphere and M is the number of coefficients needed by the model. """ sampling_matrix = self.cache_get("sampling_matrix", sphere) if sampling_matrix is None: sh_order = self.sh_order theta = sphere.theta phi = sphere.phi sampling_matrix, m, n = real_sym_sh_basis(sh_order, theta, phi) self.cache_set("sampling_matrix", sphere, sampling_matrix) return sampling_matrix class QballBaseModel(SphHarmModel): """To be subclassed by Qball type models.""" def __init__(self, gtab, sh_order, smooth=0.006, min_signal=1., assume_normed=False): """Creates a model that can be used to fit or sample diffusion data Arguments --------- gtab : GradientTable Diffusion gradients used to acquire data sh_order : even int >= 0 the spherical harmonic order of the model smooth : float between 0 and 1, optional The regularization parameter of the model min_signal : float, > 0, optional During fitting, all signal values less than `min_signal` are clipped to `min_signal`. This is done primarily to avoid values less than or equal to zero when taking logs. assume_normed : bool, optional If True, clipping and normalization of the data with respect to the mean B0 signal are skipped during mode fitting. This is an advanced feature and should be used with care. 
See Also -------- normalize_data """ SphHarmModel.__init__(self, gtab) self._where_b0s = lazy_index(gtab.b0s_mask) self._where_dwi = lazy_index(~gtab.b0s_mask) self.assume_normed = assume_normed self.min_signal = min_signal x, y, z = gtab.gradients[self._where_dwi].T r, theta, phi = cart2sphere(x, y, z) B, m, n = real_sym_sh_basis(sh_order, theta[:, None], phi[:, None]) L = -n * (n + 1) legendre0 = lpn(sh_order, 0)[0] F = legendre0[n] self.sh_order = sh_order self.B = B self.m = m self.n = n self._set_fit_matrix(B, L, F, smooth) def _set_fit_matrix(self, *args): """Should be set in a subclass and is called by __init__""" msg = "User must implement this method in a subclass" raise NotImplementedError(msg) def fit(self, data, mask=None): """Fits the model to diffusion data and returns the model fit""" # Normalize the data and fit coefficients if not self.assume_normed: data = normalize_data(data, self._where_b0s, self.min_signal) # Compute coefficients using abstract method coef = self._get_shm_coef(data) # Apply the mask to the coefficients if mask is not None: mask = np.asarray(mask, dtype=bool) coef *= mask[..., None] return SphHarmFit(self, coef, mask) class SphHarmFit(OdfFit): """Diffusion data fit to a spherical harmonic model""" def __init__(self, model, shm_coef, mask): self.model = model self._shm_coef = shm_coef self.mask = mask @property def shape(self): return self._shm_coef.shape[:-1] def __getitem__(self, index): """Allowing indexing into fit""" # Index shm_coefficients if isinstance(index, tuple): coef_index = index + (Ellipsis,) else: coef_index = index new_coef = self._shm_coef[coef_index] # Index mask if self.mask is not None: new_mask = self.mask[index] assert new_mask.shape == new_coef.shape[:-1] else: new_mask = None return SphHarmFit(self.model, new_coef, new_mask) def odf(self, sphere): """Samples the odf function on the points of a sphere Parameters ---------- sphere : Sphere The points on which to sample the odf. Returns ------- values : ndarray The value of the odf on each point of `sphere`. """ B = self.model.sampling_matrix(sphere) return dot(self._shm_coef, B.T) @auto_attr def gfa(self): return _gfa_sh(self._shm_coef, 0) @property def shm_coeff(self): """The spherical harmonic coefficients of the odf Make this a property for now, if there is a usecase for modifying the coefficients we can add a setter or expose the coefficients more directly """ return self._shm_coef def predict(self, gtab=None, S0=1.0): """ Predict the diffusion signal from the model coefficients. Parameters ---------- gtab : a GradientTable class instance The directions and bvalues on which prediction is desired S0 : float array The mean non-diffusion-weighted signal in each voxel. Default: 1.0 in all voxels """ if not hasattr(self.model, 'predict'): msg = "This model does not have prediction implemented yet" raise NotImplementedError(msg) return self.model.predict(self.shm_coeff, gtab, S0) class CsaOdfModel(QballBaseModel): """Implementation of Constant Solid Angle reconstruction method. References ---------- .. [1] Aganj, I., et. al. 2009. ODF Reconstruction in Q-Ball Imaging With Solid Angle Consideration. 
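    Examples
    --------
    A minimal usage sketch (assuming ``gtab`` is a GradientTable and
    ``data`` a diffusion-weighted volume; the variable names are
    illustrative only):

        from dipy.data import get_sphere
        from dipy.reconst.shm import CsaOdfModel

        csa_model = CsaOdfModel(gtab, sh_order=4)
        csa_fit = csa_model.fit(data)
        odf = csa_fit.odf(get_sphere('symmetric724'))
        gfa = csa_fit.gfa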
""" min = .001 max = .999 _n0_const = .5 / np.sqrt(np.pi) def _set_fit_matrix(self, B, L, F, smooth): """The fit matrix, is used by fit_coefficients to return the coefficients of the odf""" invB = smooth_pinv(B, sqrt(smooth) * L) L = L[:, None] F = F[:, None] self._fit_matrix = (F * L) / (8 * np.pi) * invB def _get_shm_coef(self, data, mask=None): """Returns the coefficients of the model""" data = data[..., self._where_dwi] data = data.clip(self.min, self.max) loglog_data = np.log(-np.log(data)) sh_coef = dot(loglog_data, self._fit_matrix.T) sh_coef[..., 0] = self._n0_const return sh_coef class OpdtModel(QballBaseModel): """Implementation of Orientation Probability Density Transform reconstruction method. References ---------- .. [1] Tristan-Vega, A., et. al. 2010. A new methodology for estimation of fiber populations in white matter of the brain with Funk-Radon transform. .. [2] Tristan-Vega, A., et. al. 2009. Estimation of fiber orientation probability density functions in high angular resolution diffusion imaging. """ def _set_fit_matrix(self, B, L, F, smooth): invB = smooth_pinv(B, sqrt(smooth) * L) L = L[:, None] F = F[:, None] delta_b = F * L * invB delta_q = 4 * F * invB self._fit_matrix = delta_b, delta_q def _get_shm_coef(self, data, mask=None): """Returns the coefficients of the model""" delta_b, delta_q = self._fit_matrix return _slowadc_formula(data[..., self._where_dwi], delta_b, delta_q) def _slowadc_formula(data, delta_b, delta_q): """formula used in SlowAdcOpdfModel""" logd = -np.log(data) return dot(logd * (1.5 - logd) * data, delta_q.T) - dot(data, delta_b.T) class QballModel(QballBaseModel): """Implementation of regularized Qball reconstruction method. References ---------- .. [1] Descoteaux, M., et. al. 2007. Regularized, fast, and robust analytical Q-ball imaging. """ def _set_fit_matrix(self, B, L, F, smooth): invB = smooth_pinv(B, sqrt(smooth) * L) F = F[:, None] self._fit_matrix = F * invB def _get_shm_coef(self, data, mask=None): """Returns the coefficients of the model""" return dot(data[..., self._where_dwi], self._fit_matrix.T) def normalize_data(data, where_b0, min_signal=1., out=None): """Normalizes the data with respect to the mean b0 """ if out is None: out = np.array(data, dtype='float32', copy=True) else: if out.dtype.kind != 'f': raise ValueError("out must be floating point") out[:] = data out.clip(min_signal, out=out) b0 = out[..., where_b0].mean(-1) out /= b0[..., None] return out def hat(B): """Returns the hat matrix for the design matrix B """ U, S, V = svd(B, False) H = dot(U, U.T) return H def lcr_matrix(H): """Returns a matrix for computing leveraged, centered residuals from data if r = (d-Hd), the leveraged centered residuals are lcr = (r/l)-mean(r/l) ruturns the matrix R, such lcr = Rd """ if H.ndim != 2 or H.shape[0] != H.shape[1]: raise ValueError('H should be a square matrix') leverages = sqrt(1 - H.diagonal()) leverages = leverages[:, None] R = (eye(len(H)) - H) / leverages return R - R.mean(0) def bootstrap_data_array(data, H, R, permute=None): """Applies the Residual Bootstraps to the data given H and R data must be normalized, ie 0 < data <= 1 This function, and the bootstrap_data_voxel function, calculate residual-bootsrap samples given a Hat matrix and a Residual matrix. These samples can be used for non-parametric statistics or for bootstrap probabilistic tractography: References ---------- .. [1] J. I. Berman, et al., "Probabilistic streamline q-ball tractography using the residual bootstrap" 2008. .. 
[2] HA Haroon, et al., "Using the model-based residual bootstrap to quantify uncertainty in fiber orientations from Q-ball analysis" 2009. .. [3] B. Jeurissen, et al., "Probabilistic Fiber Tracking Using the Residual Bootstrap with Constrained Spherical Deconvolution" 2011. """ if permute is None: permute = randint(data.shape[-1], size=data.shape[-1]) assert R.shape == H.shape assert len(permute) == R.shape[-1] R = R[permute] data = dot(data, (H + R).T) return data def bootstrap_data_voxel(data, H, R, permute=None): """Like bootstrap_data_array but faster when for a single voxel data must be 1d and normalized """ if permute is None: permute = randint(data.shape[-1], size=data.shape[-1]) r = dot(data, R.T) boot_data = dot(data, H.T) boot_data += r[permute] return boot_data class ResidualBootstrapWrapper(object): """Returns a residual bootstrap sample of the signal_object when indexed Wraps a signal_object, this signal object can be an interpolator. When indexed, the the wrapper indexes the signal_object to get the signal. There wrapper than samples the residual boostrap distribution of signal and returns that sample. """ def __init__(self, signal_object, B, where_dwi, min_signal=1.): """Builds a ResidualBootstrapWapper Given some linear model described by B, the design matrix, and a signal_object, returns an object which can sample the residual bootstrap distribution of the signal. We assume that the signals are normalized so we clip the bootsrap samples to be between `min_signal` and 1. Parameters ---------- signal_object : some object that can be indexed This object should return diffusion weighted signals when indexed. B : ndarray, ndim=2 The design matrix of the spherical harmonics model used to fit the data. This is the model that will be used to compute the residuals and sample the residual bootstrap distribution where_dwi : indexing object to find diffusion weighted signals from signal min_signal : float The lowest allowable signal. """ self._signal_object = signal_object self._H = hat(B) self._R = lcr_matrix(self._H) self._min_signal = min_signal self._where_dwi = where_dwi self.data = signal_object.data self.voxel_size = signal_object.voxel_size def __getitem__(self, index): """Indexes self._signal_object and bootstraps the result""" signal = self._signal_object[index].copy() dwi_signal = signal[self._where_dwi] boot_signal = bootstrap_data_voxel(dwi_signal, self._H, self._R) boot_signal.clip(self._min_signal, 1., out=boot_signal) signal[self._where_dwi] = boot_signal return signal def sf_to_sh(sf, sphere, sh_order=4, basis_type=None, smooth=0.0): """Spherical function to spherical harmonics (SH). Parameters ---------- sf : ndarray Values of a function on the given `sphere`. sphere : Sphere The points on which the sf is defined. sh_order : int, optional Maximum SH order in the SH fit. For `sh_order`, there will be ``(sh_order + 1) * (sh_order_2) / 2`` SH coefficients (default 4). basis_type : {None, 'mrtrix', 'fibernav'} ``None`` for the default dipy basis, ``mrtrix`` for the MRtrix basis, and ``fibernav`` for the FiberNavigator basis (default ``None``). smooth : float, optional Lambda-regularization in the SH fit (default 0.0). Returns ------- sh : ndarray SH coefficients representing the input function. 
""" sph_harm_basis = sph_harm_lookup.get(basis_type) if sph_harm_basis is None: raise ValueError("Invalid basis name.") B, m, n = sph_harm_basis(sh_order, sphere.theta, sphere.phi) L = -n * (n + 1) invB = smooth_pinv(B, sqrt(smooth) * L) sh = np.dot(sf, invB.T) return sh def sh_to_sf(sh, sphere, sh_order, basis_type=None): """Spherical harmonics (SH) to spherical function (SF). Parameters ---------- sh : ndarray SH coefficients representing a spherical function. sphere : Sphere The points on which to sample the spherical function. sh_order : int, optional Maximum SH order in the SH fit. For `sh_order`, there will be ``(sh_order + 1) * (sh_order_2) / 2`` SH coefficients (default 4). basis_type : {None, 'mrtrix', 'fibernav'} ``None`` for the default dipy basis, ``mrtrix`` for the MRtrix basis, and ``fibernav`` for the FiberNavigator basis (default ``None``). Returns ------- sf : ndarray Spherical function values on the `sphere`. """ sph_harm_basis = sph_harm_lookup.get(basis_type) if sph_harm_basis is None: raise ValueError("Invalid basis name.") B, m, n = sph_harm_basis(sh_order, sphere.theta, sphere.phi) sf = np.dot(sh, B.T) return sf def sh_to_sf_matrix(sphere, sh_order, basis_type=None, return_inv=True, smooth=0): """ Matrix that transforms Spherical harmonics (SH) to spherical function (SF). Parameters ---------- sphere : Sphere The points on which to sample the spherical function. sh_order : int, optional Maximum SH order in the SH fit. For `sh_order`, there will be ``(sh_order + 1) * (sh_order_2) / 2`` SH coefficients (default 4). basis_type : {None, 'mrtrix', 'fibernav'} ``None`` for the default dipy basis, ``mrtrix`` for the MRtrix basis, and ``fibernav`` for the FiberNavigator basis (default ``None``). return_inv : bool If True then the inverse of the matrix is also returned smooth : float, optional Lambda-regularization in the SH fit (default 0.0). Returns ------- B : ndarray Matrix that transforms spherical harmonics to spherical function ``sf = np.dot(sh, B)``. invB : ndarray Inverse of B. """ sph_harm_basis = sph_harm_lookup.get(basis_type) if sph_harm_basis is None: raise ValueError("Invalid basis name.") B, m, n = sph_harm_basis(sh_order, sphere.theta, sphere.phi) if return_inv: L = -n * (n + 1) invB = smooth_pinv(B, np.sqrt(smooth) * L) return B.T, invB.T return B.T def calculate_max_order(n_coeffs): """Calculate the maximal harmonic order, given that you know the number of parameters that were estimated. Parameters ---------- n_coeffs : int The number of SH coefficients Returns ------- L : int The maximal SH order, given the number of coefficients Notes ----- The calculation in this function proceeds according to the following logic: .. math:: n = \frac{1}{2} (L+1) (L+2) \rarrow 2n = L^2 + 3L + 2 \rarrow L^2 + 3L + 2 - 2n = 0 \rarrow L^2 + 3L + 2(1-n) = 0 \rarrow L_{1,2} = \frac{-3 \pm \sqrt{9 - 8 (1-n)}}{2} \rarrow L{1,2} = \frac{-3 \pm \sqrt{1 + 8n}}{2} Finally, the positive value is chosen between the two options. """ L1 = (-3 + np.sqrt(1 + 8 * n_coeffs)) / 2 L2 = (-3 - np.sqrt(1 + 8 * n_coeffs)) / 2 return np.int(max([L1, L2])) def anisotropic_power(sh_coeffs, norm_factor=0.00001, power=2, non_negative=True): """Calculates anisotropic power map with a given SH coefficient matrix Parameters ---------- sh_coeffs : ndarray A ndarray where the last dimension is the SH coefficients estimates for that voxel. norm_factor: float, optional The value to normalize the ap values. Default is 10^-5. power : int, optional The degree to which power maps are calculated. Default: 2. 
non_negative: bool, optional Whether to rectify the resulting map to be non-negative. Default: True. Returns ------- log_ap : ndarray The log of the resulting power image. Notes ---------- Calculate AP image based on a IxJxKxC SH coefficient matrix based on the equation: .. math:: AP = \sum_{l=2,4,6,...}{\frac{1}{2l+1} \sum_{m=-l}^l{|a_{l,m}|^n}} Where the last dimension, C, is made of a flattened array of $l$x$m$ coefficients, where $l$ are the SH orders, and $m = 2l+1$, So l=1 has 1 coeffecient, l=2 has 5, ... l=8 has 17 and so on. A l=2 SH coefficient matrix will then be composed of a IxJxKx6 volume. The power, $n$ is usually set to $n=2$. The final AP image is then shifted by -log(norm_factor), to be strictly non-negative. Remaining values < 0 are discarded (set to 0), per default, and this option is controlled through the `non_negative` keyword argument. References ---------- .. [1] Dell'Acqua, F., Lacerda, L., Catani, M., Simmons, A., 2014. Anisotropic Power Maps: A diffusion contrast to reveal low anisotropy tissues from HARDI data, in: Proceedings of International Society for Magnetic Resonance in Medicine. Milan, Italy. """ dim = sh_coeffs.shape[:-1] n_coeffs = sh_coeffs.shape[-1] max_order = calculate_max_order(n_coeffs) ap = np.zeros(dim) n_start = 1 for L in range(2, max_order + 2, 2): n_stop = n_start + (2 * L + 1) ap_i = np.mean(np.abs(sh_coeffs[..., n_start:n_stop]) ** power, -1) ap += ap_i n_start = n_stop # Shift the map to be mostly non-negative, # only applying the log operation to positive elements # to avoid getting numpy warnings on log(0). # It is impossible to get ap values smaller than 0. # Also avoids getting voxels with -inf when non_negative=False. if ap.ndim < 1: # For the off chance we have a scalar on our hands ap = np.reshape(ap, (1, )) log_ap = np.zeros_like(ap) log_ap[ap > 0] = np.log(ap[ap > 0]) - np.log(norm_factor) # Deal with residual negative values: if non_negative: if isinstance(log_ap, np.ndarray): # zero all values < 0 log_ap[log_ap < 0] = 0 else: # assume this is a singleton float (input was 1D): if log_ap < 0: return 0 return log_ap dipy-0.13.0/dipy/reconst/shore.py000066400000000000000000000710031317371701200166570ustar00rootroot00000000000000from __future__ import division from warnings import warn from math import factorial import numpy as np from scipy.special import genlaguerre, gamma, hyp2f1 from dipy.reconst.cache import Cache from dipy.reconst.multi_voxel import multi_voxel_fit from dipy.reconst.shm import real_sph_harm from dipy.core.geometry import cart2sphere from dipy.utils.optpkg import optional_package cvxpy, have_cvxpy, _ = optional_package("cvxpy") class ShoreModel(Cache): r"""Simple Harmonic Oscillator based Reconstruction and Estimation (SHORE) [1]_ of the diffusion signal. The main idea is to model the diffusion signal as a linear combination of continuous functions $\phi_i$, ..math:: :nowrap: \begin{equation} S(\mathbf{q})= \sum_{i=0}^I c_{i} \phi_{i}(\mathbf{q}). \end{equation} where $\mathbf{q}$ is the wavector which corresponds to different gradient directions. Numerous continuous functions $\phi_i$ can be used to model $S$. Some are presented in [2,3,4]_. From the $c_i$ coefficients, there exist analytical formulae to estimate the ODF, the return to the origin porbability (RTOP), the mean square displacement (MSD), amongst others [5]_. References ---------- .. [1] Ozarslan E. et. 
al, "Simple harmonic oscillator based reconstruction and estimation for one-dimensional q-space magnetic resonance 1D-SHORE)", eapoc Intl Soc Mag Reson Med, vol. 16, p. 35., 2008. .. [2] Merlet S. et. al, "Continuous diffusion signal, EAP and ODF estimation via Compressive Sensing in diffusion MRI", Medical Image Analysis, 2013. .. [3] Rathi Y. et. al, "Sparse multi-shell diffusion imaging", MICCAI, 2011. .. [4] Cheng J. et. al, "Theoretical Analysis and eapactical Insights on EAP Estimation via a Unified HARDI Framework", MICCAI workshop on Computational Diffusion MRI, 2011. .. [5] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. Notes ----- The implementation of SHORE depends on CVXPY (http://www.cvxpy.org/). """ def __init__(self, gtab, radial_order=6, zeta=700, lambdaN=1e-8, lambdaL=1e-8, tau=1. / (4 * np.pi ** 2), constrain_e0=False, positive_constraint=False, pos_grid=11, pos_radius=20e-03, cvxpy_solver=None ): r""" Analytical and continuous modeling of the diffusion signal with respect to the SHORE basis [1,2]_. This implementation is a modification of SHORE presented in [1]_. The modification was made to obtain the same ordering of the basis presented in [2,3]_. The main idea is to model the diffusion signal as a linear combination of continuous functions $\phi_i$, ..math:: :nowrap: \begin{equation} S(\mathbf{q})= \sum_{i=0}^I c_{i} \phi_{i}(\mathbf{q}). \end{equation} where $\mathbf{q}$ is the wavector which corresponds to different gradient directions. From the $c_i$ coefficients, there exists an analytical formula to estimate the ODF. Parameters ---------- gtab : GradientTable, gradient directions and bvalues container class radial_order : unsigned int, an even integer that represent the order of the basis zeta : unsigned int, scale factor lambdaN : float, radial regularisation constant lambdaL : float, angular regularisation constant tau : float, diffusion time. By default the value that makes q equal to the square root of the b-value. constrain_e0 : bool, Constrain the optimization such that E(0) = 1. positive_constraint : bool, Constrain the propagator to be positive. pos_grid : int, Grid that define the points of the EAP in which we want to enforce positivity. pos_radius : float, Radius of the grid of the EAP in which enforce positivity in millimeters. By default 20e-03 mm. cvxpy_solver : str, optional cvxpy solver name. Optionally optimize the positivity constraint with a particular cvxpy solver. See http://www.cvxpy.org/ for details. Default: None (cvxpy chooses its own solver) References ---------- .. [1] Merlet S. et al., "Continuous diffusion signal, EAP and ODF estimation via Compressive Sensing in diffusion MRI", Medical Image Analysis, 2013. .. [2] Cheng J. et al., "Theoretical Analysis and eapactical Insights on EAP Estimation via a Unified HARDI Framework", MICCAI workshop on Computational Diffusion MRI, 2011. .. [3] Ozarslan E. et al., "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. Examples -------- In this example, where the data, gradient table and sphere tessellation used for reconstruction are provided, we model the diffusion signal with respect to the SHORE basis and compute the real and analytical ODF. 
from dipy.data import get_data,get_sphere sphere = get_sphere('symmetric724') fimg, fbvals, fbvecs = get_data('ISBI_testing_2shells_table') bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs) gtab = gradient_table(bvals, bvecs) from dipy.sims.voxel import SticksAndBall data, golden_directions = SticksAndBall( gtab, d=0.0015, S0=1., angles=[(0, 0), (90, 0)], fractions=[50, 50], snr=None) from dipy.reconst.canal import ShoreModel radial_order = 4 zeta = 700 asm = ShoreModel(gtab, radial_order=radial_order, zeta=zeta, lambdaN=1e-8, lambdaL=1e-8) asmfit = asm.fit(data) odf= asmfit.odf(sphere) """ self.bvals = gtab.bvals self.bvecs = gtab.bvecs self.gtab = gtab self.constrain_e0 = constrain_e0 if radial_order > 0 and not(bool(radial_order % 2)): self.radial_order = radial_order else: msg = "radial_order must be a non-zero even positive number." raise ValueError(msg) self.zeta = zeta self.lambdaL = lambdaL self.lambdaN = lambdaN if (gtab.big_delta is None) or (gtab.small_delta is None): self.tau = tau else: self.tau = gtab.big_delta - gtab.small_delta / 3.0 if positive_constraint and not(constrain_e0): msg = "Constrain_e0 must be True to enfore positivity." raise ValueError(msg) if positive_constraint or constrain_e0: if not have_cvxpy: msg = "cvxpy must be installed for positive_constraint or " msg += "constraint_e0." raise ValueError(msg) if cvxpy_solver is not None: if cvxpy_solver not in cvxpy.installed_solvers(): msg = "Input `cvxpy_solver` was set to %s." % cvxpy_solver msg += " One of %s" % ', '.join(cvxpy.installed_solvers()) msg += " was expected." raise ValueError(msg) self.cvxpy_solver = cvxpy_solver self.positive_constraint = positive_constraint self.pos_grid = pos_grid self.pos_radius = pos_radius @multi_voxel_fit def fit(self, data): Lshore = l_shore(self.radial_order) Nshore = n_shore(self.radial_order) # Generate the SHORE basis M = self.cache_get('shore_matrix', key=self.gtab) if M is None: M = shore_matrix( self.radial_order, self.zeta, self.gtab, self.tau) self.cache_set('shore_matrix', self.gtab, M) MpseudoInv = self.cache_get('shore_matrix_reg_pinv', key=self.gtab) if MpseudoInv is None: MpseudoInv = np.dot( np.linalg.inv(np.dot(M.T, M) + self.lambdaN * Nshore + self.lambdaL * Lshore), M.T) self.cache_set('shore_matrix_reg_pinv', self.gtab, MpseudoInv) # Compute the signal coefficients in SHORE basis if not self.constrain_e0: coef = np.dot(MpseudoInv, data) signal_0 = 0 for n in range(int(self.radial_order / 2) + 1): signal_0 += ( coef[n] * (genlaguerre(n, 0.5)(0) * ( (factorial(n)) / (2 * np.pi * (self.zeta ** 1.5) * gamma(n + 1.5)) ) ** 0.5) ) coef = coef / signal_0 else: data_norm = data / data[self.gtab.b0s_mask].mean() M0 = M[self.gtab.b0s_mask, :] c = cvxpy.Variable(M.shape[1]) design_matrix = cvxpy.Constant(M) objective = cvxpy.Minimize( cvxpy.sum_squares(design_matrix * c - data_norm) + self.lambdaN * cvxpy.quad_form(c, Nshore) + self.lambdaL * cvxpy.quad_form(c, Lshore) ) if not self.positive_constraint: constraints = [M0[0] * c == 1] else: lg = int(np.floor(self.pos_grid ** 3 / 2)) v, t = create_rspace(self.pos_grid, self.pos_radius) psi = self.cache_get( 'shore_matrix_positive_constraint', key=(self.pos_grid, self.pos_radius) ) if psi is None: psi = shore_matrix_pdf( self.radial_order, self.zeta, t[:lg]) self.cache_set( 'shore_matrix_positive_constraint', (self.pos_grid, self.pos_radius), psi) constraints = [M0[0] * c == 1., psi * c > 1e-3] prob = cvxpy.Problem(objective, constraints) try: prob.solve(solver=self.cvxpy_solver) coef = np.asarray(c.value).squeeze() 
except: warn('Optimization did not find a solution') coef = np.zeros(M.shape[1]) return ShoreFit(self, coef) class ShoreFit(): def __init__(self, model, shore_coef): """ Calculates diffusion properties for a single voxel Parameters ---------- model : object, AnalyticalModel shore_coef : 1d ndarray, shore coefficients """ self.model = model self._shore_coef = shore_coef self.gtab = model.gtab self.radial_order = model.radial_order self.zeta = model.zeta def pdf_grid(self, gridsize, radius_max): r""" Applies the analytical FFT on $S$ to generate the diffusion propagator. This is calculated on a discrete 3D grid in order to obtain an EAP similar to that which is obtained with DSI. Parameters ---------- gridsize : unsigned int dimension of the propagator grid radius_max : float maximal radius in which to compute the propagator Returns ------- eap : ndarray the ensemble average propagator in the 3D grid """ # Create the grid in which to compute the pdf rgrid_rtab = self.model.cache_get( 'pdf_grid', key=(gridsize, radius_max)) if rgrid_rtab is None: rgrid_rtab = create_rspace(gridsize, radius_max) self.model.cache_set( 'pdf_grid', (gridsize, radius_max), rgrid_rtab) rgrid, rtab = rgrid_rtab psi = self.model.cache_get( 'shore_matrix_pdf', key=(gridsize, radius_max)) if psi is None: psi = shore_matrix_pdf(self.radial_order, self.zeta, rtab) self.model.cache_set( 'shore_matrix_pdf', (gridsize, radius_max), psi) propagator = np.dot(psi, self._shore_coef) eap = np.empty((gridsize, gridsize, gridsize), dtype=float) eap[tuple(rgrid.astype(int).T)] = propagator eap *= (2 * radius_max / (gridsize - 1)) ** 3 return eap def pdf(self, r_points): """ Diffusion propagator on a given set of real points. if the array r_points is non writeable, then intermediate results are cached for faster recalculation """ if not r_points.flags.writeable: psi = self.model.cache_get( 'shore_matrix_pdf', key=hash(r_points.data)) else: psi = None if psi is None: psi = shore_matrix_pdf(self.radial_order, self.zeta, r_points) if not r_points.flags.writeable: self.model.cache_set( 'shore_matrix_pdf', hash(r_points.data), psi) eap = np.dot(psi, self._shore_coef) return np.clip(eap, 0, eap.max()) def odf_sh(self): r""" Calculates the real analytical ODF in terms of Spherical Harmonics. """ # Number of Spherical Harmonics involved in the estimation J = (self.radial_order + 1) * (self.radial_order + 2) // 2 # Compute the Spherical Harmonics Coefficients c_sh = np.zeros(J) counter = 0 for l in range(0, self.radial_order + 1, 2): for n in range(l, int((self.radial_order + l) / 2) + 1): for m in range(-l, l + 1): j = int(l + m + (2 * np.array(range(0, l, 2)) + 1).sum()) Cnl = ( ((-1) ** (n - l / 2)) / (2.0 * (4.0 * np.pi ** 2 * self.zeta) ** (3.0 / 2.0)) * ((2.0 * (4.0 * np.pi ** 2 * self.zeta) ** (3.0 / 2.0) * factorial(n - l)) / (gamma(n + 3.0 / 2.0))) ** (1.0 / 2.0) ) Gnl = (gamma(l / 2 + 3.0 / 2.0) * gamma(3.0 / 2.0 + n)) / \ (gamma(l + 3.0 / 2.0) * factorial(n - l)) * \ (1.0 / 2.0) ** (-l / 2 - 3.0 / 2.0) Fnl = hyp2f1(-n + l, l / 2 + 3.0 / 2.0, l + 3.0 / 2.0, 2.0) c_sh[j] += self._shore_coef[counter] * Cnl * Gnl * Fnl counter += 1 return c_sh def odf(self, sphere): r""" Calculates the ODF for a given discrete sphere. 
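        A minimal usage sketch (assuming ``asmfit`` is a ShoreFit instance,
        as in the ShoreModel example above; the names are illustrative
        only):

            from dipy.data import get_sphere

            odf = asmfit.odf(get_sphere('symmetric724'))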
""" upsilon = self.model.cache_get('shore_matrix_odf', key=sphere) if upsilon is None: upsilon = shore_matrix_odf( self.radial_order, self.zeta, sphere.vertices) self.model.cache_set('shore_matrix_odf', sphere, upsilon) odf = np.dot(upsilon, self._shore_coef) return odf def rtop_signal(self): r""" Calculates the analytical return to origin probability (RTOP) from the signal [1]_. References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. """ rtop = 0 c = self._shore_coef for n in range(int(self.radial_order / 2) + 1): rtop += c[n] * (-1) ** n * \ ((16 * np.pi * self.zeta ** 1.5 * gamma(n + 1.5)) / ( factorial(n))) ** 0.5 return np.clip(rtop, 0, rtop.max()) def rtop_pdf(self): r""" Calculates the analytical return to origin probability (RTOP) from the pdf [1]_. References ---------- .. [1] Ozarslan E. et. al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. """ rtop = 0 c = self._shore_coef for n in range(int(self.radial_order / 2) + 1): rtop += c[n] * (-1) ** n * \ ((4 * np.pi ** 2 * self.zeta ** 1.5 * factorial(n)) / (gamma(n + 1.5))) ** 0.5 * \ genlaguerre(n, 0.5)(0) return np.clip(rtop, 0, rtop.max()) def msd(self): r""" Calculates the analytical mean squared displacement (MSD) [1]_ ..math:: :nowrap: \begin{equation} MSD:{DSI}=\int_{-\infty}^{\infty}\int_{-\infty}^{\infty} \int_{-\infty}^{\infty} P(\hat{\mathbf{r}}) \cdot \hat{\mathbf{r}}^{2} \ dr_x \ dr_y \ dr_z \end{equation} where $\hat{\mathbf{r}}$ is a point in the 3D propagator space (see Wu et. al [1]_). References ---------- .. [1] Wu Y. et. al, "Hybrid diffusion imaging", NeuroImage, vol 36, p. 617-629, 2007. """ msd = 0 c = self._shore_coef for n in range(int(self.radial_order / 2) + 1): msd += c[n] * (-1) ** n *\ (9 * (gamma(n + 1.5)) / (8 * np.pi ** 6 * self.zeta ** 3.5 * factorial(n))) ** 0.5 *\ hyp2f1(-n, 2.5, 1.5, 2) return np.clip(msd, 0, msd.max()) def fitted_signal(self): """ The fitted signal. """ phi = self.model.cache_get('shore_matrix', key=self.model.gtab) return np.dot(phi, self._shore_coef) @property def shore_coeff(self): """The SHORE coefficients """ return self._shore_coef def shore_matrix(radial_order, zeta, gtab, tau=1 / (4 * np.pi ** 2)): r"""Compute the SHORE matrix for modified Merlet's 3D-SHORE [1]_ ..math:: :nowrap: \begin{equation} \textbf{E}(q\textbf{u})=\sum_{l=0, even}^{N_{max}} \sum_{n=l}^{(N_{max}+l)/2} \sum_{m=-l}^l c_{nlm} \phi_{nlm}(q\textbf{u}) \end{equation} where $\phi_{nlm}$ is ..math:: :nowrap: \begin{equation} \phi_{nlm}^{SHORE}(q\textbf{u})=\Biggl[\dfrac{2(n-l)!} {\zeta^{3/2} \Gamma(n+3/2)} \Biggr]^{1/2} \Biggl(\dfrac{q^2}{\zeta}\Biggr)^{l/2} exp\Biggl(\dfrac{-q^2}{2\zeta}\Biggr) L^{l+1/2}_{n-l} \Biggl(\dfrac{q^2}{\zeta}\Biggr) Y_l^m(\textbf{u}). \end{equation} Parameters ---------- radial_order : unsigned int, an even integer that represent the order of the basis zeta : unsigned int, scale factor gtab : GradientTable, gradient directions and bvalues container class tau : float, diffusion time. By default the value that makes q=sqrt(b). References ---------- .. [1] Merlet S. et. al, "Continuous diffusion signal, EAP and ODF estimation via Compressive Sensing in diffusion MRI", Medical Image Analysis, 2013. 
""" qvals = np.sqrt(gtab.bvals / (4 * np.pi ** 2 * tau)) qvals[gtab.b0s_mask] = 0 bvecs = gtab.bvecs qgradients = qvals[:, None] * bvecs r, theta, phi = cart2sphere(qgradients[:, 0], qgradients[:, 1], qgradients[:, 2]) theta[np.isnan(theta)] = 0 F = radial_order / 2 n_c = int(np.round(1 / 6.0 * (F + 1) * (F + 2) * (4 * F + 3))) M = np.zeros((r.shape[0], n_c)) counter = 0 for l in range(0, radial_order + 1, 2): for n in range(l, int((radial_order + l) / 2) + 1): for m in range(-l, l + 1): M[:, counter] = real_sph_harm(m, l, theta, phi) * \ genlaguerre(n - l, l + 0.5)(r ** 2 / zeta) * \ np.exp(- r ** 2 / (2.0 * zeta)) * \ _kappa(zeta, n, l) * \ (r ** 2 / zeta) ** (l / 2) counter += 1 return M def _kappa(zeta, n, l): return np.sqrt((2 * factorial(n - l)) / (zeta ** 1.5 * gamma(n + 1.5))) def shore_matrix_pdf(radial_order, zeta, rtab): r"""Compute the SHORE propagator matrix [1]_" Parameters ---------- radial_order : unsigned int, an even integer that represent the order of the basis zeta : unsigned int, scale factor rtab : array, shape (N,3) real space points in which calculates the pdf References ---------- .. [1] Merlet S. et. al, "Continuous diffusion signal, EAP and ODF estimation via Compressive Sensing in diffusion MRI", Medical Image Analysis, 2013. """ r, theta, phi = cart2sphere(rtab[:, 0], rtab[:, 1], rtab[:, 2]) theta[np.isnan(theta)] = 0 F = radial_order / 2 n_c = int(np.round(1 / 6.0 * (F + 1) * (F + 2) * (4 * F + 3))) psi = np.zeros((r.shape[0], n_c)) counter = 0 for l in range(0, radial_order + 1, 2): for n in range(l, int((radial_order + l) / 2) + 1): for m in range(-l, l + 1): psi[:, counter] = real_sph_harm(m, l, theta, phi) * \ genlaguerre(n - l, l + 0.5)(4 * np.pi ** 2 * zeta * r ** 2) *\ np.exp(-2 * np.pi ** 2 * zeta * r ** 2) *\ _kappa_pdf(zeta, n, l) *\ (4 * np.pi ** 2 * zeta * r ** 2) ** (l / 2) * \ (-1) ** (n - l / 2) counter += 1 return psi def _kappa_pdf(zeta, n, l): return np.sqrt((16 * np.pi ** 3 * zeta ** 1.5 * factorial(n - l)) / gamma(n + 1.5)) def shore_matrix_odf(radial_order, zeta, sphere_vertices): r"""Compute the SHORE ODF matrix [1]_" Parameters ---------- radial_order : unsigned int, an even integer that represent the order of the basis zeta : unsigned int, scale factor sphere_vertices : array, shape (N,3) vertices of the odf sphere References ---------- .. [1] Merlet S. et. al, "Continuous diffusion signal, EAP and ODF estimation via Compressive Sensing in diffusion MRI", Medical Image Analysis, 2013. 
""" r, theta, phi = cart2sphere(sphere_vertices[:, 0], sphere_vertices[:, 1], sphere_vertices[:, 2]) theta[np.isnan(theta)] = 0 F = radial_order / 2 n_c = int(np.round(1 / 6.0 * (F + 1) * (F + 2) * (4 * F + 3))) upsilon = np.zeros((len(sphere_vertices), n_c)) counter = 0 for l in range(0, radial_order + 1, 2): for n in range(l, int((radial_order + l) / 2) + 1): for m in range(-l, l + 1): upsilon[:, counter] = (-1) ** (n - l / 2.0) * \ _kappa_odf(zeta, n, l) * \ hyp2f1(l - n, l / 2.0 + 1.5, l + 1.5, 2.0) * \ real_sph_harm(m, l, theta, phi) counter += 1 return upsilon def _kappa_odf(zeta, n, l): return np.sqrt((gamma(l / 2.0 + 1.5) ** 2 * gamma(n + 1.5) * 2 ** (l + 3)) / (16 * np.pi ** 3 * (zeta) ** 1.5 * factorial(n - l) * gamma(l + 1.5) ** 2)) def l_shore(radial_order): "Returns the angular regularisation matrix for SHORE basis" F = radial_order / 2 n_c = int(np.round(1 / 6.0 * (F + 1) * (F + 2) * (4 * F + 3))) diagL = np.zeros(n_c) counter = 0 for l in range(0, radial_order + 1, 2): for n in range(l, int((radial_order + l) / 2) + 1): for m in range(-l, l + 1): diagL[counter] = (l * (l + 1)) ** 2 counter += 1 return np.diag(diagL) def n_shore(radial_order): "Returns the angular regularisation matrix for SHORE basis" F = radial_order / 2 n_c = int(np.round(1 / 6.0 * (F + 1) * (F + 2) * (4 * F + 3))) diagN = np.zeros(n_c) counter = 0 for l in range(0, radial_order + 1, 2): for n in range(l, int((radial_order + l) / 2) + 1): for m in range(-l, l + 1): diagN[counter] = (n * (n + 1)) ** 2 counter += 1 return np.diag(diagN) def create_rspace(gridsize, radius_max): """ Create the real space table, that contains the points in which to compute the pdf. Parameters ---------- gridsize : unsigned int dimension of the propagator grid radius_max : float maximal radius in which compute the propagator Returns ------- vecs : array, shape (N,3) positions of the pdf points in a 3D matrix tab : array, shape (N,3) real space points in which calculates the pdf """ radius = gridsize // 2 vecs = [] for i in range(-radius, radius + 1): for j in range(-radius, radius + 1): for k in range(-radius, radius + 1): vecs.append([i, j, k]) vecs = np.array(vecs, dtype=np.float32) tab = vecs / radius tab = tab * radius_max vecs = vecs + radius return vecs, tab def shore_indices(radial_order, index): r"""Given the basis order and the index, return the shore indices n, l, m for modified Merlet's 3D-SHORE ..math:: :nowrap: \begin{equation} \textbf{E}(q\textbf{u})=\sum_{l=0, even}^{N_{max}} \sum_{n=l}^{(N_{max}+l)/2} \sum_{m=-l}^l c_{nlm} \phi_{nlm}(q\textbf{u}) \end{equation} where $\phi_{nlm}$ is ..math:: :nowrap: \begin{equation} \phi_{nlm}^{SHORE}(q\textbf{u})=\Biggl[\dfrac{2(n-l)!} {\zeta^{3/2} \Gamma(n+3/2)} \Biggr]^{1/2} \Biggl(\dfrac{q^2}{\zeta}\Biggr)^{l/2} exp\Biggl(\dfrac{-q^2}{2\zeta}\Biggr) L^{l+1/2}_{n-l} \Biggl(\dfrac{q^2}{\zeta}\Biggr) Y_l^m(\textbf{u}). \end{equation} Parameters ---------- radial_order : unsigned int an even integer that represent the maximal order of the basis index : unsigned int index of the coefficients, start from 0 Returns ------- n : unsigned int the index n of the modified shore basis l : unsigned int the index l of the modified shore basis m : unsigned int the index m of the modified shore basis """ F = radial_order / 2 n_c = np.round(1 / 6.0 * (F + 1) * (F + 2) * (4 * F + 3)) n_i = 0 l_i = 0 m_i = 0 if n_c < (index + 1): msg = "The index %s is higher than the number of" % index msg += " coefficients of the truncated basis," msg += " which is %s starting from 0." 
% int(n_c - 1) msg += " Select a lower index." raise ValueError(msg) else: counter = 0 for l in range(0, radial_order + 1, 2): for n in range(l, int((radial_order + l) / 2) + 1): for m in range(-l, l + 1): if counter == index: n_i = n l_i = l m_i = m counter += 1 return n_i, l_i, m_i def shore_order(n, l, m): r"""Given the indices (n,l,m) of the basis, return the minimum order for those indices and their index for modified Merlet's 3D-SHORE. Parameters ---------- n : unsigned int the index n of the modified shore basis l : unsigned int the index l of the modified shore basis m : unsigned int the index m of the modified shore basis Returns ------- radial_order : unsigned int an even integer that represent the maximal order of the basis index : unsigned int index of the coefficient correspondig to (n,l,m), start from 0 """ if l % 2 == 1 or l > n or l < 0 or n < 0 or np.abs(m) > l: msg = "The index l must be even and 0 <= l <= n, the index m must be " msg += "-l <= m <= l. Given values were" msg += " [n,l,m]=[%s]." % ','.join([str(n), str(l), str(m)]) raise ValueError(msg) else: if n % 2 == 1: radial_order = n + 1 else: radial_order = n counter_i = 0 counter = 0 for l_i in range(0, radial_order + 1, 2): for n_i in range(l_i, int((radial_order + l_i) / 2) + 1): for m_i in range(-l_i, l_i + 1): if n == n_i and l == l_i and m == m_i: counter_i = counter counter += 1 return radial_order, counter_i dipy-0.13.0/dipy/reconst/tests/000077500000000000000000000000001317371701200163265ustar00rootroot00000000000000dipy-0.13.0/dipy/reconst/tests/__init__.py000066400000000000000000000000401317371701200204310ustar00rootroot00000000000000# tests for reconstruction code dipy-0.13.0/dipy/reconst/tests/test_cache.py000066400000000000000000000011211317371701200207750ustar00rootroot00000000000000from dipy.reconst.cache import Cache from dipy.core.sphere import Sphere from numpy.testing import assert_, assert_equal, run_module_suite class TestModel(Cache): def __init__(self): pass def test_basic_cache(): t = TestModel() s = Sphere(theta=[0], phi=[0]) assert_(t.cache_get("design_matrix", s) is None) m = [[1, 0], [0, 1]] t.cache_set("design_matrix", key=s, value=m) assert_equal(t.cache_get("design_matrix", s), m) t.cache_clear() assert_(t.cache_get("design_matrix", s) is None) if __name__ == "__main__": run_module_suite() dipy-0.13.0/dipy/reconst/tests/test_cross_validation.py000066400000000000000000000114351317371701200233060ustar00rootroot00000000000000""" Testing cross-validation analysis """ from __future__ import division, print_function, absolute_import import numpy as np import numpy.testing as npt import nibabel as nib import dipy.reconst.cross_validation as xval import dipy.data as dpd import dipy.reconst.dti as dti import dipy.core.gradients as gt import dipy.sims.voxel as sims import dipy.reconst.csdeconv as csd import dipy.reconst.base as base # We'll set these globally: fdata, fbval, fbvec = dpd.get_data('small_64D') def test_coeff_of_determination(): """ Test the calculation of the coefficient of determination """ model = np.random.randn(10, 10, 10, 150) data = np.copy(model) # If the model predicts the data perfectly, the COD is all 100s: cod = xval.coeff_of_determination(data, model) npt.assert_array_equal(100, cod) def test_dti_xval(): """ Test k-fold cross-validation """ data = nib.load(fdata).get_data() gtab = gt.gradient_table(fbval, fbvec) dm = dti.TensorModel(gtab, 'LS') # The data has 102 directions, so will not divide neatly into 10 bits npt.assert_raises(ValueError, xval.kfold_xval, dm, 
data, 10) # But we can do this with 2 folds: kf_xval = xval.kfold_xval(dm, data, 2) # In simulation with no noise, COD should be perfect: psphere = dpd.get_sphere('symmetric362') bvecs = np.concatenate(([[0, 0, 0]], psphere.vertices)) bvals = np.zeros(len(bvecs)) + 1000 bvals[0] = 0 gtab = gt.gradient_table(bvals, bvecs) mevals = np.array(([0.0015, 0.0003, 0.0001], [0.0015, 0.0003, 0.0003])) mevecs = [np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]), np.array([[0, 0, 1], [0, 1, 0], [1, 0, 0]])] S = sims.single_tensor(gtab, 100, mevals[0], mevecs[0], snr=None) dm = dti.TensorModel(gtab, 'LS') kf_xval = xval.kfold_xval(dm, S, 2) cod = xval.coeff_of_determination(S, kf_xval) npt.assert_array_almost_equal(cod, np.ones(kf_xval.shape[:-1]) * 100) # Test with 2D data for use of a mask S = np.array([[S, S], [S, S]]) mask = np.ones(S.shape[:-1], dtype=bool) mask[1, 1] = 0 kf_xval = xval.kfold_xval(dm, S, 2, mask=mask) cod2d = xval.coeff_of_determination(S, kf_xval) npt.assert_array_almost_equal(np.round(cod2d[0, 0]), cod) def test_csd_xval(): # First, let's see that it works with some data: data = nib.load(fdata).get_data()[1:3, 1:3, 1:3] # Make it *small* gtab = gt.gradient_table(fbval, fbvec) S0 = np.mean(data[..., gtab.b0s_mask]) response = ([0.0015, 0.0003, 0.0001], S0) csdm = csd.ConstrainedSphericalDeconvModel(gtab, response) kf_xval = xval.kfold_xval(csdm, data, 2, response, sh_order=2) # In simulation, it should work rather well (high COD): psphere = dpd.get_sphere('symmetric362') bvecs = np.concatenate(([[0, 0, 0]], psphere.vertices)) bvals = np.zeros(len(bvecs)) + 1000 bvals[0] = 0 gtab = gt.gradient_table(bvals, bvecs) mevals = np.array(([0.0015, 0.0003, 0.0001], [0.0015, 0.0003, 0.0003])) mevecs = [np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]), np.array([[0, 0, 1], [0, 1, 0], [1, 0, 0]])] S0 = 100 S = sims.single_tensor(gtab, S0, mevals[0], mevecs[0], snr=None) sm = csd.ConstrainedSphericalDeconvModel(gtab, response) np.random.seed(12345) response = ([0.0015, 0.0003, 0.0001], S0) kf_xval = xval.kfold_xval(sm, S, 2, response, sh_order=2) # Because of the regularization, COD is not going to be perfect here: cod = xval.coeff_of_determination(S, kf_xval) # We'll just test for regressions: csd_cod = 97 # pre-computed by hand for this random seed # We're going to be really lenient here: npt.assert_array_almost_equal(np.round(cod), csd_cod) # Test for sD data with more than one voxel for use of a mask: S = np.array([[S, S], [S, S]]) mask = np.ones(S.shape[:-1], dtype=bool) mask[1, 1] = 0 kf_xval = xval.kfold_xval(sm, S, 2, response, sh_order=2, mask=mask) cod = xval.coeff_of_determination(S, kf_xval) npt.assert_array_almost_equal(np.round(cod[0]), csd_cod) def test_no_predict(): """ Test that if you try to do this with a model that doesn't have a `predict` method, you get something reasonable. 
""" class NoPredictModel(base.ReconstModel): def __init__(self, gtab): base.ReconstModel.__init__(self, gtab) def fit(self, data, mask=None): return NoPredictFit(self, data, mask=mask) class NoPredictFit(base.ReconstFit): def __init__(self, model, data, mask=None): base.ReconstFit.__init__(self, model, data) gtab = gt.gradient_table(fbval, fbvec) my_model = NoPredictModel(gtab) data = nib.load(fdata).get_data()[1:3, 1:3, 1:3] # Whatever npt.assert_raises(ValueError, xval.kfold_xval, my_model, data, 2) dipy-0.13.0/dipy/reconst/tests/test_csdeconv.py000066400000000000000000000473111317371701200215510ustar00rootroot00000000000000import warnings import nibabel as nib import numpy as np import numpy.testing as npt from numpy.testing import (assert_, assert_equal, assert_almost_equal, assert_array_almost_equal, run_module_suite, assert_array_equal, assert_warns) from dipy.data import get_sphere, get_data, default_sphere, small_sphere from dipy.sims.voxel import (multi_tensor, single_tensor, multi_tensor_odf, all_tensor_evecs, single_tensor_odf) from dipy.core.gradients import gradient_table from dipy.reconst.csdeconv import (ConstrainedSphericalDeconvModel, ConstrainedSDTModel, forward_sdeconv_mat, odf_deconv, odf_sh_to_sharp, auto_response, fa_superior, fa_inferior, recursive_response, response_from_mask) from dipy.direction.peaks import peak_directions from dipy.core.sphere_stats import angular_similarity from dipy.reconst.dti import TensorModel, fractional_anisotropy from dipy.reconst.shm import (CsaOdfModel, QballModel, sf_to_sh, sh_to_sf, real_sym_sh_basis, sph_harm_ind_list) from dipy.reconst.shm import lazy_index from dipy.core.geometry import cart2sphere import dipy.reconst.dti as dti from dipy.reconst.dti import fractional_anisotropy from dipy.core.sphere import Sphere def test_recursive_response_calibration(): """ Test the recursive response calibration method. 
""" SNR = 100 S0 = 1 sh_order = 8 _, fbvals, fbvecs = get_data('small_64D') bvals = np.load(fbvals) bvecs = np.load(fbvecs) sphere = get_sphere('symmetric724') gtab = gradient_table(bvals, bvecs) evals = np.array([0.0015, 0.0003, 0.0003]) evecs = np.array([[0, 1, 0], [0, 0, 1], [1, 0, 0]]).T mevals = np.array(([0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003])) angles = [(0, 0), (90, 0)] where_dwi = lazy_index(~gtab.b0s_mask) S_cross, sticks_cross = multi_tensor(gtab, mevals, S0, angles=angles, fractions=[50, 50], snr=SNR) S_single = single_tensor(gtab, S0, evals, evecs, snr=SNR) data = np.concatenate((np.tile(S_cross, (8, 1)), np.tile(S_single, (2, 1))), axis=0) odf_gt_cross = multi_tensor_odf(sphere.vertices, mevals, angles, [50, 50]) odf_gt_single = single_tensor_odf(sphere.vertices, evals, evecs) response = recursive_response(gtab, data, mask=None, sh_order=8, peak_thr=0.01, init_fa=0.05, init_trace=0.0021, iter=8, convergence=0.001, parallel=False) csd = ConstrainedSphericalDeconvModel(gtab, response) csd_fit = csd.fit(data) assert_equal(np.all(csd_fit.shm_coeff[:, 0] >= 0), True) fodf = csd_fit.odf(sphere) directions_gt_single, _, _ = peak_directions(odf_gt_single, sphere) directions_gt_cross, _, _ = peak_directions(odf_gt_cross, sphere) directions_single, _, _ = peak_directions(fodf[8, :], sphere) directions_cross, _, _ = peak_directions(fodf[0, :], sphere) ang_sim = angular_similarity(directions_cross, directions_gt_cross) assert_equal(ang_sim > 1.9, True) assert_equal(directions_cross.shape[0], 2) assert_equal(directions_gt_cross.shape[0], 2) ang_sim = angular_similarity(directions_single, directions_gt_single) assert_equal(ang_sim > 0.9, True) assert_equal(directions_single.shape[0], 1) assert_equal(directions_gt_single.shape[0], 1) sphere = Sphere(xyz=gtab.gradients[where_dwi]) sf = response.on_sphere(sphere) S = np.concatenate(([response.S0], sf)) tenmodel = dti.TensorModel(gtab, min_signal=0.001) tenfit = tenmodel.fit(S) FA = fractional_anisotropy(tenfit.evals) FA_gt = fractional_anisotropy(evals) assert_almost_equal(FA, FA_gt, 1) def test_auto_response(): fdata, fbvals, fbvecs = get_data('small_64D') bvals = np.load(fbvals) bvecs = np.load(fbvecs) data = nib.load(fdata).get_data() gtab = gradient_table(bvals, bvecs) radius = 3 def test_fa_superior(FA, fa_thr): return FA > fa_thr def test_fa_inferior(FA, fa_thr): return FA < fa_thr predefined_functions = [fa_superior, fa_inferior] defined_functions = [test_fa_superior, test_fa_inferior] for fa_thr in np.arange(0.1, 1, 0.1): for predefined, defined in zip(predefined_functions, defined_functions): response_predefined, ratio_predefined, nvoxels_predefined = auto_response( gtab, data, roi_center=None, roi_radius=radius, fa_callable=predefined, fa_thr=fa_thr, return_number_of_voxels=True) response_defined, ratio_defined, nvoxels_defined = auto_response( gtab, data, roi_center=None, roi_radius=radius, fa_callable=defined, fa_thr=fa_thr, return_number_of_voxels=True) assert_equal(nvoxels_predefined, nvoxels_defined) assert_array_almost_equal(response_predefined[0], response_defined[0]) assert_almost_equal(response_predefined[1], response_defined[1]) assert_almost_equal(ratio_predefined, ratio_defined) def test_response_from_mask(): fdata, fbvals, fbvecs = get_data('small_64D') bvals = np.load(fbvals) bvecs = np.load(fbvecs) data = nib.load(fdata).get_data() gtab = gradient_table(bvals, bvecs) ten = TensorModel(gtab) tenfit = ten.fit(data) FA = fractional_anisotropy(tenfit.evals) FA[np.isnan(FA)] = 0 radius = 3 for fa_thr in 
np.arange(0, 1, 0.1): response_auto, ratio_auto, nvoxels = auto_response( gtab, data, roi_center=None, roi_radius=radius, fa_thr=fa_thr, return_number_of_voxels=True) ci, cj, ck = np.array(data.shape[:3]) // 2 mask = np.zeros(data.shape[:3]) mask[ci - radius: ci + radius, cj - radius: cj + radius, ck - radius: ck + radius] = 1 mask[FA <= fa_thr] = 0 response_mask, ratio_mask = response_from_mask(gtab, data, mask) assert_equal(int(np.sum(mask)), nvoxels) assert_array_almost_equal(response_mask[0], response_auto[0]) assert_almost_equal(response_mask[1], response_auto[1]) assert_almost_equal(ratio_mask, ratio_auto) def test_csdeconv(): SNR = 100 S0 = 1 _, fbvals, fbvecs = get_data('small_64D') bvals = np.load(fbvals) bvecs = np.load(fbvecs) gtab = gradient_table(bvals, bvecs) mevals = np.array(([0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003])) angles = [(0, 0), (60, 0)] S, sticks = multi_tensor(gtab, mevals, S0, angles=angles, fractions=[50, 50], snr=SNR) sphere = get_sphere('symmetric362') odf_gt = multi_tensor_odf(sphere.vertices, mevals, angles, [50, 50]) response = (np.array([0.0015, 0.0003, 0.0003]), S0) csd = ConstrainedSphericalDeconvModel(gtab, response) csd_fit = csd.fit(S) assert_equal(csd_fit.shm_coeff[0] > 0, True) fodf = csd_fit.odf(sphere) directions, _, _ = peak_directions(odf_gt, sphere) directions2, _, _ = peak_directions(fodf, sphere) ang_sim = angular_similarity(directions, directions2) assert_equal(ang_sim > 1.9, True) assert_equal(directions.shape[0], 2) assert_equal(directions2.shape[0], 2) assert_warns(UserWarning, ConstrainedSphericalDeconvModel, gtab, response, sh_order=10) with warnings.catch_warnings(record=True) as w: ConstrainedSphericalDeconvModel(gtab, response, sh_order=8) assert_equal(len([local_warn for local_warn in w if issubclass(local_warn.category, UserWarning)]) > 0, False) mevecs = [] for s in sticks: mevecs += [all_tensor_evecs(s).T] S2 = single_tensor(gtab, 100, mevals[0], mevecs[0], snr=None) big_S = np.zeros((10, 10, 10, len(S2))) big_S[:] = S2 aresponse, aratio = auto_response(gtab, big_S, roi_center=(5, 5, 4), roi_radius=3, fa_thr=0.5) assert_array_almost_equal(aresponse[0], response[0]) assert_almost_equal(aresponse[1], 100) assert_almost_equal(aratio, response[0][1] / response[0][0]) aresponse2, aratio2 = auto_response(gtab, big_S, roi_radius=3, fa_thr=0.5) assert_array_almost_equal(aresponse[0], response[0]) _, _, nvoxels = auto_response(gtab, big_S, roi_center=(5, 5, 4), roi_radius=30, fa_thr=0.5, return_number_of_voxels=True) assert_equal(nvoxels, 1000) _, _, nvoxels = auto_response(gtab, big_S, roi_center=(5, 5, 4), roi_radius=30, fa_thr=1, return_number_of_voxels=True) assert_equal(nvoxels, 0) def test_odfdeconv(): SNR = 100 S0 = 1 _, fbvals, fbvecs = get_data('small_64D') bvals = np.load(fbvals) bvecs = np.load(fbvecs) gtab = gradient_table(bvals, bvecs) mevals = np.array(([0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003])) angles = [(0, 0), (90, 0)] S, sticks = multi_tensor(gtab, mevals, S0, angles=angles, fractions=[50, 50], snr=SNR) sphere = get_sphere('symmetric362') odf_gt = multi_tensor_odf(sphere.vertices, mevals, angles, [50, 50]) e1 = 15.0 e2 = 3.0 ratio = e2 / e1 csd = ConstrainedSDTModel(gtab, ratio, None) csd_fit = csd.fit(S) fodf = csd_fit.odf(sphere) directions, _, _ = peak_directions(odf_gt, sphere) directions2, _, _ = peak_directions(fodf, sphere) ang_sim = angular_similarity(directions, directions2) assert_equal(ang_sim > 1.9, True) assert_equal(directions.shape[0], 2) assert_equal(directions2.shape[0], 2) with 
warnings.catch_warnings(record=True) as w: ConstrainedSDTModel(gtab, ratio, sh_order=10) assert_equal(len(w) > 0, True) with warnings.catch_warnings(record=True) as w: ConstrainedSDTModel(gtab, ratio, sh_order=8) assert_equal(len(w) > 0, False) csd_fit = csd.fit(np.zeros_like(S)) fodf = csd_fit.odf(sphere) assert_array_equal(fodf, np.zeros_like(fodf)) odf_sh = np.zeros_like(fodf) odf_sh[1] = np.nan fodf, it = odf_deconv(odf_sh, csd.R, csd.B_reg) assert_array_equal(fodf, np.zeros_like(fodf)) def test_odf_sh_to_sharp(): SNR = None S0 = 1 _, fbvals, fbvecs = get_data('small_64D') bvals = np.load(fbvals) bvecs = np.load(fbvecs) gtab = gradient_table(bvals, bvecs) mevals = np.array(([0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003])) S, sticks = multi_tensor(gtab, mevals, S0, angles=[(10, 0), (100, 0)], fractions=[50, 50], snr=SNR) sphere = get_sphere('symmetric724') qb = QballModel(gtab, sh_order=8, assume_normed=True) qbfit = qb.fit(S) odf_gt = qbfit.odf(sphere) Z = np.linalg.norm(odf_gt) odfs_gt = np.zeros((3, 1, 1, odf_gt.shape[0])) odfs_gt[:, :, :] = odf_gt[:] odfs_sh = sf_to_sh(odfs_gt, sphere, sh_order=8, basis_type=None) odfs_sh /= Z fodf_sh = odf_sh_to_sharp(odfs_sh, sphere, basis=None, ratio=3 / 15., sh_order=8, lambda_=1., tau=0.1) fodf = sh_to_sf(fodf_sh, sphere, sh_order=8, basis_type=None) directions2, _, _ = peak_directions(fodf[0, 0, 0], sphere) assert_equal(directions2.shape[0], 2) def test_forward_sdeconv_mat(): m, n = sph_harm_ind_list(4) mat = forward_sdeconv_mat(np.array([0, 2, 4]), n) expected = np.diag([0, 2, 2, 2, 2, 2, 4, 4, 4, 4, 4, 4, 4, 4, 4]) npt.assert_array_equal(mat, expected) sh_order = 8 expected_size = (sh_order + 1) * (sh_order + 2) / 2 r_rh = np.arange(0, sh_order + 1, 2) m, n = sph_harm_ind_list(sh_order) mat = forward_sdeconv_mat(r_rh, n) npt.assert_equal(mat.shape, (expected_size, expected_size)) npt.assert_array_equal(mat.diagonal(), n) # Odd spherical harmonic degrees should raise a ValueError n[2] = 3 npt.assert_raises(ValueError, forward_sdeconv_mat, r_rh, n) def test_r2_term_odf_sharp(): SNR = None S0 = 1 angle = 45 # 45 degrees is a very tight angle to disentangle _, fbvals, fbvecs = get_data('small_64D') # get_data('small_64D') bvals = np.load(fbvals) bvecs = np.load(fbvecs) sphere = get_sphere('symmetric724') gtab = gradient_table(bvals, bvecs) mevals = np.array(([0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003])) angles = [(0, 0), (angle, 0)] S, sticks = multi_tensor(gtab, mevals, S0, angles=angles, fractions=[50, 50], snr=SNR) odf_gt = multi_tensor_odf(sphere.vertices, mevals, angles, [50, 50]) odfs_sh = sf_to_sh(odf_gt, sphere, sh_order=8, basis_type=None) fodf_sh = odf_sh_to_sharp(odfs_sh, sphere, basis=None, ratio=3 / 15., sh_order=8, lambda_=1., tau=0.1, r2_term=True) fodf = sh_to_sf(fodf_sh, sphere, sh_order=8, basis_type=None) directions_gt, _, _ = peak_directions(odf_gt, sphere) directions, _, _ = peak_directions(fodf, sphere) ang_sim = angular_similarity(directions_gt, directions) assert_equal(ang_sim > 1.9, True) assert_equal(directions.shape[0], 2) # This should pass as well sdt_model = ConstrainedSDTModel(gtab, ratio=3/15., sh_order=8) sdt_fit = sdt_model.fit(S) fodf = sdt_fit.odf(sphere) directions_gt, _, _ = peak_directions(odf_gt, sphere) directions, _, _ = peak_directions(fodf, sphere) ang_sim = angular_similarity(directions_gt, directions) assert_equal(ang_sim > 1.9, True) assert_equal(directions.shape[0], 2) def test_csd_predict(): """ Test prediction API """ SNR = 100 S0 = 1 _, fbvals, fbvecs = get_data('small_64D') bvals = 
np.load(fbvals) bvecs = np.load(fbvecs) gtab = gradient_table(bvals, bvecs) mevals = np.array(([0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003])) angles = [(0, 0), (60, 0)] S, sticks = multi_tensor(gtab, mevals, S0, angles=angles, fractions=[50, 50], snr=SNR) sphere = small_sphere odf_gt = multi_tensor_odf(sphere.vertices, mevals, angles, [50, 50]) response = (np.array([0.0015, 0.0003, 0.0003]), S0) csd = ConstrainedSphericalDeconvModel(gtab, response) csd_fit = csd.fit(S) # Predicting from a fit should give the same result as predicting from a # model, S0 is 1 by default prediction1 = csd_fit.predict() prediction2 = csd.predict(csd_fit.shm_coeff) npt.assert_array_equal(prediction1, prediction2) npt.assert_array_equal(prediction1[..., gtab.b0s_mask], 1.) # Same with a different S0 prediction1 = csd_fit.predict(S0=123.) prediction2 = csd.predict(csd_fit.shm_coeff, S0=123.) npt.assert_array_equal(prediction1, prediction2) npt.assert_array_equal(prediction1[..., gtab.b0s_mask], 123.) # For "well behaved" coefficients, the model should be able to find the # coefficients from the predicted signal. coeff = np.random.random(csd_fit.shm_coeff.shape) - .5 coeff[..., 0] = 10. S = csd.predict(coeff) csd_fit = csd.fit(S) npt.assert_array_almost_equal(coeff, csd_fit.shm_coeff) # Test predict on nd-data set S_nd = np.zeros((2, 3, 4, S.size)) S_nd[:] = S fit = csd.fit(S_nd) predict1 = fit.predict() predict2 = csd.predict(fit.shm_coeff) npt.assert_array_almost_equal(predict1, predict2) def test_csd_predict_multi(): """ Check that we can predict reasonably from multi-voxel fits: """ SNR = 100 S0 = 123. _, fbvals, fbvecs = get_data('small_64D') bvals = np.load(fbvals) bvecs = np.load(fbvecs) gtab = gradient_table(bvals, bvecs) response = (np.array([0.0015, 0.0003, 0.0003]), S0) csd = ConstrainedSphericalDeconvModel(gtab, response) coeff = np.random.random(45) - .5 coeff[..., 0] = 10. S = csd.predict(coeff, S0=123.) multi_S = np.array([[S, S], [S, S]]) csd_fit_multi = csd.fit(multi_S) S0_multi = np.mean(multi_S[..., gtab.b0s_mask], -1) pred_multi = csd_fit_multi.predict(S0=S0_multi) npt.assert_array_almost_equal(pred_multi, multi_S) def test_sphere_scaling_csdmodel(): """Check that mirroring regularization sphere does not change the result of the model""" _, fbvals, fbvecs = get_data('small_64D') bvals = np.load(fbvals) bvecs = np.load(fbvecs) gtab = gradient_table(bvals, bvecs) mevals = np.array(([0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003])) angles = [(0, 0), (60, 0)] S, sticks = multi_tensor(gtab, mevals, 100., angles=angles, fractions=[50, 50], snr=None) hemi = small_sphere sphere = hemi.mirror() response = (np.array([0.0015, 0.0003, 0.0003]), 100) model_full = ConstrainedSphericalDeconvModel(gtab, response, reg_sphere=sphere) model_hemi = ConstrainedSphericalDeconvModel(gtab, response, reg_sphere=hemi) csd_fit_full = model_full.fit(S) csd_fit_hemi = model_hemi.fit(S) assert_array_almost_equal(csd_fit_full.shm_coeff, csd_fit_hemi.shm_coeff) expected_lambda = {4: 27.5230088, 8: 82.5713865, 16: 216.0843135} def test_default_lambda_csdmodel(): """We check that the default value of lambda is the expected value with the symmetric362 sphere. This value has empirically been found to work well and changes to this default value should be discussed with the dipy team. 
""" sphere = default_sphere # Create gradient table _, fbvals, fbvecs = get_data('small_64D') bvals = np.load(fbvals) bvecs = np.load(fbvecs) gtab = gradient_table(bvals, bvecs) # Some response function response = (np.array([0.0015, 0.0003, 0.0003]), 100) for sh_order, expected in expected_lambda.items(): model_full = ConstrainedSphericalDeconvModel(gtab, response, sh_order=sh_order, reg_sphere=sphere) B_reg, _, _ = real_sym_sh_basis(sh_order, sphere.theta, sphere.phi) npt.assert_array_almost_equal(model_full.B_reg, expected * B_reg) def test_csd_superres(): """ Check the quality of csdfit with high SH order. """ _, fbvals, fbvecs = get_data('small_64D') bvals = np.load(fbvals) bvecs = np.load(fbvecs) gtab = gradient_table(bvals, bvecs) # img, gtab = read_stanford_hardi() evals = np.array([[1.5, .3, .3]]) * [[1.], [1.]] / 1000. S, sticks = multi_tensor(gtab, evals, snr=None, fractions=[55., 45.]) model16 = ConstrainedSphericalDeconvModel(gtab, (evals[0], 3.), sh_order=16) fit16 = model16.fit(S) # print local_maxima(fit16.odf(default_sphere), default_sphere.edges) d, v, ind = peak_directions(fit16.odf(default_sphere), default_sphere, relative_peak_threshold=.2, min_separation_angle=0) # Check that there are two peaks assert_equal(len(d), 2) # Check that peaks line up with sticks cos_sim = abs((d * sticks).sum(1)) ** .5 assert_(all(cos_sim > .99)) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/reconst/tests/test_dki.py000066400000000000000000000657611317371701200205250ustar00rootroot00000000000000""" Testing DKI """ from __future__ import division, print_function, absolute_import import numpy as np import random import dipy.reconst.dki as dki import dipy.reconst.dti as dti from numpy.testing import (assert_array_almost_equal, assert_array_equal, assert_almost_equal) from nose.tools import assert_raises from dipy.sims.voxel import multi_tensor_dki from dipy.io.gradients import read_bvals_bvecs from dipy.core.gradients import gradient_table from dipy.data import get_data from dipy.reconst.dti import (from_lower_triangular, decompose_tensor) from dipy.reconst.dki import (mean_kurtosis, carlson_rf, carlson_rd, axial_kurtosis, radial_kurtosis, _positive_evals, lower_triangular) from dipy.core.sphere import Sphere from dipy.data import get_sphere from dipy.core.geometry import (sphere2cart, perpendicular_directions) fimg, fbvals, fbvecs = get_data('small_64D') bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs) gtab = gradient_table(bvals, bvecs) # 2 shells for techniques that requires multishell data bvals_2s = np.concatenate((bvals, bvals * 2), axis=0) bvecs_2s = np.concatenate((bvecs, bvecs), axis=0) gtab_2s = gradient_table(bvals_2s, bvecs_2s) # Simulation 1. signals of two crossing fibers are simulated mevals_cross = np.array([[0.00099, 0, 0], [0.00226, 0.00087, 0.00087], [0.00099, 0, 0], [0.00226, 0.00087, 0.00087]]) angles_cross = [(80, 10), (80, 10), (20, 30), (20, 30)] fie = 0.49 frac_cross = [fie*50, (1-fie) * 50, fie*50, (1-fie) * 50] # Noise free simulates signal_cross, dt_cross, kt_cross = multi_tensor_dki(gtab_2s, mevals_cross, S0=100, angles=angles_cross, fractions=frac_cross, snr=None) evals_cross, evecs_cross = decompose_tensor(from_lower_triangular(dt_cross)) crossing_ref = np.concatenate((evals_cross, evecs_cross[0], evecs_cross[1], evecs_cross[2], kt_cross), axis=0) # Simulation 2. 
Spherical kurtosis tensor - for white matter, this can be a # biologically implausible scenario, however this simulation is useful for # testing the estimation of directional apparent kurtosis and the mean # kurtosis, since its directional and mean kurtosis ground truth are a constant # which can be easily calculated mathematically. Di = 0.00099 De = 0.00226 mevals_sph = np.array([[Di, Di, Di], [De, De, De]]) frac_sph = [50, 50] signal_sph, dt_sph, kt_sph = multi_tensor_dki(gtab_2s, mevals_sph, S0=100, fractions=frac_sph, snr=None) evals_sph, evecs_sph = decompose_tensor(from_lower_triangular(dt_sph)) params_sph = np.concatenate((evals_sph, evecs_sph[0], evecs_sph[1], evecs_sph[2], kt_sph), axis=0) # Compute ground truth - since KT is spherical, the apparent kurtosis # coefficients for all gradient directions and the mean kurtosis have to be # equal to Kref_sphere. f = 0.5 Dg = f*Di + (1-f)*De Kref_sphere = 3 * f * (1-f) * ((Di-De) / Dg) ** 2 # Simulation 3. Multi-voxel simulations - a dataset of four voxels is simulated. # Since the objective of this simulation is to see if procedures are able to # work with multi-dimensional data, all voxels contain the same crossing signal # produced in simulation 1. DWI = np.zeros((2, 2, 1, len(gtab_2s.bvals))) DWI[0, 0, 0] = DWI[0, 1, 0] = DWI[1, 0, 0] = DWI[1, 1, 0] = signal_cross multi_params = np.zeros((2, 2, 1, 27)) multi_params[0, 0, 0] = multi_params[0, 1, 0] = crossing_ref multi_params[1, 0, 0] = multi_params[1, 1, 0] = crossing_ref def test_positive_evals(): # Tested evals L1 = np.array([[1e-3, 1e-3, 2e-3], [0, 1e-3, 0]]) L2 = np.array([[3e-3, 0, 2e-3], [1e-3, 1e-3, 0]]) L3 = np.array([[4e-3, 1e-4, 0], [0, 1e-3, 0]]) # only the first voxels have all eigenvalues larger than zero, thus: expected_ind = np.array([[True, False, False], [False, True, False]], dtype=bool) # test function _positive_evals ind = _positive_evals(L1, L2, L3) assert_array_equal(ind, expected_ind) def test_split_dki_param(): dkiM = dki.DiffusionKurtosisModel(gtab_2s, fit_method="OLS") dkiF = dkiM.fit(DWI) evals, evecs, kt = dki.split_dki_param(dkiF.model_params) assert_array_almost_equal(evals, dkiF.evals) assert_array_almost_equal(evecs, dkiF.evecs) assert_array_almost_equal(kt, dkiF.kt) def test_dki_fits(): """ DKI fits are tested on noise-free crossing fiber simulations """ # OLS fitting dkiM = dki.DiffusionKurtosisModel(gtab_2s, fit_method="OLS") dkiF = dkiM.fit(signal_cross) assert_array_almost_equal(dkiF.model_params, crossing_ref) # WLS fitting dki_wlsM = dki.DiffusionKurtosisModel(gtab_2s, fit_method="WLS") dki_wlsF = dki_wlsM.fit(signal_cross) assert_array_almost_equal(dki_wlsF.model_params, crossing_ref) # testing multi-voxels dkiF_multi = dkiM.fit(DWI) assert_array_almost_equal(dkiF_multi.model_params, multi_params) dkiF_multi = dki_wlsM.fit(DWI) assert_array_almost_equal(dkiF_multi.model_params, multi_params) def test_apparent_kurtosis_coef(): """ Apparent kurtosis coefficients are tested for a spherical kurtosis tensor """ sph = Sphere(xyz=gtab.bvecs[gtab.bvals > 0]) AKC = dki.apparent_kurtosis_coef(params_sph, sph) # check all directions for d in range(len(gtab.bvecs[gtab.bvals > 0])): assert_array_almost_equal(AKC[d], Kref_sphere) def test_dki_predict(): dkiM = dki.DiffusionKurtosisModel(gtab_2s) pred = dkiM.predict(crossing_ref, S0=100) assert_array_almost_equal(pred, signal_cross) # just to check that it works with more than one voxel: pred_multi = dkiM.predict(multi_params, S0=100) assert_array_almost_equal(pred_multi, DWI) # Check that it works with more than one voxel, and
with a different S0 # in each voxel: pred_multi = dkiM.predict(multi_params, S0=100*np.ones(pred_multi.shape[:3])) assert_array_almost_equal(pred_multi, DWI) # check the function predict of the DiffusionKurtosisFit object dkiF = dkiM.fit(DWI) pred_multi = dkiF.predict(gtab_2s, S0=100) assert_array_almost_equal(pred_multi, DWI) dkiF = dkiM.fit(pred_multi) pred_from_fit = dkiF.predict(dkiM.gtab, S0=100) assert_array_almost_equal(pred_from_fit, DWI) # Test the module function: pred = dki.dki_prediction(crossing_ref, gtab_2s, S0=100) assert_array_almost_equal(pred, signal_cross) # Test the module function with S0 volume: pred = dki.dki_prediction(multi_params, gtab_2s, S0=100 * np.ones(multi_params.shape[:3])) assert_array_almost_equal(pred, DWI) def test_carlson_rf(): # Define inputs that we know the outputs from: # Carlson, B.C., 1994. Numerical computation of real or complex # elliptic integrals. arXiv:math/9409227 [math.CA] # Real values (test in 2D format) x = np.array([[1.0, 0.5], [2.0, 2.0]]) y = np.array([[2.0, 1.0], [3.0, 3.0]]) z = np.array([[0.0, 0.0], [4.0, 4.0]]) # Defene reference outputs RF_ref = np.array([[1.3110287771461, 1.8540746773014], [0.58408284167715, 0.58408284167715]]) # Compute integrals RF = carlson_rf(x, y, z) # Compare assert_array_almost_equal(RF, RF_ref) # Complex values x = np.array([1j, 1j - 1, 1j, 1j - 1]) y = np.array([-1j, 1j, -1j, 1j]) z = np.array([0.0, 0.0, 2, 1 - 1j]) # Defene reference outputs RF_ref = np.array([1.8540746773014, 0.79612586584234 - 1.2138566698365j, 1.0441445654064, 0.93912050218619 - 0.53296252018635j]) # Compute integrals RF = carlson_rf(x, y, z, errtol=3e-5) # Compare assert_array_almost_equal(RF, RF_ref) def test_carlson_rd(): # Define inputs that we know the outputs from: # Carlson, B.C., 1994. Numerical computation of real or complex # elliptic integrals. 
arXiv:math/9409227 [math.CA] # Real values x = np.array([0.0, 2.0]) y = np.array([2.0, 3.0]) z = np.array([1.0, 4.0]) # Defene reference outputs RD_ref = np.array([1.7972103521034, 0.16510527294261]) # Compute integrals RD = carlson_rd(x, y, z, errtol=1e-5) # Compare assert_array_almost_equal(RD, RD_ref) # Complex values (testing in 2D format) x = np.array([[1j, 0.0], [0.0, -2 - 1j]]) y = np.array([[-1j, 1j], [1j-1, -1j]]) z = np.array([[2.0, -1j], [1j, -1 + 1j]]) # Defene reference outputs RD_ref = np.array([[0.65933854154220, 1.2708196271910 + 2.7811120159521j], [-1.8577235439239 - 0.96193450888839j, 1.8249027393704 - 1.2218475784827j]]) # Compute integrals RD = carlson_rd(x, y, z, errtol=1e-5) # Compare assert_array_almost_equal(RD, RD_ref) def test_Wrotate_single_fiber(): # Rotate the kurtosis tensor of single fiber simulate to the diffusion # tensor diagonal and check that is equal to the kurtosis tensor of the # same single fiber simulated directly to the x-axis # Define single fiber simulate mevals = np.array([[0.00099, 0, 0], [0.00226, 0.00087, 0.00087]]) fie = 0.49 frac = [fie*100, (1 - fie)*100] # simulate single fiber not aligned to the x-axis theta = random.uniform(0, 180) phi = random.uniform(0, 320) angles = [(theta, phi), (theta, phi)] signal, dt, kt = multi_tensor_dki(gtab_2s, mevals, angles=angles, fractions=frac, snr=None) evals, evecs = decompose_tensor(from_lower_triangular(dt)) kt_rotated = dki.Wrotate(kt, evecs) # Now coordinate system has the DT diagonal aligned to the x-axis # Reference simulation in which DT diagonal is directly aligned to the # x-axis angles = (90, 0), (90, 0) signal, dt_ref, kt_ref = multi_tensor_dki(gtab_2s, mevals, angles=angles, fractions=frac, snr=None) assert_array_almost_equal(kt_rotated, kt_ref) def test_Wrotate_crossing_fibers(): # Test 2 - simulate crossing fibers intersecting at 70 degrees. # In this case, diffusion tensor principal eigenvector will be aligned in # the middle of the crossing fibers. Thus, after rotating the kurtosis # tensor, this will be equal to a kurtosis tensor simulate of crossing # fibers both deviating 35 degrees from the x-axis. Moreover, we know that # crossing fibers will be aligned to the x-y plane, because the smaller # diffusion eigenvalue, perpendicular to both crossings fibers, will be # aligned to the z-axis. 
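# Illustrative sketch: dki.Wrotate re-expresses the fourth-order kurtosis # tensor in the coordinate system spanned by the diffusion tensor # eigenvectors. For a fully symmetric 3x3x3x3 tensor W and a rotation matrix # R this is the standard tensor transformation # W'_ijkl = sum_abcd R_ai * R_bj * R_ck * R_dl * W_abcd, which, up to the # eigenvector convention used internally by dki.Wrotate, could be written # with numpy as, e.g.: # W_rot = np.einsum('ai,bj,ck,dl,abcd->ijkl', R, R, R, R, dki.Wcons(kt)) # where dki.Wcons(kt) is the 3x3x3x3 form of the 15-element kurtosis vector.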
# Simulate the crossing fiber angles = [(90, 30), (90, 30), (20, 30), (20, 30)] fie = 0.49 frac = [fie*50, (1-fie) * 50, fie*50, (1-fie) * 50] mevals = np.array([[0.00099, 0, 0], [0.00226, 0.00087, 0.00087], [0.00099, 0, 0], [0.00226, 0.00087, 0.00087]]) signal, dt, kt = multi_tensor_dki(gtab_2s, mevals, angles=angles, fractions=frac, snr=None) evals, evecs = decompose_tensor(from_lower_triangular(dt)) kt_rotated = dki.Wrotate(kt, evecs) # Now coordinate system has diffusion tensor diagonal aligned to the x-axis # Simulate the reference kurtosis tensor angles = [(90, 35), (90, 35), (90, -35), (90, -35)] signal, dt, kt_ref = multi_tensor_dki(gtab_2s, mevals, angles=angles, fractions=frac, snr=None) # Compare rotated with the reference assert_array_almost_equal(kt_rotated, kt_ref) def test_Wcons(): # Construct the 4D kurtosis tensor manualy from the crossing fiber kt # simulate Wfit = np.zeros([3, 3, 3, 3]) # Wxxxx Wfit[0, 0, 0, 0] = kt_cross[0] # Wyyyy Wfit[1, 1, 1, 1] = kt_cross[1] # Wzzzz Wfit[2, 2, 2, 2] = kt_cross[2] # Wxxxy Wfit[0, 0, 0, 1] = Wfit[0, 0, 1, 0] = Wfit[0, 1, 0, 0] = kt_cross[3] Wfit[1, 0, 0, 0] = kt_cross[3] # Wxxxz Wfit[0, 0, 0, 2] = Wfit[0, 0, 2, 0] = Wfit[0, 2, 0, 0] = kt_cross[4] Wfit[2, 0, 0, 0] = kt_cross[4] # Wxyyy Wfit[0, 1, 1, 1] = Wfit[1, 0, 1, 1] = Wfit[1, 1, 1, 0] = kt_cross[5] Wfit[1, 1, 0, 1] = kt_cross[5] # Wxxxz Wfit[1, 1, 1, 2] = Wfit[1, 2, 1, 1] = Wfit[2, 1, 1, 1] = kt_cross[6] Wfit[1, 1, 2, 1] = kt_cross[6] # Wxzzz Wfit[0, 2, 2, 2] = Wfit[2, 2, 2, 0] = Wfit[2, 0, 2, 2] = kt_cross[7] Wfit[2, 2, 0, 2] = kt_cross[7] # Wyzzz Wfit[1, 2, 2, 2] = Wfit[2, 2, 2, 1] = Wfit[2, 1, 2, 2] = kt_cross[8] Wfit[2, 2, 1, 2] = kt_cross[8] # Wxxyy Wfit[0, 0, 1, 1] = Wfit[0, 1, 0, 1] = Wfit[0, 1, 1, 0] = kt_cross[9] Wfit[1, 0, 0, 1] = Wfit[1, 0, 1, 0] = Wfit[1, 1, 0, 0] = kt_cross[9] # Wxxzz Wfit[0, 0, 2, 2] = Wfit[0, 2, 0, 2] = Wfit[0, 2, 2, 0] = kt_cross[10] Wfit[2, 0, 0, 2] = Wfit[2, 0, 2, 0] = Wfit[2, 2, 0, 0] = kt_cross[10] # Wyyzz Wfit[1, 1, 2, 2] = Wfit[1, 2, 1, 2] = Wfit[1, 2, 2, 1] = kt_cross[11] Wfit[2, 1, 1, 2] = Wfit[2, 2, 1, 1] = Wfit[2, 1, 2, 1] = kt_cross[11] # Wxxyz Wfit[0, 0, 1, 2] = Wfit[0, 0, 2, 1] = Wfit[0, 1, 0, 2] = kt_cross[12] Wfit[0, 1, 2, 0] = Wfit[0, 2, 0, 1] = Wfit[0, 2, 1, 0] = kt_cross[12] Wfit[1, 0, 0, 2] = Wfit[1, 0, 2, 0] = Wfit[1, 2, 0, 0] = kt_cross[12] Wfit[2, 0, 0, 1] = Wfit[2, 0, 1, 0] = Wfit[2, 1, 0, 0] = kt_cross[12] # Wxyyz Wfit[0, 1, 1, 2] = Wfit[0, 1, 2, 1] = Wfit[0, 2, 1, 1] = kt_cross[13] Wfit[1, 0, 1, 2] = Wfit[1, 1, 0, 2] = Wfit[1, 1, 2, 0] = kt_cross[13] Wfit[1, 2, 0, 1] = Wfit[1, 2, 1, 0] = Wfit[2, 0, 1, 1] = kt_cross[13] Wfit[2, 1, 0, 1] = Wfit[2, 1, 1, 0] = Wfit[1, 0, 2, 1] = kt_cross[13] # Wxyzz Wfit[0, 1, 2, 2] = Wfit[0, 2, 1, 2] = Wfit[0, 2, 2, 1] = kt_cross[14] Wfit[1, 0, 2, 2] = Wfit[1, 2, 0, 2] = Wfit[1, 2, 2, 0] = kt_cross[14] Wfit[2, 0, 1, 2] = Wfit[2, 0, 2, 1] = Wfit[2, 1, 0, 2] = kt_cross[14] Wfit[2, 1, 2, 0] = Wfit[2, 2, 0, 1] = Wfit[2, 2, 1, 0] = kt_cross[14] # Function to be tested W4D = dki.Wcons(kt_cross) Wfit = Wfit.reshape(-1) W4D = W4D.reshape(-1) assert_array_almost_equal(W4D, Wfit) def test_spherical_dki_statistics(): # tests if MK, AK and RK are equal to expected values of a spherical # kurtosis tensor # Define multi voxel spherical kurtosis simulations MParam = np.zeros((2, 2, 2, 27)) MParam[0, 0, 0] = MParam[0, 0, 1] = MParam[0, 1, 0] = params_sph MParam[0, 1, 1] = MParam[1, 1, 0] = params_sph # MParam[1, 1, 1], MParam[1, 0, 0], and MParam[1, 0, 1] remains zero MRef = np.zeros((2, 2, 2)) MRef[0, 0, 0] 
= MRef[0, 0, 1] = MRef[0, 1, 0] = Kref_sphere MRef[0, 1, 1] = MRef[1, 1, 0] = Kref_sphere MRef[1, 1, 1] = MRef[1, 0, 0] = MRef[1, 0, 1] = 0 # Mean kurtosis analytical solution MK_multi = mean_kurtosis(MParam) assert_array_almost_equal(MK_multi, MRef) # radial kurtosis analytical solution RK_multi = radial_kurtosis(MParam) assert_array_almost_equal(RK_multi, MRef) # axial kurtosis analytical solution AK_multi = axial_kurtosis(MParam) assert_array_almost_equal(AK_multi, MRef) def test_compare_MK_method(): # tests if analytical solution of MK is equal to the average of directional # kurtosis sampled from a sphere # DKI Model fitting dkiM = dki.DiffusionKurtosisModel(gtab_2s) dkiF = dkiM.fit(signal_cross) # MK analytical solution MK_as = dkiF.mk() # MK numerical method sph = Sphere(xyz=gtab.bvecs[gtab.bvals > 0]) MK_nm = np.mean(dki.apparent_kurtosis_coef(dkiF.model_params, sph), axis=-1) assert_array_almost_equal(MK_as, MK_nm, decimal=1) def test_single_voxel_DKI_stats(): # tests if AK and RK are equal to expected values for a single fiber # simulate randomly oriented ADi = 0.00099 ADe = 0.00226 RDi = 0 RDe = 0.00087 # Reference values AD = fie * ADi + (1 - fie) * ADe AK = 3 * fie * (1 - fie) * ((ADi-ADe) / AD) ** 2 RD = fie * RDi + (1 - fie) * RDe RK = 3 * fie * (1 - fie) * ((RDi-RDe) / RD) ** 2 ref_vals = np.array([AD, AK, RD, RK]) # simulate fiber randomly oriented theta = random.uniform(0, 180) phi = random.uniform(0, 320) angles = [(theta, phi), (theta, phi)] mevals = np.array([[ADi, RDi, RDi], [ADe, RDe, RDe]]) frac = [fie * 100, (1 - fie) * 100] signal, dt, kt = multi_tensor_dki(gtab_2s, mevals, S0=100, angles=angles, fractions=frac, snr=None) evals, evecs = decompose_tensor(from_lower_triangular(dt)) dki_par = np.concatenate((evals, evecs[0], evecs[1], evecs[2], kt), axis=0) # Estimates using dki functions ADe1 = dti.axial_diffusivity(evals) RDe1 = dti.radial_diffusivity(evals) AKe1 = axial_kurtosis(dki_par) RKe1 = radial_kurtosis(dki_par) e1_vals = np.array([ADe1, AKe1, RDe1, RKe1]) assert_array_almost_equal(e1_vals, ref_vals) # Estimates using the kurtosis class object dkiM = dki.DiffusionKurtosisModel(gtab_2s) dkiF = dkiM.fit(signal) e2_vals = np.array([dkiF.ad, dkiF.ak(), dkiF.rd, dkiF.rk()]) assert_array_almost_equal(e2_vals, ref_vals) # test MK (note this test correspond to the MK singularity L2==L3) MK_as = dkiF.mk() sph = Sphere(xyz=gtab.bvecs[gtab.bvals > 0]) MK_nm = np.mean(dkiF.akc(sph)) assert_array_almost_equal(MK_as, MK_nm, decimal=1) def test_compare_RK_methods(): # tests if analytical solution of RK is equal to the perpendicular kurtosis # relative to the first diffusion axis # DKI Model fitting dkiM = dki.DiffusionKurtosisModel(gtab_2s) dkiF = dkiM.fit(signal_cross) # MK analytical solution RK_as = dkiF.rk() # MK numerical method evecs = dkiF.evecs p_dir = perpendicular_directions(evecs[:, 0], num=30, half=True) ver = Sphere(xyz=p_dir) RK_nm = np.mean(dki.apparent_kurtosis_coef(dkiF.model_params, ver), axis=-1) assert_array_almost_equal(RK_as, RK_nm) def test_MK_singularities(): # To test MK in case that analytical solution was a singularity not covered # by other tests dkiM = dki.DiffusionKurtosisModel(gtab_2s) # test singularity L1 == L2 - this is the case of a prolate diffusion # tensor for crossing fibers at 90 degrees angles_all = np.array([[(90, 0), (90, 0), (0, 0), (0, 0)], [(89.9, 0), (89.9, 0), (0, 0), (0, 0)]]) for angles_90 in angles_all: s_90, dt_90, kt_90 = multi_tensor_dki(gtab_2s, mevals_cross, S0=100, angles=angles_90, fractions=frac_cross, snr=None) 
dkiF = dkiM.fit(s_90) MK = dkiF.mk() sph = Sphere(xyz=gtab.bvecs[gtab.bvals > 0]) MK_nm = np.mean(dkiF.akc(sph)) assert_almost_equal(MK, MK_nm, decimal=2) # test singularity L1 == L3 and L1 != L2 # since L1 is defined as the largest eigenvalue and L3 the smallest # eigenvalue, this singularity theoretically will never be called, # because for L1 == L3, L2 also has to be equal to L1 and L3. # Nevertheless, I decided to include this test since this singularity # is relevant for cases in which the eigenvalues are not ordered # artificially revert the eigenvalue and eigenvector order dki_params = dkiF.model_params.copy() dki_params[1] = dkiF.model_params[2] dki_params[2] = dkiF.model_params[1] dki_params[4] = dkiF.model_params[5] dki_params[5] = dkiF.model_params[4] dki_params[7] = dkiF.model_params[8] dki_params[8] = dkiF.model_params[7] dki_params[10] = dkiF.model_params[11] dki_params[11] = dkiF.model_params[10] MK = dki.mean_kurtosis(dki_params) MK_nm = np.mean(dki.apparent_kurtosis_coef(dki_params, sph)) assert_almost_equal(MK, MK_nm, decimal=2) def test_dki_errors(): # first error of the DKI module is if an unknown fit method is given assert_raises(ValueError, dki.DiffusionKurtosisModel, gtab_2s, fit_method="JOANA") # second error of the DKI module is if min_signal is defined as negative assert_raises(ValueError, dki.DiffusionKurtosisModel, gtab_2s, min_signal=-1) # try case with correct min_signal dkiM = dki.DiffusionKurtosisModel(gtab_2s, min_signal=1) dkiF = dkiM.fit(DWI) assert_array_almost_equal(dkiF.model_params, multi_params) # third error is if a given mask does not have the same shape as the data dkiM = dki.DiffusionKurtosisModel(gtab_2s) # test a correct mask dkiF = dkiM.fit(DWI) mask_correct = dkiF.fa > 0 mask_correct[1, 1] = False multi_params[1, 1] = np.zeros(27) mask_not_correct = np.array([[True, True, False], [True, False, False]]) dkiF = dkiM.fit(DWI, mask=mask_correct) assert_array_almost_equal(dkiF.model_params, multi_params) # test an incorrect mask assert_raises(ValueError, dkiM.fit, DWI, mask=mask_not_correct) # error if data with only one non-zero b-value is given assert_raises(ValueError, dki.DiffusionKurtosisModel, gtab) def test_kurtosis_maximum(): # TEST 1 # simulate two crossing fibers intersecting at 70 degrees. The first fiber # is aligned to the x-axis while the second fiber is aligned to the x-z # plane with an angular deviation of 70 degrees from the first one. # According to Neto Henriques et al., 2015 (NeuroImage 111: 85-99), the # kurtosis tensor of this simulation will have a maximum aligned to the # y-axis angles = [(90, 0), (90, 0), (20, 0), (20, 0)] signal_70, dt_70, kt_70 = multi_tensor_dki(gtab_2s, mevals_cross, S0=100, angles=angles, fractions=frac_cross, snr=None) # prepare inputs dkiM = dki.DiffusionKurtosisModel(gtab_2s, fit_method="WLS") dkiF = dkiM.fit(signal_70) MD = dkiF.md kt = dkiF.kt R = dkiF.evecs evals = dkiF.evals dt = lower_triangular(np.dot(np.dot(R, np.diag(evals)), R.T)) sphere = get_sphere('symmetric724') # compute maxima k_max_cross, max_dir = dki._voxel_kurtosis_maximum(dt, MD, kt, sphere, gtol=1e-5) yaxis = np.array([0., 1., 0.]) cos_angle = np.abs(np.dot(max_dir[0], yaxis)) assert_almost_equal(cos_angle, 1.) # TEST 2 # test the function on cases of well-aligned fibers oriented in a randomly # defined direction. According to Neto Henriques et al., 2015 (NeuroImage # 111: 85-99), the kurtosis maximum is along any direction perpendicular # to the fiber direction.
Moreover, according to multicompartmetal simulations, # kurtosis in this direction has to be equal to: fie = 0.49 ADi = 0.00099 ADe = 0.00226 RDi = 0 RDe = 0.00087 RD = fie*RDi + (1-fie)*RDe RK = 3 * fie * (1-fie) * ((RDi-RDe) / RD) ** 2 # prepare simulation: theta = random.uniform(0, 180) phi = random.uniform(0, 320) angles = [(theta, phi), (theta, phi)] mevals = np.array([[ADi, RDi, RDi], [ADe, RDe, RDe]]) frac = [fie*100, (1 - fie)*100] signal, dt, kt = multi_tensor_dki(gtab_2s, mevals, angles=angles, fractions=frac, snr=None) # prepare inputs dkiM = dki.DiffusionKurtosisModel(gtab_2s, fit_method="WLS") dkiF = dkiM.fit(signal) MD = dkiF.md kt = dkiF.kt R = dkiF.evecs evals = dkiF.evals dt = lower_triangular(np.dot(np.dot(R, np.diag(evals)), R.T)) # compute maxima k_max, max_dir = dki._voxel_kurtosis_maximum(dt, MD, kt, sphere, gtol=1e-5) # check if max direction is perpendicular to fiber direction fdir = np.array([sphere2cart(1., np.deg2rad(theta), np.deg2rad(phi))]) cos_angle = np.abs(np.dot(max_dir[0], fdir[0])) assert_almost_equal(cos_angle, 0., decimal=5) # check if max direction is equal to expected value assert_almost_equal(k_max, RK) # According to Neto Henriques et al., 2015 (NeuroImage 111: 85-99), # e.g. see figure 1 of this article, kurtosis maxima for the first test is # also equal to the maxima kurtosis value of well-aligned fibers, since # simulations parameters (apart from fiber directions) are equal assert_almost_equal(k_max_cross, RK) # Test 3 - Test performance when kurtosis is spherical - this case, can be # problematic since a spherical kurtosis does not have an maximum k_max, max_dir = dki._voxel_kurtosis_maximum(dt_sph, np.mean(evals_sph), kt_sph, sphere, gtol=1e-2) assert_almost_equal(k_max, Kref_sphere) # Test 4 - Test performance when kt have all elements zero - this case, can # be problematic this case does not have an maximum k_max, max_dir = dki._voxel_kurtosis_maximum(dt_sph, np.mean(evals_sph), np.zeros(15), sphere, gtol=1e-2) assert_almost_equal(k_max, 0.0) def test_multi_voxel_kurtosis_maximum(): # Multi-voxel simulations parameters FIE = np.array([[[0.30, 0.32], [0.74, 0.51]], [[0.47, 0.21], [0.80, 0.63]]]) RDI = np.zeros((2, 2, 2)) ADI = np.array([[[1e-3, 1.3e-3], [0.8e-3, 1e-3]], [[0.9e-3, 0.99e-3], [0.89e-3, 1.1e-3]]]) ADE = np.array([[[2.2e-3, 2.3e-3], [2.8e-3, 2.1e-3]], [[1.9e-3, 2.5e-3], [1.89e-3, 2.1e-3]]]) Tor = np.array([[[2.6, 2.4], [2.8, 2.1]], [[2.9, 2.5], [2.7, 2.3]]]) RDE = ADE / Tor # prepare simulation: DWIsim = np.zeros((2, 2, 2, gtab_2s.bvals.size)) for i in range(2): for j in range(2): for k in range(2): ADi = ADI[i, j, k] RDi = RDI[i, j, k] ADe = ADE[i, j, k] RDe = RDE[i, j, k] fie = FIE[i, j, k] mevals = np.array([[ADi, RDi, RDi], [ADe, RDe, RDe]]) frac = [fie*100, (1 - fie)*100] theta = random.uniform(0, 180) phi = random.uniform(0, 320) angles = [(theta, phi), (theta, phi)] signal, dt, kt = multi_tensor_dki(gtab_2s, mevals, angles=angles, fractions=frac, snr=None) DWIsim[i, j, k, :] = signal # Ground truth Maximum kurtosis RD = FIE*RDI + (1-FIE)*RDE RK = 3 * FIE * (1-FIE) * ((RDI-RDE) / RD) ** 2 # prepare inputs dkiM = dki.DiffusionKurtosisModel(gtab_2s, fit_method="WLS") dkiF = dkiM.fit(DWIsim) sphere = get_sphere('symmetric724') # TEST - when no sphere is given k_max = dki.kurtosis_maximum(dkiF.model_params) assert_almost_equal(k_max, RK, decimal=5) # TEST - when sphere is given k_max = dki.kurtosis_maximum(dkiF.model_params, sphere) assert_almost_equal(k_max, RK, decimal=5) # TEST - when mask is given mask = np.ones((2, 2, 2), 
dtype='bool') mask[1, 1, 1] = 0 RK[1, 1, 1] = 0 k_max = dki.kurtosis_maximum(dkiF.model_params, mask=mask) assert_almost_equal(k_max, RK, decimal=5) dipy-0.13.0/dipy/reconst/tests/test_dki_micro.py000066400000000000000000000310361317371701200217020ustar00rootroot00000000000000""" Testing DKI microstructure """ from __future__ import division, print_function, absolute_import import numpy as np import random import dipy.reconst.dki_micro as dki_micro from numpy.testing import (assert_array_almost_equal, assert_almost_equal, assert_, assert_raises) from dipy.sims.voxel import (multi_tensor_dki, _check_directions, multi_tensor) from dipy.io.gradients import read_bvals_bvecs from dipy.core.gradients import gradient_table from dipy.data import get_data from dipy.reconst.dti import (eig_from_lo_tri) from dipy.data import get_sphere fimg, fbvals, fbvecs = get_data('small_64D') bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs) gtab = gradient_table(bvals, bvecs) # 2 shells for techniques that require multishell data bvals_2s = np.concatenate((bvals, bvals * 2), axis=0) bvecs_2s = np.concatenate((bvecs, bvecs), axis=0) gtab_2s = gradient_table(bvals_2s, bvecs_2s) # single fiber simulation (which is the assumption of our model) FIE = np.array([[[0.30, 0.32], [0.74, 0.51]], [[0.47, 0.21], [0.80, 0.63]]]) RDI = np.zeros((2, 2, 2)) ADI = np.array([[[1e-3, 1.3e-3], [0.8e-3, 1e-3]], [[0.9e-3, 0.99e-3], [0.89e-3, 1.1e-3]]]) ADE = np.array([[[2.2e-3, 2.3e-3], [2.8e-3, 2.1e-3]], [[1.9e-3, 2.5e-3], [1.89e-3, 2.1e-3]]]) Tor = np.array([[[2.6, 2.4], [2.8, 2.1]], [[2.9, 2.5], [2.7, 2.3]]]) RDE = ADE / Tor # prepare simulation: DWIsim = np.zeros((2, 2, 2, gtab_2s.bvals.size)) # The diffusion microstructural model assumes that the signal does not have # Taylor approximation components larger than the fourth order. Thus, parameter # estimates are only equal to the ground truth values of the simulation # if signal Taylor components larger than the fourth order are removed. # Signals without these Taylor components can be generated using the # multi_tensor_dki simulations. Therefore, this function is used to test the # expected estimates of the model. DWIsim_all_taylor = np.zeros((2, 2, 2, gtab_2s.bvals.size)) # Signals with all Taylor components can be simulated using the function # multi_tensor. Generating these signals will be useful to test the prediction # procedures of the DKI-based microstructural model (see the sketch of the # truncated signal representation below).
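# For context, the truncated (fourth-order) signal representation referred to # above is, in the usual DKI form, for a unit direction n and b-value b: # S(n, b) = S0 * np.exp(-b * D_app + (b ** 2) * (D_app ** 2) * K_app / 6.0) # where D_app and K_app are the apparent diffusivity and apparent kurtosis # along n. multi_tensor_dki generates signals that follow this truncated # expression, whereas multi_tensor keeps the higher-order terms of the # multi-compartment model, which is why the latter is used below to test the # prediction of the full signal.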
for i in range(2): for j in range(2): for k in range(2): ADi = ADI[i, j, k] RDi = RDI[i, j, k] ADe = ADE[i, j, k] RDe = RDE[i, j, k] fie = FIE[i, j, k] mevals = np.array([[ADi, RDi, RDi], [ADe, RDe, RDe]]) frac = [fie*100, (1 - fie)*100] theta = random.uniform(0, 180) phi = random.uniform(0, 320) angles = [(theta, phi), (theta, phi)] signal, dt, kt = multi_tensor_dki(gtab_2s, mevals, angles=angles, fractions=frac, snr=None) DWIsim[i, j, k, :] = signal signal, sticks = multi_tensor(gtab_2s, mevals, angles=angles, fractions=frac, snr=None) DWIsim_all_taylor[i, j, k, :] = signal def test_single_fiber_model(): # single fiber simulate (which is the assumption of our model) fie = 0.49 ADi = 0.00099 ADe = 0.00226 RDi = 0 RDe = 0.00087 # prepare simulation: theta = random.uniform(0, 180) phi = random.uniform(0, 320) angles = [(theta, phi), (theta, phi)] mevals = np.array([[ADi, RDi, RDi], [ADe, RDe, RDe]]) frac = [fie*100, (1 - fie)*100] signal, dt, kt = multi_tensor_dki(gtab_2s, mevals, angles=angles, fractions=frac, snr=None) # DKI fit dkiM = dki_micro.DiffusionKurtosisModel(gtab_2s, fit_method="WLS") dkiF = dkiM.fit(signal) # Axonal Water Fraction sphere = get_sphere('symmetric724') AWF = dki_micro.axonal_water_fraction(dkiF.model_params, sphere, mask=None, gtol=1e-5) assert_almost_equal(AWF, fie) # Extra-cellular and intra-cellular components edt, idt = dki_micro.diffusion_components(dkiF.model_params, sphere) EDT = eig_from_lo_tri(edt) IDT = eig_from_lo_tri(idt) # check eigenvalues assert_array_almost_equal(EDT[0:3], np.array([ADe, RDe, RDe])) assert_array_almost_equal(IDT[0:3], np.array([ADi, RDi, RDi])) # first eigenvalue should be the direction of the fibers fiber_direction = _check_directions([(theta, phi)]) f_norm = abs(np.dot(fiber_direction, np.array((EDT[3], EDT[6], EDT[9])))) assert_almost_equal(f_norm, 1.) f_norm = abs(np.dot(fiber_direction, np.array((IDT[3], IDT[6], IDT[9])))) assert_almost_equal(f_norm, 1.) # Test model and fit objects wmtiM = dki_micro.KurtosisMicrostructureModel(gtab_2s, fit_method="WLS") wmtiF = wmtiM.fit(signal) assert_almost_equal(wmtiF.awf, AWF) assert_array_almost_equal(wmtiF.hindered_evals, np.array([ADe, RDe, RDe])) assert_array_almost_equal(wmtiF.restricted_evals, np.array([ADi, RDi, RDi])) assert_almost_equal(wmtiF.hindered_ad, ADe) assert_almost_equal(wmtiF.hindered_rd, RDe) assert_almost_equal(wmtiF.axonal_diffusivity, ADi) assert_almost_equal(wmtiF.tortuosity, ADe/RDe, decimal=4) # Test diffusion_components when a kurtosis tensors is associated with # negative kurtosis values. 
E.g of this cases is given below: dkiparams = np.array([1.67135726e-03, 5.03651205e-04, 9.35365328e-05, -7.11167583e-01, 6.23186820e-01, -3.25390313e-01, -1.75247376e-02, -4.78415563e-01, -8.77958674e-01, 7.02804064e-01, 6.18673368e-01, -3.51154825e-01, 2.18384153, -2.76378153e-02, 2.22893297, -2.68306546e-01, -1.28411610, -1.56557645e-01, -1.80850619e-01, -8.33152110e-01, -3.62410766e-01, 1.57775442e-01, 8.73775381e-01, 2.77188975e-01, -3.67415502e-02, -1.56330984e-01, -1.62295407e-02]) edt, idt = dki_micro.diffusion_components(dkiparams) assert_(np.all(np.isfinite(edt))) def test_wmti_model_multi_voxel(): # DKI fit dkiM = dki_micro.DiffusionKurtosisModel(gtab_2s, fit_method="WLS") dkiF = dkiM.fit(DWIsim) # Axonal Water Fraction sphere = get_sphere() AWF = dki_micro.axonal_water_fraction(dkiF.model_params, sphere, mask=None, gtol=1e-5) assert_almost_equal(AWF, FIE) # Extra-cellular and intra-cellular components edt, idt = dki_micro.diffusion_components(dkiF.model_params, sphere) EDT = eig_from_lo_tri(edt) IDT = eig_from_lo_tri(idt) # check eigenvalues assert_array_almost_equal(EDT[..., 0], ADE, decimal=3) assert_array_almost_equal(EDT[..., 1], RDE, decimal=3) assert_array_almost_equal(EDT[..., 2], RDE, decimal=3) assert_array_almost_equal(IDT[..., 0], ADI, decimal=3) assert_array_almost_equal(IDT[..., 1], RDI, decimal=3) assert_array_almost_equal(IDT[..., 2], RDI, decimal=3) # Test methods performance when a signal with all zeros is present FIEc = FIE.copy() RDIc = RDI.copy() ADIc = ADI.copy() ADEc = ADE.copy() Torc = Tor.copy() RDEc = RDE.copy() DWIsimc = DWIsim.copy() FIEc[0, 0, 0] = 0 RDIc[0, 0, 0] = 0 ADIc[0, 0, 0] = 0 ADEc[0, 0, 0] = 0 Torc[0, 0, 0] = 0 RDEc[0, 0, 0] = 0 DWIsimc[0, 0, 0, :] = 0 mask = np.ones((2, 2, 2)) mask[0, 0, 0] = 0 dkiF = dkiM.fit(DWIsimc) awf = dki_micro.axonal_water_fraction(dkiF.model_params, sphere, gtol=1e-5) assert_almost_equal(awf, FIEc) # Extra-cellular and intra-cellular components edt, idt = dki_micro.diffusion_components(dkiF.model_params, sphere, awf=awf) EDT = eig_from_lo_tri(edt) IDT = eig_from_lo_tri(idt) assert_array_almost_equal(EDT[..., 0], ADEc, decimal=3) assert_array_almost_equal(EDT[..., 1], RDEc, decimal=3) assert_array_almost_equal(EDT[..., 2], RDEc, decimal=3) assert_array_almost_equal(IDT[..., 0], ADIc, decimal=3) assert_array_almost_equal(IDT[..., 1], RDIc, decimal=3) assert_array_almost_equal(IDT[..., 2], RDIc, decimal=3) # Check when mask is given dkiF = dkiM.fit(DWIsim) awf = dki_micro.axonal_water_fraction(dkiF.model_params, sphere, gtol=1e-5, mask=mask) assert_almost_equal(awf, FIEc, decimal=3) # Extra-cellular and intra-cellular components edt, idt = dki_micro.diffusion_components(dkiF.model_params, sphere, awf=awf, mask=mask) EDT = eig_from_lo_tri(edt) IDT = eig_from_lo_tri(idt) assert_array_almost_equal(EDT[..., 0], ADEc, decimal=3) assert_array_almost_equal(EDT[..., 1], RDEc, decimal=3) assert_array_almost_equal(EDT[..., 2], RDEc, decimal=3) assert_array_almost_equal(IDT[..., 0], ADIc, decimal=3) assert_array_almost_equal(IDT[..., 1], RDIc, decimal=3) assert_array_almost_equal(IDT[..., 2], RDIc, decimal=3) # Check class object wmtiM = dki_micro.KurtosisMicrostructureModel(gtab_2s, fit_method="WLS") wmtiF = wmtiM.fit(DWIsim, mask=mask) assert_almost_equal(wmtiF.awf, FIEc, decimal=3) assert_almost_equal(wmtiF.axonal_diffusivity, ADIc, decimal=3) assert_almost_equal(wmtiF.hindered_ad, ADEc, decimal=3) assert_almost_equal(wmtiF.hindered_rd, RDEc, decimal=3) assert_almost_equal(wmtiF.tortuosity, Torc, decimal=3) def 
test_dki_micro_predict_single_voxel(): # single fiber simulate (which is the assumption of our model) fie = 0.49 ADi = 0.00099 ADe = 0.00226 RDi = 0 RDe = 0.00087 # prepare simulation: theta = random.uniform(0, 180) phi = random.uniform(0, 320) angles = [(theta, phi), (theta, phi)] mevals = np.array([[ADi, RDi, RDi], [ADe, RDe, RDe]]) frac = [fie*100, (1 - fie)*100] signal, dt, kt = multi_tensor_dki(gtab_2s, mevals, angles=angles, fractions=frac, snr=None) signal_gt, da = multi_tensor(gtab_2s, mevals, angles=angles, fractions=frac, snr=None) # Defined DKI microstrutural model dkiM = dki_micro.KurtosisMicrostructureModel(gtab_2s) # Fit single voxel signal dkiF = dkiM.fit(signal) # Check predict of KurtosisMicrostruturalModel pred = dkiM.predict(dkiF.model_params) assert_array_almost_equal(pred, signal_gt, decimal=4) pred = dkiM.predict(dkiF.model_params, S0=100) assert_array_almost_equal(pred, signal_gt * 100, decimal=4) # Check predict of KurtosisMicrostruturalFit pred = dkiF.predict(gtab_2s, S0=100) assert_array_almost_equal(pred, signal_gt * 100, decimal=4) def test_dki_micro_predict_multi_voxel(): dkiM = dki_micro.KurtosisMicrostructureModel(gtab_2s) dkiF = dkiM.fit(DWIsim) # Check predict of KurtosisMicrostruturalModel pred = dkiM.predict(dkiF.model_params) assert_array_almost_equal(pred, DWIsim_all_taylor, decimal=3) pred = dkiM.predict(dkiF.model_params, S0=100) assert_array_almost_equal(pred, DWIsim_all_taylor * 100, decimal=3) # Check predict of KurtosisMicrostruturalFit pred = dkiF.predict(gtab_2s, S0=100) assert_array_almost_equal(pred, DWIsim_all_taylor * 100, decimal=3) def _help_test_awf_only(dkimicrofit, string): exec(string) def test_dki_micro_awf_only(): dkiM = dki_micro.KurtosisMicrostructureModel(gtab_2s) dkiF = dkiM.fit(DWIsim, awf_only=True) awf = dkiF.awf assert_almost_equal(awf, FIE, decimal=3) # assert_raises(dkiF.hindered_evals) assert_raises(ValueError, _help_test_awf_only, dkiF, 'dkimicrofit.hindered_evals') assert_raises(ValueError, _help_test_awf_only, dkiF, 'dkimicrofit.restricted_evals') assert_raises(ValueError, _help_test_awf_only, dkiF, 'dkimicrofit.axonal_diffusivity') assert_raises(ValueError, _help_test_awf_only, dkiF, 'dkimicrofit.hindered_ad') assert_raises(ValueError, _help_test_awf_only, dkiF, 'dkimicrofit.hindered_rd') assert_raises(ValueError, _help_test_awf_only, dkiF, 'dkimicrofit.tortuosity') def additional_tortuosity_tests(): # Test tortuosity when rd is zero # single voxel t = dki_micro.tortuosity(1.7e-3, 0.0) assert_almost_equal(t, 0.0) # multi-voxel RDEc = RDE.copy() Torc = Tor.copy() RDEc[1, 1, 1] = 0.0 Torc[1, 1, 1] = 0.0 t = dki_micro.tortuosity(ADE, RDEc) assert_almost_equal(Torc, t) dipy-0.13.0/dipy/reconst/tests/test_dsi.py000066400000000000000000000111761317371701200205240ustar00rootroot00000000000000import numpy as np from numpy.testing import (assert_equal, assert_almost_equal, run_module_suite, assert_array_equal, assert_raises) from dipy.data import get_data, dsi_voxels from dipy.reconst.dsi import DiffusionSpectrumModel from dipy.reconst.odf import gfa from dipy.direction.peaks import peak_directions from dipy.sims.voxel import SticksAndBall from dipy.core.sphere import Sphere from dipy.core.gradients import gradient_table from dipy.data import get_sphere from numpy.testing import assert_equal from dipy.core.subdivide_octahedron import create_unit_sphere from dipy.core.sphere_stats import angular_similarity def test_dsi(): # load symmetric 724 sphere sphere = get_sphere('symmetric724') # load icosahedron sphere sphere2 = 
create_unit_sphere(5) btable = np.loadtxt(get_data('dsi515btable')) gtab = gradient_table(btable[:, 0], btable[:, 1:]) data, golden_directions = SticksAndBall(gtab, d=0.0015, S0=100, angles=[(0, 0), (90, 0)], fractions=[50, 50], snr=None) ds = DiffusionSpectrumModel(gtab) # symmetric724 dsfit = ds.fit(data) odf = dsfit.odf(sphere) directions, _, _ = peak_directions(odf, sphere) assert_equal(len(directions), 2) assert_almost_equal(angular_similarity(directions, golden_directions), 2, 1) # 5 subdivisions dsfit = ds.fit(data) odf2 = dsfit.odf(sphere2) directions, _, _ = peak_directions(odf2, sphere2) assert_equal(len(directions), 2) assert_almost_equal(angular_similarity(directions, golden_directions), 2, 1) assert_equal(dsfit.pdf().shape, 3 * (ds.qgrid_size, )) sb_dummies = sticks_and_ball_dummies(gtab) for sbd in sb_dummies: data, golden_directions = sb_dummies[sbd] odf = ds.fit(data).odf(sphere2) directions, _, _ = peak_directions(odf, sphere2) if len(directions) <= 3: assert_equal(len(directions), len(golden_directions)) if len(directions) > 3: assert_equal(gfa(odf) < 0.1, True) assert_raises(ValueError, DiffusionSpectrumModel, gtab, qgrid_size=16) def test_multivox_dsi(): data, gtab = dsi_voxels() DS = DiffusionSpectrumModel(gtab) sphere = get_sphere('symmetric724') DSfit = DS.fit(data) PDF = DSfit.pdf() assert_equal(data.shape[:-1] + (17, 17, 17), PDF.shape) assert_equal(np.alltrue(np.isreal(PDF)), True) def test_multib0_dsi(): data, gtab = dsi_voxels() # Create a new data-set with a b0 measurement: new_data = np.concatenate([data, data[..., 0, None]], -1) new_bvecs = np.concatenate([gtab.bvecs, np.zeros((1, 3))]) new_bvals = np.concatenate([gtab.bvals, [0]]) new_gtab = gradient_table(new_bvals, new_bvecs) ds = DiffusionSpectrumModel(new_gtab) sphere = get_sphere('repulsion724') dsfit = ds.fit(new_data) pdf = dsfit.pdf() odf = dsfit.odf(sphere) assert_equal(new_data.shape[:-1] + (17, 17, 17), pdf.shape) assert_equal(np.alltrue(np.isreal(pdf)), True) # And again, with one more b0 measurement (two in total): new_data = np.concatenate([data, data[..., 0, None]], -1) new_bvecs = np.concatenate([gtab.bvecs, np.zeros((1, 3))]) new_bvals = np.concatenate([gtab.bvals, [0]]) new_gtab = gradient_table(new_bvals, new_bvecs) ds = DiffusionSpectrumModel(new_gtab) dsfit = ds.fit(new_data) pdf = dsfit.pdf() odf = dsfit.odf(sphere) assert_equal(new_data.shape[:-1] + (17, 17, 17), pdf.shape) assert_equal(np.alltrue(np.isreal(pdf)), True) def sticks_and_ball_dummies(gtab): sb_dummies = {} S, sticks = SticksAndBall(gtab, d=0.0015, S0=100, angles=[(0, 0)], fractions=[100], snr=None) sb_dummies['1fiber'] = (S, sticks) S, sticks = SticksAndBall(gtab, d=0.0015, S0=100, angles=[(0, 0), (90, 0)], fractions=[50, 50], snr=None) sb_dummies['2fiber'] = (S, sticks) S, sticks = SticksAndBall(gtab, d=0.0015, S0=100, angles=[(0, 0), (90, 0), (90, 90)], fractions=[33, 33, 33], snr=None) sb_dummies['3fiber'] = (S, sticks) S, sticks = SticksAndBall(gtab, d=0.0015, S0=100, angles=[(0, 0), (90, 0), (90, 90)], fractions=[0, 0, 0], snr=None) sb_dummies['isotropic'] = (S, sticks) return sb_dummies if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/reconst/tests/test_dsi_deconv.py000066400000000000000000000053521317371701200220610ustar00rootroot00000000000000import numpy as np from numpy.testing import (assert_equal, assert_almost_equal, run_module_suite, assert_array_equal, assert_raises) from dipy.data import get_data, dsi_deconv_voxels from dipy.reconst.dsi import DiffusionSpectrumDeconvModel from 
dipy.reconst.odf import gfa from dipy.direction.peaks import peak_directions from dipy.sims.voxel import SticksAndBall from dipy.core.sphere import Sphere from dipy.core.gradients import gradient_table from dipy.data import get_sphere from numpy.testing import assert_equal from dipy.core.subdivide_octahedron import create_unit_sphere from dipy.core.sphere_stats import angular_similarity from dipy.reconst.tests.test_dsi import sticks_and_ball_dummies def test_dsi(): # load symmetric 724 sphere sphere = get_sphere('symmetric724') # load icosahedron sphere sphere2 = create_unit_sphere(5) btable = np.loadtxt(get_data('dsi515btable')) gtab = gradient_table(btable[:, 0], btable[:, 1:]) data, golden_directions = SticksAndBall(gtab, d=0.0015, S0=100, angles=[(0, 0), (90, 0)], fractions=[50, 50], snr=None) ds = DiffusionSpectrumDeconvModel(gtab) # symmetric724 dsfit = ds.fit(data) odf = dsfit.odf(sphere) directions, _, _ = peak_directions(odf, sphere, .35, 25) assert_equal(len(directions), 2) assert_almost_equal(angular_similarity(directions, golden_directions), 2, 1) # 5 subdivisions dsfit = ds.fit(data) odf2 = dsfit.odf(sphere2) directions, _, _ = peak_directions(odf2, sphere2, .35, 25) assert_equal(len(directions), 2) assert_almost_equal(angular_similarity(directions, golden_directions), 2, 1) assert_equal(dsfit.pdf().shape, 3 * (ds.qgrid_size, )) sb_dummies = sticks_and_ball_dummies(gtab) for sbd in sb_dummies: data, golden_directions = sb_dummies[sbd] odf = ds.fit(data).odf(sphere2) directions, _, _ = peak_directions(odf, sphere2, .35, 25) if len(directions) <= 3: assert_equal(len(directions), len(golden_directions)) if len(directions) > 3: assert_equal(gfa(odf) < 0.1, True) assert_raises(ValueError, DiffusionSpectrumDeconvModel, gtab, qgrid_size=16) def test_multivox_dsi(): data, gtab = dsi_deconv_voxels() DS = DiffusionSpectrumDeconvModel(gtab) sphere = get_sphere('symmetric724') DSfit = DS.fit(data) PDF = DSfit.pdf() assert_equal(data.shape[:-1] + (35, 35, 35), PDF.shape) assert_equal(np.alltrue(np.isreal(PDF)), True) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/reconst/tests/test_dsi_metrics.py000066400000000000000000000032611317371701200222460ustar00rootroot00000000000000import numpy as np from dipy.reconst.dsi import DiffusionSpectrumModel from dipy.data import get_data from dipy.core.gradients import gradient_table from numpy.testing import (assert_almost_equal, run_module_suite) from dipy.sims.voxel import (SticksAndBall, MultiTensor) def test_dsi_metrics(): btable = np.loadtxt(get_data('dsi4169btable')) gtab = gradient_table(btable[:, 0], btable[:, 1:]) data, golden_directions = SticksAndBall(gtab, d=0.0015, S0=100, angles=[(0, 0), (60, 0)], fractions=[50, 50], snr=None) dsmodel = DiffusionSpectrumModel(gtab, qgrid_size=21, filter_width=4500) rtop_signal_norm = dsmodel.fit(data).rtop_signal() rtop_pdf_norm = dsmodel.fit(data).rtop_pdf() rtop_pdf = dsmodel.fit(data).rtop_pdf(normalized=False) assert_almost_equal(rtop_signal_norm, rtop_pdf, 6) dsmodel = DiffusionSpectrumModel(gtab, qgrid_size=21, filter_width=4500) mevals = np.array(([0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003])) S_0, sticks_0 = MultiTensor(gtab, mevals, S0=100, angles=[(0, 0), (60, 0)], fractions=[50, 50], snr=None) S_1, sticks_0 = MultiTensor(gtab, mevals * 2.0, S0=100, angles=[(0, 0), (60, 0)], fractions=[50, 50], snr=None) MSD_norm_0 = dsmodel.fit(S_0).msd_discrete(normalized=True) MSD_norm_1 = dsmodel.fit(S_1).msd_discrete(normalized=True) assert_almost_equal(MSD_norm_0, 0.5 * MSD_norm_1, 4) 
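# The factor of 0.5 in the comparison above reflects that, for Gaussian # diffusion, the mean squared displacement grows linearly with the # diffusivities (e.g. <r**2> = 6 * MD * t in the isotropic case), so doubling # every eigenvalue of the diffusion tensor, as done for S_1, is expected to # double the normalized MSD estimate returned by msd_discrete.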
if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/reconst/tests/test_dti.py000066400000000000000000000716241317371701200205310ustar00rootroot00000000000000""" Testing DTI """ from __future__ import division, print_function, absolute_import import numpy as np from nose.tools import (assert_true, assert_equal, assert_almost_equal, assert_raises) import numpy.testing as npt from numpy.testing import (assert_array_equal, assert_array_almost_equal, assert_) import nibabel as nib import scipy.optimize as opt import dipy.reconst.dti as dti from dipy.reconst.dti import (axial_diffusivity, color_fa, fractional_anisotropy, from_lower_triangular, geodesic_anisotropy, lower_triangular, mean_diffusivity, radial_diffusivity, TensorModel, trace, linearity, planarity, sphericity, decompose_tensor, _decompose_tensor_nan) from dipy.io.bvectxt import read_bvec_file from dipy.data import get_data, dsi_voxels, get_sphere from dipy.core.subdivide_octahedron import create_unit_sphere import dipy.core.gradients as grad import dipy.core.sphere as dps from dipy.sims.voxel import single_tensor def test_roll_evals(): """ """ # Just making sure this never passes through weird_evals = np.array([1, 0.5]) npt.assert_raises(ValueError, dti._roll_evals, weird_evals) def test_tensor_algebra(): """ Test that the computation of tensor determinant and norm is correct """ test_arr = np.random.rand(10, 3, 3) t_det = dti.determinant(test_arr) t_norm = dti.norm(test_arr) for i, x in enumerate(test_arr): assert_almost_equal(np.linalg.det(x), t_det[i]) assert_almost_equal(np.linalg.norm(x), t_norm[i]) def test_odf_with_zeros(): fdata, fbval, fbvec = get_data('small_25') gtab = grad.gradient_table(fbval, fbvec) data = nib.load(fdata).get_data() dm = dti.TensorModel(gtab) df = dm.fit(data) df.evals[0, 0, 0] = np.array([0, 0, 0]) sphere = create_unit_sphere(4) odf = df.odf(sphere) npt.assert_equal(odf[0, 0, 0], np.zeros(sphere.vertices.shape[0])) def test_tensor_model(): fdata, fbval, fbvec = get_data('small_25') data1 = nib.load(fdata).get_data() gtab1 = grad.gradient_table(fbval, fbvec) data2, gtab2 = dsi_voxels() for data, gtab in zip([data1, data2], [gtab1, gtab2]): dm = dti.TensorModel(gtab, 'LS') dtifit = dm.fit(data[0, 0, 0]) assert_equal(dtifit.fa < 0.9, True) dm = dti.TensorModel(gtab, 'WLS') dtifit = dm.fit(data[0, 0, 0]) assert_equal(dtifit.fa < 0.9, True) assert_equal(dtifit.fa > 0, True) sphere = create_unit_sphere(4) assert_equal(len(dtifit.odf(sphere)), len(sphere.vertices)) # Check that the multivoxel case works: dtifit = dm.fit(data) # Check that it works on signal that has already been normalized to S0: dm_to_relative = dti.TensorModel(gtab) if np.any(gtab.b0s_mask): relative_data = (data[0, 0, 0]/np.mean(data[0, 0, 0, gtab.b0s_mask])) dtifit_to_relative = dm_to_relative.fit(relative_data) npt.assert_almost_equal(dtifit.fa[0, 0, 0], dtifit_to_relative.fa, decimal=3) # And smoke-test that all these operations return sensibly-shaped arrays: assert_equal(dtifit.fa.shape, data.shape[:3]) assert_equal(dtifit.ad.shape, data.shape[:3]) assert_equal(dtifit.md.shape, data.shape[:3]) assert_equal(dtifit.rd.shape, data.shape[:3]) assert_equal(dtifit.trace.shape, data.shape[:3]) assert_equal(dtifit.mode.shape, data.shape[:3]) assert_equal(dtifit.linearity.shape, data.shape[:3]) assert_equal(dtifit.planarity.shape, data.shape[:3]) assert_equal(dtifit.sphericity.shape, data.shape[:3]) # Test for the shape of the mask assert_raises(ValueError, dm.fit, np.ones((10, 10, 3)), np.ones((3, 3))) # Make some synthetic data b0 
= 1000. bvecs, bvals = read_bvec_file(get_data('55dir_grad.bvec')) gtab = grad.gradient_table_from_bvals_bvecs(bvals, bvecs.T) # The first b value is 0., so we take the second one: B = bvals[1] # Scale the eigenvalues and tensor by the B value so the units match D = np.array([1., 1., 1., 0., 0., 1., -np.log(b0) * B]) / B evals = np.array([2., 1., 0.]) / B md = evals.mean() tensor = from_lower_triangular(D) A_squiggle = tensor - (1 / 3.0) * np.trace(tensor) * np.eye(3) mode = (3 * np.sqrt(6) * np.linalg.det(A_squiggle / np.linalg.norm(A_squiggle))) evals_eigh, evecs_eigh = np.linalg.eigh(tensor) # Sort according to eigen-value from large to small: evecs = evecs_eigh[:, np.argsort(evals_eigh)[::-1]] # Check that eigenvalues and eigenvectors are properly sorted through # that previous operation: for i in range(3): assert_array_almost_equal(np.dot(tensor, evecs[:, i]), evals[i] * evecs[:, i]) # Design Matrix X = dti.design_matrix(gtab) # Signals Y = np.exp(np.dot(X, D)) assert_almost_equal(Y[0], b0) Y.shape = (-1,) + Y.shape # Test fitting with different methods: for fit_method in ['OLS', 'WLS', 'NLLS']: tensor_model = dti.TensorModel(gtab, fit_method=fit_method, return_S0_hat=True) tensor_fit = tensor_model.fit(Y) assert_true(tensor_fit.model is tensor_model) assert_equal(tensor_fit.shape, Y.shape[:-1]) assert_array_almost_equal(tensor_fit.evals[0], evals) assert_array_almost_equal(tensor_fit.S0_hat, b0, decimal=3) # Test that the eigenvectors are correct, one-by-one: for i in range(3): # Eigenvectors have intrinsic sign ambiguity # (see # http://prod.sandia.gov/techlib/access-control.cgi/2007/076422.pdf) # so we need to allow for sign flips. One of the following should # always be true: assert_( np.all(np.abs(tensor_fit.evecs[0][:, i] - evecs[:, i]) < 10e-6) or np.all(np.abs(-tensor_fit.evecs[0][:, i] - evecs[:, i]) < 10e-6)) # We set a fixed tolerance of 10e-6, similar to array_almost_equal err_msg = "Calculation of tensor from Y does not compare to " err_msg += "analytical solution" assert_array_almost_equal(tensor_fit.quadratic_form[0], tensor, err_msg=err_msg) assert_almost_equal(tensor_fit.md[0], md) assert_array_almost_equal(tensor_fit.mode, mode, decimal=5) assert_equal(tensor_fit.directions.shape[-2], 1) assert_equal(tensor_fit.directions.shape[-1], 3) # Test error-handling: assert_raises(ValueError, dti.TensorModel, gtab, fit_method='crazy_method') # Test custom fit tensor method try: model = dti.TensorModel(gtab, fit_method=lambda *args, **kwargs: 42) fit = model.fit_method() except Exception as exc: assert False, "TensorModel should accept custom fit methods: %s" % exc assert fit == 42, "Custom fit method for TensorModel returned %s." % fit # Test multi-voxel data data = np.zeros((3, Y.shape[1])) # Normal voxel data[0] = Y # High diffusion voxel, all diffusing weighted signal equal to zero data[1, gtab.b0s_mask] = b0 data[1, ~gtab.b0s_mask] = 0 # Masked voxel, all data set to zero data[2] = 0. tensor_model = dti.TensorModel(gtab) fit = tensor_model.fit(data) assert_array_almost_equal(fit[0].evals, evals) # Return S0_test tensor_model = dti.TensorModel(gtab, return_S0_hat=True) fit = tensor_model.fit(data) assert_array_almost_equal(fit[0].evals, evals) assert_array_almost_equal(fit[0].S0_hat, b0) # Evals should be high for high diffusion voxel assert_(all(fit[1].evals > evals[0] * .9)) # Evals should be zero where data is masked assert_array_almost_equal(fit[2].evals, 0.) 
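

# A minimal DTI usage sketch, assuming only the public calls already exercised
# above (get_data, gradient_table, TensorModel and the TensorFit attributes).
# The helper name is illustrative and not part of dipy.
def _example_wls_tensor_fit():
    fdata, fbval, fbvec = get_data('small_25')
    gtab = grad.gradient_table(fbval, fbvec)
    data = nib.load(fdata).get_data()
    # Weighted least-squares fit, then read off the rotationally invariant
    # scalar maps and the principal directions.
    dm = dti.TensorModel(gtab, fit_method='WLS')
    dtifit = dm.fit(data)
    return dtifit.fa, dtifit.md, dtifit.evecs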
def test_indexing_on_tensor_fit(): params = np.zeros([2, 3, 4, 12]) fit = dti.TensorFit(None, params) # Should return a TensorFit of appropriate shape assert_equal(fit.shape, (2, 3, 4)) fit1 = fit[0] assert_equal(fit1.shape, (3, 4)) assert_equal(type(fit1), dti.TensorFit) fit1 = fit[0, 0, 0] assert_equal(fit1.shape, ()) assert_equal(type(fit1), dti.TensorFit) fit1 = fit[[0], slice(None)] assert_equal(fit1.shape, (1, 3, 4)) assert_equal(type(fit1), dti.TensorFit) # Should raise an index error if too many indices are passed assert_raises(IndexError, fit.__getitem__, (0, 0, 0, 0)) def test_fa_of_zero(): evals = np.zeros((4, 3)) fa = fractional_anisotropy(evals) assert_array_equal(fa, 0) def test_ga_of_zero(): evals = np.zeros((4, 3)) ga = geodesic_anisotropy(evals) assert_array_equal(ga, 0) def test_diffusivities(): psphere = get_sphere('symmetric362') bvecs = np.concatenate(([[0, 0, 0]], psphere.vertices)) bvals = np.zeros(len(bvecs)) + 1000 bvals[0] = 0 gtab = grad.gradient_table(bvals, bvecs) mevals = np.array(([0.0015, 0.0003, 0.0001], [0.0015, 0.0003, 0.0003])) mevecs = [np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]), np.array([[0, 0, 1], [0, 1, 0], [1, 0, 0]])] S = single_tensor(gtab, 100, mevals[0], mevecs[0], snr=None) dm = dti.TensorModel(gtab, 'LS') dmfit = dm.fit(S) md = mean_diffusivity(dmfit.evals) Trace = trace(dmfit.evals) rd = radial_diffusivity(dmfit.evals) ad = axial_diffusivity(dmfit.evals) lin = linearity(dmfit.evals) plan = planarity(dmfit.evals) spher = sphericity(dmfit.evals) assert_almost_equal(md, (0.0015 + 0.0003 + 0.0001) / 3) assert_almost_equal(Trace, (0.0015 + 0.0003 + 0.0001)) assert_almost_equal(ad, 0.0015) assert_almost_equal(rd, (0.0003 + 0.0001) / 2) assert_almost_equal(lin, (0.0015 - 0.0003)/Trace) assert_almost_equal(plan, 2 * (0.0003 - 0.0001)/Trace) assert_almost_equal(spher, (3 * 0.0001)/Trace) def test_color_fa(): data, gtab = dsi_voxels() dm = dti.TensorModel(gtab, 'LS') dmfit = dm.fit(data) fa = fractional_anisotropy(dmfit.evals) cfa = color_fa(fa, dmfit.evecs) fa = np.ones((3, 3, 3)) # evecs should be of shape (fa, 3, 3) evecs = np.zeros(fa.shape + (3, 2)) npt.assert_raises(ValueError, color_fa, fa, evecs) evecs = np.zeros(fa.shape + (3, 3)) evecs[..., :, :] = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]) assert_equal(fa.shape, evecs[..., 0, 0].shape) assert_equal((3, 3), evecs.shape[-2:]) # 3D test case fa = np.ones((3, 3, 3)) evecs = np.zeros(fa.shape + (3, 3)) evecs[..., :, :] = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]) cfa = color_fa(fa, evecs) cfa_truth = np.array([1, 0, 0]) true_cfa = np.reshape(np.tile(cfa_truth, 27), [3, 3, 3, 3]) assert_array_equal(cfa, true_cfa) # 2D test case fa = np.ones((3, 3)) evecs = np.zeros(fa.shape + (3, 3)) evecs[..., :, :] = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]) cfa = color_fa(fa, evecs) cfa_truth = np.array([1, 0, 0]) true_cfa = np.reshape(np.tile(cfa_truth, 9), [3, 3, 3]) assert_array_equal(cfa, true_cfa) # 1D test case fa = np.ones((3)) evecs = np.zeros(fa.shape + (3, 3)) evecs[..., :, :] = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]) cfa = color_fa(fa, evecs) cfa_truth = np.array([1, 0, 0]) true_cfa = np.reshape(np.tile(cfa_truth, 3), [3, 3]) assert_array_equal(cfa, true_cfa) def test_wls_and_ls_fit(): """ Tests the WLS and LS fitting functions to see if they returns the correct eigenvalues and eigenvectors. Uses data/55dir_grad.bvec as the gradient table and 3by3by56.nii as the data. 
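
    A sketch of the call pattern being verified (names mirror the test body
    below; illustrative only, not an additional check):

    .. code-block:: python

        model = TensorModel(gtab, fit_method='WLS')
        tensor_est = model.fit(Y)
        evals, quad_form = tensor_est.evals, tensor_est.quadratic_form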
""" # Defining Test Voxel (avoid nibabel dependency) ### # Recall: D = [Dxx,Dyy,Dzz,Dxy,Dxz,Dyz,log(S_0)] and D ~ 10^-4 mm^2 /s b0 = 1000. bvec, bval = read_bvec_file(get_data('55dir_grad.bvec')) B = bval[1] # Scale the eigenvalues and tensor by the B value so the units match D = np.array([1., 1., 1., 0., 0., 1., -np.log(b0) * B]) / B evals = np.array([2., 1., 0.]) / B md = evals.mean() tensor = from_lower_triangular(D) # Design Matrix gtab = grad.gradient_table(bval, bvec) X = dti.design_matrix(gtab) # Signals Y = np.exp(np.dot(X, D)) assert_almost_equal(Y[0], b0) Y.shape = (-1,) + Y.shape # Testing WLS Fit on Single Voxel # If you do something wonky (passing min_signal<0), you should get an # error: npt.assert_raises(ValueError, TensorModel, gtab, fit_method='WLS', min_signal=-1) # Estimate tensor from test signals model = TensorModel(gtab, fit_method='WLS', return_S0_hat=True) tensor_est = model.fit(Y) assert_equal(tensor_est.shape, Y.shape[:-1]) assert_array_almost_equal(tensor_est.evals[0], evals) assert_array_almost_equal(tensor_est.quadratic_form[0], tensor, err_msg="Calculation of tensor from Y does not " "compare to analytical solution") assert_almost_equal(tensor_est.md[0], md) assert_array_almost_equal(tensor_est.S0_hat[0], b0, decimal=3) # Test that we can fit a single voxel's worth of data (a 1d array) y = Y[0] tensor_est = model.fit(y) assert_equal(tensor_est.shape, tuple()) assert_array_almost_equal(tensor_est.evals, evals) assert_array_almost_equal(tensor_est.quadratic_form, tensor) assert_almost_equal(tensor_est.md, md) assert_array_almost_equal(tensor_est.lower_triangular(b0), D) # Test using fit_method='LS' model = TensorModel(gtab, fit_method='LS') tensor_est = model.fit(y) assert_equal(tensor_est.shape, tuple()) assert_array_almost_equal(tensor_est.evals, evals) assert_array_almost_equal(tensor_est.quadratic_form, tensor) assert_almost_equal(tensor_est.md, md) assert_array_almost_equal(tensor_est.lower_triangular(b0), D) assert_array_almost_equal(tensor_est.linearity, linearity(evals)) assert_array_almost_equal(tensor_est.planarity, planarity(evals)) assert_array_almost_equal(tensor_est.sphericity, sphericity(evals)) def test_masked_array_with_tensor(): data = np.ones((2, 4, 56)) mask = np.array([[True, False, False, True], [True, False, True, False]]) bvec, bval = read_bvec_file(get_data('55dir_grad.bvec')) gtab = grad.gradient_table_from_bvals_bvecs(bval, bvec.T) tensor_model = TensorModel(gtab) tensor = tensor_model.fit(data, mask=mask) assert_equal(tensor.shape, (2, 4)) assert_equal(tensor.fa.shape, (2, 4)) assert_equal(tensor.evals.shape, (2, 4, 3)) assert_equal(tensor.evecs.shape, (2, 4, 3, 3)) tensor = tensor[0] assert_equal(tensor.shape, (4,)) assert_equal(tensor.fa.shape, (4,)) assert_equal(tensor.evals.shape, (4, 3)) assert_equal(tensor.evecs.shape, (4, 3, 3)) tensor = tensor[0] assert_equal(tensor.shape, tuple()) assert_equal(tensor.fa.shape, tuple()) assert_equal(tensor.evals.shape, (3,)) assert_equal(tensor.evecs.shape, (3, 3)) assert_equal(type(tensor.model_params), np.ndarray) def test_fit_method_error(): bvec, bval = read_bvec_file(get_data('55dir_grad.bvec')) gtab = grad.gradient_table_from_bvals_bvecs(bval, bvec.T) # This should work (smoke-testing!): TensorModel(gtab, fit_method='WLS') # This should raise an error because there is no such fit_method assert_raises(ValueError, TensorModel, gtab, min_signal=1e-9, fit_method='s') def test_lower_triangular(): tensor = np.arange(9).reshape((3, 3)) D = lower_triangular(tensor) assert_array_equal(D, [0, 3, 
4, 6, 7, 8]) D = lower_triangular(tensor, 1) assert_array_equal(D, [0, 3, 4, 6, 7, 8, 0]) assert_raises(ValueError, lower_triangular, np.zeros((2, 3))) shape = (4, 5, 6) many_tensors = np.empty(shape + (3, 3)) many_tensors[:] = tensor result = np.empty(shape + (6,)) result[:] = [0, 3, 4, 6, 7, 8] D = lower_triangular(many_tensors) assert_array_equal(D, result) D = lower_triangular(many_tensors, 1) result = np.empty(shape + (7,)) result[:] = [0, 3, 4, 6, 7, 8, 0] assert_array_equal(D, result) def test_from_lower_triangular(): result = np.array([[0, 1, 3], [1, 2, 4], [3, 4, 5]]) D = np.arange(7) tensor = from_lower_triangular(D) assert_array_equal(tensor, result) result = result * np.ones((5, 4, 1, 1)) D = D * np.ones((5, 4, 1)) tensor = from_lower_triangular(D) assert_array_equal(tensor, result) def test_all_constant(): bvecs, bvals = read_bvec_file(get_data('55dir_grad.bvec')) gtab = grad.gradient_table_from_bvals_bvecs(bvals, bvecs.T) fit_methods = ['LS', 'OLS', 'NNLS', 'RESTORE'] for fit_method in fit_methods: dm = dti.TensorModel(gtab) assert_almost_equal(dm.fit(100 * np.ones(bvals.shape[0])).fa, 0) # Doesn't matter if the signal is smaller than 1: assert_almost_equal(dm.fit(0.4 * np.ones(bvals.shape[0])).fa, 0) def test_all_zeros(): bvecs, bvals = read_bvec_file(get_data('55dir_grad.bvec')) gtab = grad.gradient_table_from_bvals_bvecs(bvals, bvecs.T) fit_methods = ['LS', 'OLS', 'NNLS', 'RESTORE'] for fit_method in fit_methods: dm = dti.TensorModel(gtab) assert_array_almost_equal(dm.fit(np.zeros(bvals.shape[0])).evals, 0) def test_mask(): data, gtab = dsi_voxels() dm = dti.TensorModel(gtab, 'LS') mask = np.zeros(data.shape[:-1], dtype=bool) mask[0, 0, 0] = True dtifit = dm.fit(data) dtifit_w_mask = dm.fit(data, mask=mask) # Without a mask it has some value assert_(not np.isnan(dtifit.fa[0, 0, 0])) # Where mask is False, evals, evecs and fa should all be 0 assert_array_equal(dtifit_w_mask.evals[~mask], 0) assert_array_equal(dtifit_w_mask.evecs[~mask], 0) assert_array_equal(dtifit_w_mask.fa[~mask], 0) # Except for the one voxel that was selected by the mask: assert_almost_equal(dtifit_w_mask.fa[0, 0, 0], dtifit.fa[0, 0, 0]) # Test with returning S0_hat dm = dti.TensorModel(gtab, 'LS', return_S0_hat=True) mask = np.zeros(data.shape[:-1], dtype=bool) mask[0, 0, 0] = True dtifit = dm.fit(data) dtifit_w_mask = dm.fit(data, mask=mask) # Without a mask it has some value assert_(not np.isnan(dtifit.fa[0, 0, 0])) # Where mask is False, evals, evecs and fa should all be 0 assert_array_equal(dtifit_w_mask.evals[~mask], 0) assert_array_equal(dtifit_w_mask.evecs[~mask], 0) assert_array_equal(dtifit_w_mask.fa[~mask], 0) assert_array_equal(dtifit_w_mask.S0_hat[~mask], 0) # Except for the one voxel that was selected by the mask: assert_almost_equal(dtifit_w_mask.fa[0, 0, 0], dtifit.fa[0, 0, 0]) assert_almost_equal(dtifit_w_mask.S0_hat[0, 0, 0], dtifit.S0_hat[0, 0, 0]) def test_nnls_jacobian_fucn(): b0 = 1000. 
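    # The analytical Jacobian returned by dti._nlls_jacobian_func is compared
    # row by row against a finite-difference approximation of the NLLS error
    # function (scipy.optimize.approx_fprime), both at the true tensor D and
    # at D = 0.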
bvecs, bval = read_bvec_file(get_data('55dir_grad.bvec')) gtab = grad.gradient_table(bval, bvecs) B = bval[1] # Scale the eigenvalues and tensor by the B value so the units match D = np.array([1., 1., 1., 0., 0., 1., -np.log(b0) * B]) / B # Design Matrix X = dti.design_matrix(gtab) # Signals Y = np.exp(np.dot(X, D)) # Test Jacobian at D args = [X, Y] analytical = dti._nlls_jacobian_func(D, *args) for i in range(len(X)): args = [X[i], Y[i]] approx = opt.approx_fprime(D, dti._nlls_err_func, 1e-8, *args) assert_true(np.allclose(approx, analytical[i])) # Test Jacobian at zero D = np.zeros_like(D) args = [X, Y] analytical = dti._nlls_jacobian_func(D, *args) for i in range(len(X)): args = [X[i], Y[i]] approx = opt.approx_fprime(D, dti._nlls_err_func, 1e-8, *args) assert_true(np.allclose(approx, analytical[i])) def test_nlls_fit_tensor(): """ Test the implementation of NLLS and RESTORE """ b0 = 1000. bvecs, bval = read_bvec_file(get_data('55dir_grad.bvec')) gtab = grad.gradient_table(bval, bvecs) B = bval[1] # Scale the eigenvalues and tensor by the B value so the units match D = np.array([1., 1., 1., 0., 0., 1., -np.log(b0) * B]) / B evals = np.array([2., 1., 0.]) / B md = evals.mean() tensor = from_lower_triangular(D) # Design Matrix X = dti.design_matrix(gtab) # Signals Y = np.exp(np.dot(X, D)) Y.shape = (-1,) + Y.shape # Estimate tensor from test signals and compare against expected result # using non-linear least squares: tensor_model = dti.TensorModel(gtab, fit_method='NLLS') tensor_est = tensor_model.fit(Y) assert_equal(tensor_est.shape, Y.shape[:-1]) assert_array_almost_equal(tensor_est.evals[0], evals) assert_array_almost_equal(tensor_est.quadratic_form[0], tensor) assert_almost_equal(tensor_est.md[0], md) # You can also do this without the Jacobian (though it's slower): tensor_model = dti.TensorModel(gtab, fit_method='NLLS', jac=False) tensor_est = tensor_model.fit(Y) assert_equal(tensor_est.shape, Y.shape[:-1]) assert_array_almost_equal(tensor_est.evals[0], evals) assert_array_almost_equal(tensor_est.quadratic_form[0], tensor) assert_almost_equal(tensor_est.md[0], md) # Using the gmm weighting scheme: tensor_model = dti.TensorModel(gtab, fit_method='NLLS', weighting='gmm') tensor_est = tensor_model.fit(Y) assert_equal(tensor_est.shape, Y.shape[:-1]) assert_array_almost_equal(tensor_est.evals[0], evals) assert_array_almost_equal(tensor_est.quadratic_form[0], tensor) assert_almost_equal(tensor_est.md[0], md) # If you use sigma weighting, you'd better provide a sigma: tensor_model = dti.TensorModel(gtab, fit_method='NLLS', weighting='sigma') npt.assert_raises(ValueError, tensor_model.fit, Y) # Use NLLS with some actual 4D data: data, bvals, bvecs = get_data('small_25') gtab = grad.gradient_table(bvals, bvecs) tm1 = dti.TensorModel(gtab, fit_method='NLLS') dd = nib.load(data).get_data() tf1 = tm1.fit(dd) tm2 = dti.TensorModel(gtab) tf2 = tm2.fit(dd) assert_array_almost_equal(tf1.fa, tf2.fa, decimal=1) def test_restore(): """ Test the implementation of the RESTORE algorithm """ b0 = 1000. 
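    # RESTORE is exercised by corrupting one diffusion-weighted measurement at
    # a time (setting it to 1.0) and checking that the recovered eigenvalues
    # and quadratic form still match the ground truth to 3 decimals, with
    # sigma given either as a scalar or as a per-volume array, and with or
    # without the analytical Jacobian.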
bvecs, bval = read_bvec_file(get_data('55dir_grad.bvec')) gtab = grad.gradient_table(bval, bvecs) B = bval[1] # Scale the eigenvalues and tensor by the B value so the units match D = np.array([1., 1., 1., 0., 0., 1., -np.log(b0) * B]) / B evals = np.array([2., 1., 0.]) / B tensor = from_lower_triangular(D) # Design Matrix X = dti.design_matrix(gtab) # Signals Y = np.exp(np.dot(X, D)) Y.shape = (-1,) + Y.shape for drop_this in range(1, Y.shape[-1]): for jac in [True, False]: # RESTORE estimates should be robust to dropping this_y = Y.copy() this_y[:, drop_this] = 1.0 for sigma in [67.0, np.ones(this_y.shape[-1]) * 67.0]: tensor_model = dti.TensorModel(gtab, fit_method='restore', jac=jac, sigma=67.0) tensor_est = tensor_model.fit(this_y) assert_array_almost_equal(tensor_est.evals[0], evals, decimal=3) assert_array_almost_equal(tensor_est.quadratic_form[0], tensor, decimal=3) # If sigma is very small, it still needs to work: tensor_model = dti.TensorModel(gtab, fit_method='restore', sigma=0.0001) tensor_model.fit(Y.copy()) # Test return_S0_hat tensor_model = dti.TensorModel(gtab, fit_method='restore', sigma=0.0001, return_S0_hat=True) tmf = tensor_model.fit(Y.copy()) assert_almost_equal(tmf[0].S0_hat, b0) def test_adc(): """ Test the implementation of the calculation of apparent diffusion coefficient """ data, gtab = dsi_voxels() dm = dti.TensorModel(gtab, 'LS') mask = np.zeros(data.shape[:-1], dtype=bool) mask[0, 0, 0] = True dtifit = dm.fit(data) # The ADC in the principal diffusion direction should be equal to the AD in # each voxel: pdd0 = dtifit.evecs[0, 0, 0, 0] sphere_pdd0 = dps.Sphere(x=pdd0[0], y=pdd0[1], z=pdd0[2]) assert_array_almost_equal(dtifit.adc(sphere_pdd0)[0, 0, 0], dtifit.ad[0, 0, 0], decimal=5) # Test that it works for cases in which the data is 1D dtifit = dm.fit(data[0, 0, 0]) sphere_pdd0 = dps.Sphere(x=pdd0[0], y=pdd0[1], z=pdd0[2]) assert_array_almost_equal(dtifit.adc(sphere_pdd0), dtifit.ad, decimal=5) def test_predict(): """ Test model prediction API """ psphere = get_sphere('symmetric362') bvecs = np.concatenate(([[1, 0, 0]], psphere.vertices)) bvals = np.zeros(len(bvecs)) + 1000 bvals[0] = 0 gtab = grad.gradient_table(bvals, bvecs) mevals = np.array(([0.0015, 0.0003, 0.0001], [0.0015, 0.0003, 0.0003])) mevecs = [np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]), np.array([[0, 0, 1], [0, 1, 0], [1, 0, 0]])] S = single_tensor(gtab, 100, mevals[0], mevecs[0], snr=None) dm = dti.TensorModel(gtab, 'LS', return_S0_hat=True) dmfit = dm.fit(S) assert_array_almost_equal(dmfit.predict(gtab, S0=100), S) assert_array_almost_equal(dmfit.predict(gtab), S) assert_array_almost_equal(dm.predict(dmfit.model_params, S0=100), S) fdata, fbvals, fbvecs = get_data() data = nib.load(fdata).get_data() # Make the data cube a bit larger: data = np.tile(data.T, 2).T gtab = grad.gradient_table(fbvals, fbvecs) dtim = dti.TensorModel(gtab) dtif = dtim.fit(data) S0 = np.mean(data[..., gtab.b0s_mask], -1) p = dtif.predict(gtab, S0) assert_equal(p.shape, data.shape) # Predict using S0_hat: dtim = dti.TensorModel(gtab, return_S0_hat=True) dtif = dtim.fit(data) p = dtif.predict(gtab) assert_equal(p.shape, data.shape) p = dtif.predict(gtab, S0) assert_equal(p.shape, data.shape) # Test iter_fit_tensor with S0_hat dtim = dti.TensorModel(gtab, step=2, return_S0_hat=True) dtif = dtim.fit(data) S0 = np.mean(data[..., gtab.b0s_mask], -1) p = dtif.predict(gtab, S0) assert_equal(p.shape, data.shape) # Use a smaller step in predicting: dtim = dti.TensorModel(gtab, step=2) dtif = dtim.fit(data) S0 = np.mean(data[..., 
gtab.b0s_mask], -1) p = dtif.predict(gtab, S0) assert_equal(p.shape, data.shape) # And with a scalar S0: S0 = 1 p = dtif.predict(gtab, S0) assert_equal(p.shape, data.shape) # Assign the step through kwarg: p = dtif.predict(gtab, S0, step=1) assert_equal(p.shape, data.shape) # And without S0: p = dtif.predict(gtab, step=1) assert_equal(p.shape, data.shape) def test_eig_from_lo_tri(): psphere = get_sphere('symmetric362') bvecs = np.concatenate(([[0, 0, 0]], psphere.vertices)) bvals = np.zeros(len(bvecs)) + 1000 bvals[0] = 0 gtab = grad.gradient_table(bvals, bvecs) mevals = np.array(([0.0015, 0.0003, 0.0001], [0.0015, 0.0003, 0.0003])) mevecs = [np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]), np.array([[0, 0, 1], [0, 1, 0], [1, 0, 0]])] S = np.array([[single_tensor(gtab, 100, mevals[0], mevecs[0], snr=None), single_tensor(gtab, 100, mevals[0], mevecs[0], snr=None)]]) dm = dti.TensorModel(gtab, 'LS') dmfit = dm.fit(S) lo_tri = lower_triangular(dmfit.quadratic_form) assert_array_almost_equal(dti.eig_from_lo_tri(lo_tri), dmfit.model_params) def test_min_signal_alone(): fdata, fbvals, fbvecs = get_data() data = nib.load(fdata).get_data() gtab = grad.gradient_table(fbvals, fbvecs) idx = tuple(np.array(np.where(data == np.min(data)))[:-1, 0]) ten_model = dti.TensorModel(gtab) fit_alone = ten_model.fit(data[idx]) fit_together = ten_model.fit(data) npt.assert_array_almost_equal(fit_together.model_params[idx], fit_alone.model_params, decimal=12) def test_decompose_tensor_nan(): D_fine = np.array([1.7e-3, 0.0, 0.3e-3, 0.0, 0.0, 0.2e-3]) D_alter = np.array([1.6e-3, 0.0, 0.4e-3, 0.0, 0.0, 0.3e-3]) D_nan = np.nan * np.ones(6) lref, vref = decompose_tensor(from_lower_triangular(D_fine)) lfine, vfine = _decompose_tensor_nan(from_lower_triangular(D_fine), from_lower_triangular(D_alter)) assert_array_almost_equal(lfine, np.array([1.7e-3, 0.3e-3, 0.2e-3])) assert_array_almost_equal(vfine, vref) lref, vref = decompose_tensor(from_lower_triangular(D_alter)) lalter, valter = _decompose_tensor_nan(from_lower_triangular(D_nan), from_lower_triangular(D_alter)) assert_array_almost_equal(lalter, np.array([1.6e-3, 0.4e-3, 0.3e-3])) assert_array_almost_equal(valter, vref) dipy-0.13.0/dipy/reconst/tests/test_fwdti.py000066400000000000000000000272531317371701200210650ustar00rootroot00000000000000""" Testing Free Water Elimination Model """ from __future__ import division, print_function, absolute_import import numpy as np import dipy.reconst.dti as dti import dipy.reconst.fwdti as fwdti from dipy.reconst.fwdti import fwdti_prediction from numpy.testing import (assert_array_almost_equal, assert_almost_equal) from nose.tools import assert_raises from dipy.reconst.dti import (from_lower_triangular, decompose_tensor, fractional_anisotropy) from dipy.reconst.fwdti import (lower_triangular_to_cholesky, cholesky_to_lower_triangular, nls_fit_tensor, wls_fit_tensor) from dipy.sims.voxel import (multi_tensor, single_tensor, all_tensor_evecs, multi_tensor_dki) from dipy.io.gradients import read_bvals_bvecs from dipy.core.gradients import gradient_table from dipy.data import get_data fimg, fbvals, fbvecs = get_data('small_64D') bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs) gtab = gradient_table(bvals, bvecs) # FW model requires multishell data bvals_2s = np.concatenate((bvals, bvals * 1.5), axis=0) bvecs_2s = np.concatenate((bvecs, bvecs), axis=0) gtab_2s = gradient_table(bvals_2s, bvecs_2s) # Simulation a typical DT and DW signal for no water contamination S0 = np.array(100) dt = np.array([0.0017, 0, 0.0003, 0, 0, 0.0003]) evals, 
evecs = decompose_tensor(from_lower_triangular(dt)) S_tissue = single_tensor(gtab_2s, S0=100, evals=evals, evecs=evecs, snr=None) dm = dti.TensorModel(gtab_2s, 'WLS') dtifit = dm.fit(S_tissue) FAdti = dtifit.fa MDdti = dtifit.md dtiparams = dtifit.model_params # Simulation of 8 voxels tested DWI = np.zeros((2, 2, 2, len(gtab_2s.bvals))) FAref = np.zeros((2, 2, 2)) MDref = np.zeros((2, 2, 2)) # Diffusion of tissue and water compartments are constant for all voxel mevals = np.array([[0.0017, 0.0003, 0.0003], [0.003, 0.003, 0.003]]) # volume fractions GTF = np.array([[[0.06, 0.71], [0.33, 0.91]], [[0., 0.], [0., 0.]]]) # S0 multivoxel S0m = 100 * np.ones((2, 2, 2)) # model_params ground truth (to be fill) model_params_mv = np.zeros((2, 2, 2, 13)) for i in range(2): for j in range(2): gtf = GTF[0, i, j] S, p = multi_tensor(gtab_2s, mevals, S0=100, angles=[(90, 0), (90, 0)], fractions=[(1-gtf) * 100, gtf*100], snr=None) DWI[0, i, j] = S FAref[0, i, j] = FAdti MDref[0, i, j] = MDdti R = all_tensor_evecs(p[0]) R = R.reshape((9)) model_params_mv[0, i, j] = np.concatenate(([0.0017, 0.0003, 0.0003], R, [gtf]), axis=0) def test_fwdti_singlevoxel(): # Simulation when water contamination is added gtf = 0.44444 # ground truth volume fraction mevals = np.array([[0.0017, 0.0003, 0.0003], [0.003, 0.003, 0.003]]) S_conta, peaks = multi_tensor(gtab_2s, mevals, S0=100, angles=[(90, 0), (90, 0)], fractions=[(1-gtf) * 100, gtf*100], snr=None) fwdm = fwdti.FreeWaterTensorModel(gtab_2s, 'WLS') fwefit = fwdm.fit(S_conta) FAfwe = fwefit.fa Ffwe = fwefit.f MDfwe = fwefit.md assert_almost_equal(FAdti, FAfwe, decimal=3) assert_almost_equal(Ffwe, gtf, decimal=3) assert_almost_equal(MDfwe, MDdti, decimal=3) # Test non-linear fit fwdm = fwdti.FreeWaterTensorModel(gtab_2s, 'NLS', cholesky=False) fwefit = fwdm.fit(S_conta) FAfwe = fwefit.fa Ffwe = fwefit.f MDfwe = fwefit.md assert_almost_equal(FAdti, FAfwe) assert_almost_equal(Ffwe, gtf) assert_almost_equal(MDfwe, MDdti) # Test cholesky fwdm = fwdti.FreeWaterTensorModel(gtab_2s, 'NLS', cholesky=True) fwefit = fwdm.fit(S_conta) FAfwe = fwefit.fa Ffwe = fwefit.f MDfwe = fwefit.md assert_almost_equal(FAdti, FAfwe) assert_almost_equal(Ffwe, gtf) assert_almost_equal(MDfwe, MDfwe) def test_fwdti_precision(): # Simulation when water contamination is added gtf = 0.63416 # ground truth volume fraction mevals = np.array([[0.0017, 0.0003, 0.0003], [0.003, 0.003, 0.003]]) S_conta, peaks = multi_tensor(gtab_2s, mevals, S0=100, angles=[(90, 0), (90, 0)], fractions=[(1-gtf) * 100, gtf*100], snr=None) fwdm = fwdti.FreeWaterTensorModel(gtab_2s, 'WLS', piterations=5) fwefit = fwdm.fit(S_conta) FAfwe = fwefit.fa Ffwe = fwefit.f MDfwe = fwefit.md assert_almost_equal(FAdti, FAfwe, decimal=5) assert_almost_equal(Ffwe, gtf, decimal=5) assert_almost_equal(MDfwe, MDdti, decimal=5) def test_fwdti_multi_voxel(): fwdm = fwdti.FreeWaterTensorModel(gtab_2s, 'NLS', cholesky=False) fwefit = fwdm.fit(DWI) FAfwe = fwefit.fa Ffwe = fwefit.f MDfwe = fwefit.md assert_almost_equal(FAfwe, FAref) assert_almost_equal(Ffwe, GTF) assert_almost_equal(MDfwe, MDref) # Test cholesky fwdm = fwdti.FreeWaterTensorModel(gtab_2s, 'NLS', cholesky=True) fwefit = fwdm.fit(DWI) FAfwe = fwefit.fa Ffwe = fwefit.f MDfwe = fwefit.md assert_almost_equal(FAfwe, FAref) assert_almost_equal(Ffwe, GTF) assert_almost_equal(MDfwe, MDref) def test_fwdti_predictions(): # single voxel case gtf = 0.50 # ground truth volume fraction angles = [(90, 0), (90, 0)] mevals = np.array([[0.0017, 0.0003, 0.0003], [0.003, 0.003, 0.003]]) S_conta, peaks = 
multi_tensor(gtab_2s, mevals, S0=100, angles=angles, fractions=[(1-gtf) * 100, gtf*100], snr=None) R = all_tensor_evecs(peaks[0]) R = R.reshape((9)) model_params = np.concatenate(([0.0017, 0.0003, 0.0003], R, [gtf]), axis=0) S_pred1 = fwdti_prediction(model_params, gtab_2s, S0=100) assert_array_almost_equal(S_pred1, S_conta) # Testing in model class fwdm = fwdti.FreeWaterTensorModel(gtab_2s) S_pred2 = fwdm.predict(model_params, S0=100) assert_array_almost_equal(S_pred2, S_conta) # Testing in fit class fwefit = fwdm.fit(S_conta) S_pred3 = fwefit.predict(gtab_2s, S0=100) assert_array_almost_equal(S_pred3, S_conta, decimal=5) # Multi voxel simulation S_pred1 = fwdti_prediction(model_params_mv, gtab_2s, S0=100) # function assert_array_almost_equal(S_pred1, DWI) S_pred2 = fwdm.predict(model_params_mv, S0=100) # Model class assert_array_almost_equal(S_pred2, DWI) fwefit = fwdm.fit(DWI) # Fit class S_pred3 = fwefit.predict(gtab_2s, S0=100) assert_array_almost_equal(S_pred3, DWI) def test_fwdti_errors(): # 1st error - if a unknown fit method is given to the FWTM assert_raises(ValueError, fwdti.FreeWaterTensorModel, gtab_2s, fit_method="pKT") # 2nd error - if incorrect mask is given fwdtiM = fwdti.FreeWaterTensorModel(gtab_2s) incorrect_mask = np.array([[True, True, False], [True, False, False]]) assert_raises(ValueError, fwdtiM.fit, DWI, mask=incorrect_mask) # 3rd error - if data with only one non zero b-value is given assert_raises(ValueError, fwdti.FreeWaterTensorModel, gtab) # Testing the correct usage fwdtiM = fwdti.FreeWaterTensorModel(gtab_2s, min_signal=1) correct_mask = np.zeros((2, 2, 2)) correct_mask[0, :, :] = 1 correct_mask = correct_mask > 0 fwdtiF = fwdtiM.fit(DWI, mask=correct_mask) assert_array_almost_equal(fwdtiF.fa, FAref) assert_array_almost_equal(fwdtiF.f, GTF) # 4th error - if a sigma is selected by no value of sigma is given fwdm = fwdti.FreeWaterTensorModel(gtab_2s, 'NLS', weighting='sigma') assert_raises(ValueError, fwdm.fit, DWI) def test_fwdti_restore(): # Restore has to work well even in nonproblematic cases # Simulate a signal corrupted by free water diffusion contamination gtf = 0.50 # ground truth volume fraction mevals = np.array([[0.0017, 0.0003, 0.0003], [0.003, 0.003, 0.003]]) S_conta, peaks = multi_tensor(gtab_2s, mevals, S0=100, angles=[(90, 0), (90, 0)], fractions=[(1-gtf) * 100, gtf*100], snr=None) fwdm = fwdti.FreeWaterTensorModel(gtab_2s, 'NLS', weighting='sigma', sigma=4) fwdtiF = fwdm.fit(S_conta) assert_array_almost_equal(fwdtiF.fa, FAdti) assert_array_almost_equal(fwdtiF.f, gtf) fwdm2 = fwdti.FreeWaterTensorModel(gtab_2s, 'NLS', weighting='gmm') fwdtiF2 = fwdm2.fit(S_conta) assert_array_almost_equal(fwdtiF2.fa, FAdti) assert_array_almost_equal(fwdtiF2.f, gtf) def test_cholesky_functions(): S, dt, kt = multi_tensor_dki(gtab, mevals, S0=100, angles=[(45., 45.), (45., 45.)], fractions=[80, 20]) R = lower_triangular_to_cholesky(dt) tensor = cholesky_to_lower_triangular(R) assert_array_almost_equal(dt, tensor) def test_fwdti_jac_multi_voxel(): fwdm = fwdti.FreeWaterTensorModel(gtab_2s, 'WLS') fwefit = fwdm.fit(DWI[0, :, :]) # no f transform fwdm = fwdti.FreeWaterTensorModel(gtab_2s, 'NLS', f_transform=False, jac=True) fwefit = fwdm.fit(DWI[0, :, :]) Ffwe = fwefit.f assert_array_almost_equal(Ffwe, GTF[0, :]) # with f transform fwdm = fwdti.FreeWaterTensorModel(gtab_2s, 'NLS', f_transform=True, jac=True) fwefit = fwdm.fit(DWI[0, :, :]) Ffwe = fwefit.f assert_array_almost_equal(Ffwe, GTF[0, :]) def test_standalone_functions(): # WLS procedure params = 
wls_fit_tensor(gtab_2s, DWI) assert_array_almost_equal(params[..., 12], GTF) fa = fractional_anisotropy(params[..., :3]) assert_array_almost_equal(fa, FAref) # NLS procedure params = nls_fit_tensor(gtab_2s, DWI) assert_array_almost_equal(params[..., 12], GTF) fa = fractional_anisotropy(params[..., :3]) assert_array_almost_equal(fa, FAref) def test_md_regularization(): # single voxel gtf = 0.97 # for this ground truth value, md is larger than 2.7e-3 mevals = np.array([[0.0017, 0.0003, 0.0003], [0.003, 0.003, 0.003]]) S_conta, peaks = multi_tensor(gtab_2s, mevals, S0=100, angles=[(90, 0), (90, 0)], fractions=[(1-gtf) * 100, gtf*100], snr=None) fwdm = fwdti.FreeWaterTensorModel(gtab_2s, 'NLS') fwefit = fwdm.fit(S_conta) assert_array_almost_equal(fwefit.fa, 0.0) assert_array_almost_equal(fwefit.md, 0.0) assert_array_almost_equal(fwefit.f, 1.0) # multi voxel DWI[0, 1, 1] = S_conta GTF[0, 1, 1] = 1 FAref[0, 1, 1] = 0 MDref[0, 1, 1] = 0 fwefit = fwdm.fit(DWI) assert_array_almost_equal(fwefit.fa, FAref) assert_array_almost_equal(fwefit.md, MDref) assert_array_almost_equal(fwefit.f, GTF) def test_negative_s0(): # single voxel gtf = 0.55 mevals = np.array([[0.0017, 0.0003, 0.0003], [0.003, 0.003, 0.003]]) S_conta, peaks = multi_tensor(gtab_2s, mevals, S0=100, angles=[(90, 0), (90, 0)], fractions=[(1-gtf) * 100, gtf*100], snr=None) S_conta[gtab_2s.bvals == 0] = -100 fwdm = fwdti.FreeWaterTensorModel(gtab_2s, 'NLS') fwefit = fwdm.fit(S_conta) assert_array_almost_equal(fwefit.fa, 0.0) assert_array_almost_equal(fwefit.md, 0.0) assert_array_almost_equal(fwefit.f, 0.0) # multi voxel DWI[0, 0, 1, gtab_2s.bvals == 0] = -100 GTF[0, 0, 1] = 0 FAref[0, 0, 1] = 0 MDref[0, 0, 1] = 0 fwefit = fwdm.fit(DWI) assert_array_almost_equal(fwefit.fa, FAref) assert_array_almost_equal(fwefit.md, MDref) assert_array_almost_equal(fwefit.f, GTF) dipy-0.13.0/dipy/reconst/tests/test_gqi.py000066400000000000000000000054571317371701200205320ustar00rootroot00000000000000import numpy as np from dipy.data import get_data, dsi_voxels from dipy.core.sphere import Sphere from dipy.core.gradients import gradient_table from dipy.sims.voxel import SticksAndBall from dipy.reconst.gqi import GeneralizedQSamplingModel from dipy.data import get_sphere from numpy.testing import (assert_equal, assert_almost_equal, run_module_suite) from dipy.reconst.tests.test_dsi import sticks_and_ball_dummies from dipy.core.subdivide_octahedron import create_unit_sphere from dipy.core.sphere_stats import angular_similarity from dipy.reconst.odf import gfa from dipy.direction.peaks import peak_directions def test_gqi(): # load symmetric 724 sphere sphere = get_sphere('symmetric724') # load icosahedron sphere sphere2 = create_unit_sphere(5) btable = np.loadtxt(get_data('dsi515btable')) bvals = btable[:, 0] bvecs = btable[:, 1:] gtab = gradient_table(bvals, bvecs) data, golden_directions = SticksAndBall(gtab, d=0.0015, S0=100, angles=[(0, 0), (90, 0)], fractions=[50, 50], snr=None) gq = GeneralizedQSamplingModel(gtab, method='gqi2', sampling_length=1.4) # symmetric724 gqfit = gq.fit(data) odf = gqfit.odf(sphere) directions, values, indices = peak_directions(odf, sphere, .35, 25) assert_equal(len(directions), 2) assert_almost_equal(angular_similarity(directions, golden_directions), 2, 1) # 5 subdivisions gqfit = gq.fit(data) odf2 = gqfit.odf(sphere2) directions, values, indices = peak_directions(odf2, sphere2, .35, 25) assert_equal(len(directions), 2) assert_almost_equal(angular_similarity(directions, golden_directions), 2, 1) sb_dummies = 
sticks_and_ball_dummies(gtab) for sbd in sb_dummies: data, golden_directions = sb_dummies[sbd] odf = gq.fit(data).odf(sphere2) directions, values, indices = peak_directions(odf, sphere2, .35, 25) if len(directions) <= 3: assert_equal(len(directions), len(golden_directions)) if len(directions) > 3: assert_equal(gfa(odf) < 0.1, True) def test_mvoxel_gqi(): data, gtab = dsi_voxels() sphere = get_sphere('symmetric724') gq = GeneralizedQSamplingModel(gtab, 'standard') gqfit = gq.fit(data) all_odfs = gqfit.odf(sphere) # Check that the first and last voxels each have 2 peaks odf = all_odfs[0, 0, 0] directions, values, indices = peak_directions(odf, sphere, .35, 25) assert_equal(directions.shape[0], 2) odf = all_odfs[-1, -1, -1] directions, values, indices = peak_directions(odf, sphere, .35, 25) assert_equal(directions.shape[0], 2) if __name__ == "__main__": run_module_suite() dipy-0.13.0/dipy/reconst/tests/test_interpolate.py000066400000000000000000000042211317371701200222640ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import from dipy.utils.six.moves import xrange from nose.tools import assert_equal, assert_raises, assert_true, assert_false from numpy.testing import (assert_array_equal, assert_array_almost_equal, assert_equal) import numpy as np from dipy.reconst.interpolate import (NearestNeighborInterpolator, TriLinearInterpolator, OutsideImage) def test_NearestNeighborInterpolator(): # Place integers values at the center of every voxel l, m, n, o = np.ogrid[0:6.01, 0:6.01, 0:6.01, 0:4] data = l + m + n + o nni = NearestNeighborInterpolator(data, (1, 1, 1)) a, b, c = np.mgrid[.5:6.5:1.6, .5:6.5:2.7, .5:6.5:3.8] for ii in xrange(a.size): x = a.flat[ii] y = b.flat[ii] z = c.flat[ii] expected_result = int(x) + int(y) + int(z) + o.ravel() assert_array_equal(nni[x, y, z], expected_result) ind = np.array([x, y, z]) assert_array_equal(nni[ind], expected_result) assert_raises(OutsideImage, nni.__getitem__, (-.1, 0, 0)) assert_raises(OutsideImage, nni.__getitem__, (0, 8.2, 0)) def test_TriLinearInterpolator(): # Place (0, 0, 0) at the bottom left of the image l, m, n, o = np.ogrid[.5:6.51, .5:6.51, .5:6.51, 0:4] data = l + m + n + o data = data.astype("float32") tli = TriLinearInterpolator(data, (1, 1, 1)) a, b, c = np.mgrid[.5:6.5:1.6, .5:6.5:2.7, .5:6.5:3.8] for ii in xrange(a.size): x = a.flat[ii] y = b.flat[ii] z = c.flat[ii] expected_result = x + y + z + o.ravel() assert_array_almost_equal(tli[x, y, z], expected_result, decimal=5) ind = np.array([x, y, z]) assert_array_almost_equal(tli[ind], expected_result) # Index at 0 expected_value = np.arange(4) + 1.5 assert_array_almost_equal(tli[0, 0, 0], expected_value) # Index at shape expected_value = np.arange(4) + (6.5 * 3) assert_array_almost_equal(tli[7, 7, 7], expected_value) assert_raises(OutsideImage, tli.__getitem__, (-.1, 0, 0)) assert_raises(OutsideImage, tli.__getitem__, (0, 7.01, 0)) dipy-0.13.0/dipy/reconst/tests/test_ivim.py000066400000000000000000000365251317371701200207160ustar00rootroot00000000000000""" Testing the Intravoxel incoherent motion module The values of the various parameters used in the tests are inspired by the study of the IVIM model applied to MR images of the brain by Federau, Christian, et al. [1]. References ---------- .. [1] Federau, Christian, et al. "Quantitative measurement of brain perfusion with intravoxel incoherent motion MR imaging." Radiology 265.3 (2012): 874-881. 
""" import numpy as np from numpy.testing import (assert_array_equal, assert_array_almost_equal, assert_raises, assert_array_less, run_module_suite, assert_warns, dec) from dipy.reconst.ivim import ivim_prediction, IvimModel from dipy.core.gradients import gradient_table, generate_bvecs from dipy.sims.voxel import multi_tensor from distutils.version import LooseVersion import scipy SCIPY_VERSION = LooseVersion(scipy.version.short_version) # Let us generate some data for testing. bvals = np.array([0., 10., 20., 30., 40., 60., 80., 100., 120., 140., 160., 180., 200., 300., 400., 500., 600., 700., 800., 900., 1000.]) N = len(bvals) bvecs = generate_bvecs(N) gtab = gradient_table(bvals, bvecs.T) S0, f, D_star, D = 1000.0, 0.132, 0.00885, 0.000921 # params for a single voxel params = np.array([S0, f, D_star, D]) mevals = np.array(([D_star, D_star, D_star], [D, D, D])) # This gives an isotropic signal. signal = multi_tensor(gtab, mevals, snr=None, S0=S0, fractions=[f * 100, 100 * (1 - f)]) # Single voxel data data_single = signal[0] data_multi = np.zeros((2, 2, 1, len(gtab.bvals))) data_multi[0, 0, 0] = data_multi[0, 1, 0] = data_multi[ 1, 0, 0] = data_multi[1, 1, 0] = data_single ivim_params = np.zeros((2, 2, 1, 4)) ivim_params[0, 0, 0] = ivim_params[0, 1, 0] = params ivim_params[1, 0, 0] = ivim_params[1, 1, 0] = params ivim_model = IvimModel(gtab) ivim_model_one_stage = IvimModel(gtab) ivim_fit_single = ivim_model.fit(data_single) ivim_fit_multi = ivim_model.fit(data_multi) ivim_fit_single_one_stage = ivim_model_one_stage.fit(data_single) ivim_fit_multi_one_stage = ivim_model_one_stage.fit(data_multi) bvals_no_b0 = np.array([5., 10., 20., 30., 40., 60., 80., 100., 120., 140., 160., 180., 200., 300., 400., 500., 600., 700., 800., 900., 1000.]) bvecs_no_b0 = generate_bvecs(N) gtab_no_b0 = gradient_table(bvals_no_b0, bvecs.T) bvals_with_multiple_b0 = np.array([0., 0., 0., 0., 40., 60., 80., 100., 120., 140., 160., 180., 200., 300., 400., 500., 600., 700., 800., 900., 1000.]) bvecs_with_multiple_b0 = generate_bvecs(N) gtab_with_multiple_b0 = gradient_table(bvals_with_multiple_b0, bvecs_with_multiple_b0.T) noisy_single = np.array([4243.71728516, 4317.81298828, 4244.35693359, 4439.36816406, 4420.06201172, 4152.30078125, 4114.34912109, 4104.59375, 4151.61914062, 4003.58374023, 4013.68408203, 3906.39428711, 3909.06079102, 3495.27197266, 3402.57006836, 3163.10180664, 2896.04003906, 2663.7253418, 2614.87695312, 2316.55371094, 2267.7722168]) noisy_multi = np.zeros((2, 2, 1, len(gtab.bvals))) noisy_multi[0, 1, 0] = noisy_multi[ 1, 0, 0] = noisy_multi[1, 1, 0] = noisy_single noisy_multi[0, 0, 0] = data_single def single_exponential(S0, D, bvals): return S0 * np.exp(-bvals * D) def test_single_voxel_fit(): """ Test the implementation of the fitting for a single voxel. Here, we will use the multi_tensor function to generate a bi-exponential signal. The multi_tensor generates a multi tensor signal and expects eigenvalues of each tensor in mevals. Our basic test requires a scalar signal isotropic signal and hence we set the same eigenvalue in all three directions to generate the required signal. The bvals, f, D_star and D are inspired from the paper by Federau, Christian, et al. We use the function "generate_bvecs" to simulate bvectors corresponding to the bvalues. In the two stage fitting routine, initially we fit the signal values for bvals less than the specified split_b using the TensorModel and get an intial guess for f and D. Then, using these parameters we fit the entire data for all bvalues. 
""" est_signal = ivim_prediction(ivim_fit_single.model_params, gtab) assert_array_equal(est_signal.shape, data_single.shape) assert_array_almost_equal(ivim_fit_single.model_params, params) assert_array_almost_equal(est_signal, data_single) # Test predict function for single voxel p = ivim_fit_single.predict(gtab) assert_array_equal(p.shape, data_single.shape) assert_array_almost_equal(p, data_single) def test_multivoxel(): """Test fitting with multivoxel data. We generate a multivoxel signal to test the fitting for multivoxel data. This is to ensure that the fitting routine takes care of signals packed as 1D, 2D or 3D arrays. """ ivim_fit_multi = ivim_model.fit(data_multi) est_signal = ivim_fit_multi.predict(gtab, S0=1.) assert_array_equal(est_signal.shape, data_multi.shape) assert_array_almost_equal(ivim_fit_multi.model_params, ivim_params) assert_array_almost_equal(est_signal, data_multi) def test_ivim_errors(): """ Test if errors raised in the module are working correctly. Scipy introduced bounded least squares fitting in the version 0.17 and is not supported by the older versions. Initializing an IvimModel with bounds for older Scipy versions should raise an error. """ # Run the test for Scipy versions less than 0.17 if SCIPY_VERSION < LooseVersion('0.17'): assert_raises(ValueError, IvimModel, gtab, bounds=([0., 0., 0., 0.], [np.inf, 1., 1., 1.])) else: ivim_model = IvimModel(gtab, bounds=([0., 0., 0., 0.], [np.inf, 1., 1., 1.])) ivim_fit = ivim_model.fit(data_multi) est_signal = ivim_fit.predict(gtab, S0=1.) assert_array_equal(est_signal.shape, data_multi.shape) assert_array_almost_equal(ivim_fit.model_params, ivim_params) assert_array_almost_equal(est_signal, data_multi) def test_mask(): """ Test whether setting incorrect mask raises and error """ mask_correct = data_multi[..., 0] > 0.2 mask_not_correct = np.array([[False, True, False], [True, False]]) ivim_fit = ivim_model.fit(data_multi, mask_correct) est_signal = ivim_fit.predict(gtab, S0=1.) assert_array_equal(est_signal.shape, data_multi.shape) assert_array_almost_equal(est_signal, data_multi) assert_array_almost_equal(ivim_fit.model_params, ivim_params) assert_raises(ValueError, ivim_model.fit, data_multi, mask=mask_not_correct) def test_with_higher_S0(): """ Test whether fitting works for S0 > 1. """ # params for a single voxel S0_2 = 1000. params2 = np.array([S0_2, f, D_star, D]) mevals2 = np.array(([D_star, D_star, D_star], [D, D, D])) # This gives an isotropic signal. signal2 = multi_tensor(gtab, mevals2, snr=None, S0=S0_2, fractions=[f * 100, 100 * (1 - f)]) # Single voxel data data_single2 = signal2[0] ivim_fit = ivim_model.fit(data_single2) est_signal = ivim_fit.predict(gtab) assert_array_equal(est_signal.shape, data_single2.shape) assert_array_almost_equal(est_signal, data_single2) assert_array_almost_equal(ivim_fit.model_params, params2) def test_bounds_x0(): """ Test to check if setting bounds for signal where initial value is higher than subsequent values works. These values are from the IVIM dataset which can be obtained by using the `read_ivim` function from dipy.data.fetcher. These are values from the voxel [160, 98, 33] which can be obtained by : .. 
code-block:: python from dipy.data.fetcher import read_ivim img, gtab = read_ivim() data = img.get_data() signal = data[160, 98, 33, :] """ test_signal = np.array([4574.34814453, 4745.18164062, 4759.51806641, 4618.24951172, 4665.63623047, 4568.046875, 4525.90478516, 4734.54785156, 4526.41357422, 4299.99414062, 4256.61279297, 4254.50292969, 4098.74707031, 3776.10375977, 3614.0769043, 3440.56445312, 3146.52294922, 3006.94287109, 2879.69580078, 2728.44018555, 2600.09472656]) x0_test = np.array([1., 0.13, 0.001, 0.0001]) test_signal = ivim_prediction(x0_test, gtab) ivim_fit = ivim_model.fit(test_signal) est_signal = ivim_fit.predict(gtab) assert_array_equal(est_signal.shape, test_signal.shape) def test_predict(): """ Test the model prediction API. The predict method is already used in previous tests for estimation of the signal. But here, we will test is separately. """ assert_array_almost_equal(ivim_fit_single.predict(gtab), data_single) assert_array_almost_equal(ivim_model.predict(ivim_fit_single.model_params, gtab), data_single) ivim_fit_multi = ivim_model.fit(data_multi) assert_array_almost_equal(ivim_fit_multi.predict(gtab), data_multi) def test_fit_object(): """ Test the method of IvimFit class """ assert_raises(IndexError, ivim_fit_single.__getitem__, (-.1, 0, 0)) # Check if the S0 called is matching assert_array_almost_equal( ivim_fit_single.__getitem__(0).model_params, 1000.) ivim_fit_multi = ivim_model.fit(data_multi) # Should raise a TypeError if the arguments are not passed as tuple assert_raises(TypeError, ivim_fit_multi.__getitem__, -.1, 0) # Should return IndexError if invalid indices are passed assert_raises(IndexError, ivim_fit_multi.__getitem__, (100, -0)) assert_raises(IndexError, ivim_fit_multi.__getitem__, (100, -0, 2)) assert_raises(IndexError, ivim_fit_multi.__getitem__, (-100, 0)) assert_raises(IndexError, ivim_fit_multi.__getitem__, [-100, 0]) assert_raises(IndexError, ivim_fit_multi.__getitem__, (1, 0, 0, 3, 4)) # Check if the get item returns the S0 value for voxel (1,0,0) assert_array_almost_equal( ivim_fit_multi.__getitem__((1, 0, 0)).model_params[0], data_multi[1, 0, 0][0]) def test_shape(): """ Test if `shape` in `IvimFit` class gives the correct output. """ assert_array_equal(ivim_fit_single.shape, ()) ivim_fit_multi = ivim_model.fit(data_multi) assert_array_equal(ivim_fit_multi.shape, (2, 2, 1)) def test_multiple_b0(): # Generate a signal with multiple b0 # This gives an isotropic signal. signal = multi_tensor(gtab_with_multiple_b0, mevals, snr=None, S0=S0, fractions=[f * 100, 100 * (1 - f)]) # Single voxel data data_single = signal[0] ivim_model_multiple_b0 = IvimModel(gtab_with_multiple_b0) x0_estimated = ivim_model_multiple_b0.fit(data_single) # Test if all signals are positive def test_no_b0(): assert_raises(ValueError, IvimModel, gtab_no_b0) @dec.skipif(SCIPY_VERSION < LooseVersion('0.17'), "Gives wrong value for f") def test_noisy_fit(): """ Test fitting for noisy signals. This tests whether the threshold condition applies correctly and returns the linear fitting parameters. For older scipy versions, the returned value of `f` from a linear fit is around 135 and D and D_star values are equal. Hence doing a test based on Scipy version. 
""" model_one_stage = IvimModel(gtab) fit_one_stage = model_one_stage.fit(noisy_single) assert_array_less(fit_one_stage.model_params, [10000., 0.3, .01, 0.001]) def test_S0(): """ Test if the `IvimFit` class returns the correct S0 """ assert_array_almost_equal(ivim_fit_single.S0_predicted, S0) assert_array_almost_equal(ivim_fit_multi.S0_predicted, ivim_params[..., 0]) def test_perfusion_fraction(): """ Test if the `IvimFit` class returns the correct f """ assert_array_almost_equal(ivim_fit_single.perfusion_fraction, f) assert_array_almost_equal( ivim_fit_multi.perfusion_fraction, ivim_params[..., 1]) def test_D_star(): """ Test if the `IvimFit` class returns the correct D_star """ assert_array_almost_equal(ivim_fit_single.D_star, D_star) assert_array_almost_equal(ivim_fit_multi.D_star, ivim_params[..., 2]) def test_D(): """ Test if the `IvimFit` class returns the correct D """ assert_array_almost_equal(ivim_fit_single.D, D) assert_array_almost_equal(ivim_fit_multi.D, ivim_params[..., 3]) def test_estimate_linear_fit(): """ Test the linear estimates considering a single exponential fit. """ data_single_exponential_D = single_exponential(S0, D, gtab.bvals) assert_array_almost_equal(ivim_model.estimate_linear_fit( data_single_exponential_D, split_b=500., less_than=False), (S0, D)) data_single_exponential_D_star = single_exponential(S0, D_star, gtab.bvals) assert_array_almost_equal(ivim_model.estimate_linear_fit( data_single_exponential_D_star, split_b=100., less_than=True), (S0, D_star)) def test_estimate_f_D_star(): """ Test if the `estimate_f_D_star` returns the correct parameters after a non-linear fit. """ params_f_D = f + 0.001, D + 0.0001 assert_array_almost_equal(ivim_model.estimate_f_D_star(params_f_D, data_single, S0, D), (f, D_star)) def test_fit_one_stage(): """ Test to check the results for the one_stage linear fit. """ model = IvimModel(gtab, two_stage=False) fit = model.fit(data_single) # assert_array_almost_equal() linear_fit_params = [9.88834140e+02, 1.19707191e-01, 7.91176970e-03, 9.30095210e-04] linear_fit_signal = [988.83414044, 971.77122546, 955.46786293, 939.87125905, 924.93258982, 896.85182201, 870.90346447, 846.81187693, 824.34108781, 803.28900104, 783.48245048, 764.77297789, 747.03322866, 669.54798887, 605.03328304, 549.00852235, 499.21077611, 454.40299244, 413.83192296, 376.98072773, 343.45531017] assert_array_almost_equal(fit.model_params, linear_fit_params) assert_array_almost_equal(fit.predict(gtab), linear_fit_signal) def test_leastsq_failing(): """ Test for cases where leastsq fitting fails and the results from a linear fit is returned. """ fit_single = ivim_model.fit(noisy_single) # Test for the S0 and D values assert_array_almost_equal(fit_single.S0_predicted, 4356.268901117833) assert_array_almost_equal(fit_single.D, 6.936684e-04) def test_leastsq_error(): """ Test error handling of the `_leastsq` method works when unfeasible x0 is passed. If an unfeasible x0 value is passed using which leastsq fails, the x0 value is returned as it is. 
""" fit = ivim_model._leastsq(data_single, [-1, -1, -1, -1]) assert_array_almost_equal(fit, [-1, -1, -1, -1]) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/reconst/tests/test_mapmri.py000066400000000000000000001010051317371701200212210ustar00rootroot00000000000000import numpy as np from dipy.data import get_gtab_taiwan_dsi from numpy.testing import (assert_almost_equal, assert_array_almost_equal, assert_equal, run_module_suite, assert_raises) from dipy.reconst.mapmri import MapmriModel, mapmri_index_matrix from dipy.reconst import dti, mapmri from dipy.sims.voxel import (MultiTensor, multi_tensor_pdf, single_tensor, cylinders_and_ball_soderman) from scipy.special import gamma from scipy.misc import factorial from dipy.data import get_sphere from dipy.sims.voxel import add_noise import scipy.integrate as integrate from dipy.core.sphere_stats import angular_similarity from dipy.direction.peaks import peak_directions from dipy.reconst.odf import gfa from dipy.reconst.tests.test_dsi import sticks_and_ball_dummies from dipy.core.subdivide_octahedron import create_unit_sphere from dipy.reconst.shm import sh_to_sf import time def int_func(n): f = np.sqrt(2) * factorial(n) / float(((gamma(1 + n / 2.0)) * np.sqrt(2**(n + 1) * factorial(n)))) return f def generate_signal_crossing(gtab, lambda1, lambda2, lambda3, angle2=60): mevals = np.array(([lambda1, lambda2, lambda3], [lambda1, lambda2, lambda3])) angl = [(0, 0), (angle2, 0)] S, sticks = MultiTensor(gtab, mevals, S0=100.0, angles=angl, fractions=[50, 50], snr=None) return S, sticks def test_orthogonality_basis_functions(): # numerical integration parameters diffusivity = 0.0015 qmin = 0 qmax = 1000 int1 = integrate.quad(lambda x: np.real(mapmri.mapmri_phi_1d(0, x, diffusivity)) * np.real(mapmri.mapmri_phi_1d(2, x, diffusivity)), qmin, qmax)[0] int2 = integrate.quad(lambda x: np.real(mapmri.mapmri_phi_1d(2, x, diffusivity)) * np.real(mapmri.mapmri_phi_1d(4, x, diffusivity)), qmin, qmax)[0] int3 = integrate.quad(lambda x: np.real(mapmri.mapmri_phi_1d(4, x, diffusivity)) * np.real(mapmri.mapmri_phi_1d(6, x, diffusivity)), qmin, qmax)[0] int4 = integrate.quad(lambda x: np.real(mapmri.mapmri_phi_1d(6, x, diffusivity)) * np.real(mapmri.mapmri_phi_1d(8, x, diffusivity)), qmin, qmax)[0] # checking for first 5 basis functions if they are indeed orthogonal assert_almost_equal(int1, 0.) assert_almost_equal(int2, 0.) assert_almost_equal(int3, 0.) assert_almost_equal(int4, 0.) 
# do the same for the isotropic mapmri basis functions # we already know the spherical harmonics are orthonormal # only check j>0, l=0 basis functions C1 = mapmri.mapmri_isotropic_radial_pdf_basis(1, 0, diffusivity, 0) C2 = mapmri.mapmri_isotropic_radial_pdf_basis(2, 0, diffusivity, 0) C3 = mapmri.mapmri_isotropic_radial_pdf_basis(3, 0, diffusivity, 0) C4 = mapmri.mapmri_isotropic_radial_pdf_basis(4, 0, diffusivity, 0) C5 = mapmri.mapmri_isotropic_radial_pdf_basis(4, 0, diffusivity, 0) int1 = integrate.quad(lambda q: mapmri.mapmri_isotropic_radial_signal_basis( 1, 0, diffusivity, q) * mapmri.mapmri_isotropic_radial_signal_basis( 2, 0, diffusivity, q) * q ** 2, qmin, qmax)[0] int2 = integrate.quad(lambda q: mapmri.mapmri_isotropic_radial_signal_basis( 2, 0, diffusivity, q) * mapmri.mapmri_isotropic_radial_signal_basis( 3, 0, diffusivity, q) * q ** 2, qmin, qmax)[0] int3 = integrate.quad(lambda q: mapmri.mapmri_isotropic_radial_signal_basis( 3, 0, diffusivity, q) * mapmri.mapmri_isotropic_radial_signal_basis( 4, 0, diffusivity, q) * q ** 2, qmin, qmax)[0] int4 = integrate.quad(lambda q: mapmri.mapmri_isotropic_radial_signal_basis( 4, 0, diffusivity, q) * mapmri.mapmri_isotropic_radial_signal_basis( 5, 0, diffusivity, q) * q ** 2, qmin, qmax)[0] # checking for first 5 basis functions if they are indeed orthogonal assert_almost_equal(int1, 0.) assert_almost_equal(int2, 0.) assert_almost_equal(int3, 0.) assert_almost_equal(int4, 0.) def test_mapmri_number_of_coefficients(radial_order=6): indices = mapmri_index_matrix(radial_order) n_c = indices.shape[0] F = radial_order / 2 n_gt = np.round(1 / 6.0 * (F + 1) * (F + 2) * (4 * F + 3)) assert_equal(n_c, n_gt) def test_mapmri_initialize_radial_error(): """ Test initialization conditions """ gtab = get_gtab_taiwan_dsi() # No negative radial_order allowed assert_raises(ValueError, MapmriModel, gtab, radial_order=-1) # No odd radial order allowed: assert_raises(ValueError, MapmriModel, gtab, radial_order=3) def test_mapmri_initialize_gcv(): """ Test initialization conditions """ gtab = get_gtab_taiwan_dsi() # When string is provided it has to be "GCV" assert_raises(ValueError, MapmriModel, gtab, laplacian_weighting="notGCV") def test_mapmri_initialize_pos_radius(): """ Test initialization conditions """ gtab = get_gtab_taiwan_dsi() # When string is provided it has to be "adaptive" assert_raises(ValueError, MapmriModel, gtab, positivity_constraint=True, pos_radius="notadaptive") # When a number is provided it has to be positive assert_raises(ValueError, MapmriModel, gtab, positivity_constraint=True, pos_radius=-1) def test_mapmri_signal_fitting(radial_order=6): gtab = get_gtab_taiwan_dsi() l1, l2, l3 = [0.0015, 0.0003, 0.0003] S, _ = generate_signal_crossing(gtab, l1, l2, l3) mapm = MapmriModel(gtab, radial_order=radial_order, laplacian_weighting=0.02) mapfit = mapm.fit(S) S_reconst = mapfit.predict(gtab, 1.0) # test the signal reconstruction S = S / S[0] nmse_signal = np.sqrt(np.sum((S - S_reconst) ** 2)) / (S.sum()) assert_almost_equal(nmse_signal, 0.0, 3) # Test with multidimensional signals: mapm = MapmriModel(gtab, radial_order=radial_order, laplacian_weighting=0.02) # Each voxel is identical: mapfit = mapm.fit(S[:, None, None].T * np.ones((3, 3, 3, S.shape[0]))) # Predict back with an array of ones or a single value: for S0 in [S[0], np.ones((3, 3, 3, 203))]: S_reconst = mapfit.predict(gtab, S0=S0) # test the signal reconstruction for one voxel: nmse_signal = (np.sqrt(np.sum((S - S_reconst[0, 0, 0]) ** 2)) / (S.sum())) 
assert_almost_equal(nmse_signal, 0.0, 3) # do the same for isotropic implementation mapm = MapmriModel(gtab, radial_order=radial_order, laplacian_weighting=0.0001, anisotropic_scaling=False) mapfit = mapm.fit(S) S_reconst = mapfit.predict(gtab, 1.0) # test the signal reconstruction S = S / S[0] nmse_signal = np.sqrt(np.sum((S - S_reconst) ** 2)) / (S.sum()) assert_almost_equal(nmse_signal, 0.0, 3) # do the same without the positivity constraint: mapm = MapmriModel(gtab, radial_order=radial_order, laplacian_weighting=0.0001, positivity_constraint=False, anisotropic_scaling=False) mapfit = mapm.fit(S) S_reconst = mapfit.predict(gtab, 1.0) # test the signal reconstruction S = S / S[0] nmse_signal = np.sqrt(np.sum((S - S_reconst) ** 2)) / (S.sum()) assert_almost_equal(nmse_signal, 0.0, 3) # Repeat with a gtab with big_delta and small_delta: gtab.big_delta = 5 gtab.small_delta = 3 mapm = MapmriModel(gtab, radial_order=radial_order, laplacian_weighting=0.0001, positivity_constraint=False, anisotropic_scaling=False) mapfit = mapm.fit(S) S_reconst = mapfit.predict(gtab, 1.0) # test the signal reconstruction S = S / S[0] nmse_signal = np.sqrt(np.sum((S - S_reconst) ** 2)) / (S.sum()) assert_almost_equal(nmse_signal, 0.0, 3) if mapmri.have_cvxpy: # Positivity constraint and anisotropic scaling: mapm = MapmriModel(gtab, radial_order=radial_order, laplacian_weighting=0.0001, positivity_constraint=True, anisotropic_scaling=False, pos_radius=2) mapfit = mapm.fit(S) S_reconst = mapfit.predict(gtab, 1.0) # test the signal reconstruction S = S / S[0] nmse_signal = np.sqrt(np.sum((S - S_reconst) ** 2)) / (S.sum()) assert_almost_equal(nmse_signal, 0.0, 3) # Positivity constraint and anisotropic scaling: mapm = MapmriModel(gtab, radial_order=radial_order, laplacian_weighting=None, positivity_constraint=True, anisotropic_scaling=False, pos_radius=2) mapfit = mapm.fit(S) S_reconst = mapfit.predict(gtab, 1.0) # test the signal reconstruction S = S / S[0] nmse_signal = np.sqrt(np.sum((S - S_reconst) ** 2)) / (S.sum()) assert_almost_equal(nmse_signal, 0.0, 2) def test_mapmri_isotropic_static_scale_factor(radial_order=6): gtab = get_gtab_taiwan_dsi() D = 0.7e-3 tau = 1 / (4 * np.pi ** 2) mu = np.sqrt(D * 2 * tau) l1, l2, l3 = [D, D, D] S = single_tensor(gtab, evals=np.r_[l1, l2, l3]) S_array = np.tile(S, (5, 1)) stat_weight = 0.1 mapm_scale_stat_reg_stat = MapmriModel(gtab, radial_order=radial_order, anisotropic_scaling=False, dti_scale_estimation=False, static_diffusivity=D, laplacian_regularization=True, laplacian_weighting=stat_weight) mapm_scale_adapt_reg_stat = MapmriModel(gtab, radial_order=radial_order, anisotropic_scaling=False, dti_scale_estimation=True, laplacian_regularization=True, laplacian_weighting=stat_weight) start = time.time() mapf_scale_stat_reg_stat = mapm_scale_stat_reg_stat.fit(S_array) time_scale_stat_reg_stat = time.time() - start start = time.time() mapf_scale_adapt_reg_stat = mapm_scale_adapt_reg_stat.fit(S_array) time_scale_adapt_reg_stat = time.time() - start # test if indeed the scale factor is fixed now assert_equal(np.all(mapf_scale_stat_reg_stat.mu == mu), True) # test if computation time is shorter assert_equal(time_scale_stat_reg_stat < time_scale_adapt_reg_stat, True) # check if the fitted signal is the same assert_almost_equal(mapf_scale_stat_reg_stat.fitted_signal(), mapf_scale_adapt_reg_stat.fitted_signal()) def test_mapmri_signal_fitting_over_radial_order(order_max=8): gtab = get_gtab_taiwan_dsi() l1, l2, l3 = [0.0012, 0.0003, 0.0003] S, _ = generate_signal_crossing(gtab, l1, 
l2, l3, angle2=60) # take radial order 0, 4 and 8 orders = [0, 4, 8] error_array = np.zeros(len(orders)) for i, order in enumerate(orders): mapm = MapmriModel(gtab, radial_order=order, laplacian_regularization=False) mapfit = mapm.fit(S) S_reconst = mapfit.predict(gtab, 100.0) error_array[i] = np.mean((S - S_reconst) ** 2) # check if the fitting error decreases as radial order increases assert_equal(np.diff(error_array) < 0., True) def test_mapmri_pdf_integral_unity(radial_order=6): gtab = get_gtab_taiwan_dsi() l1, l2, l3 = [0.0015, 0.0003, 0.0003] S, _ = generate_signal_crossing(gtab, l1, l2, l3) sphere = get_sphere('symmetric724') # test MAPMRI fitting mapm = MapmriModel(gtab, radial_order=radial_order, laplacian_weighting=0.02) mapfit = mapm.fit(S) c_map = mapfit.mapmri_coeff # test if the analytical integral of the pdf is equal to one indices = mapmri_index_matrix(radial_order) integral = 0 for i in range(indices.shape[0]): n1, n2, n3 = indices[i] integral += c_map[i] * int_func(n1) * int_func(n2) * int_func(n3) assert_almost_equal(integral, 1.0, 3) # test if numerical integral of odf is equal to one odf = mapfit.odf(sphere, s=0) odf_sum = odf.sum() / sphere.vertices.shape[0] * (4 * np.pi) assert_almost_equal(odf_sum, 1.0, 2) # do the same for isotropic implementation radius_max = 0.04 # 40 microns gridsize = 17 r_points = mapmri.create_rspace(gridsize, radius_max) mapm = MapmriModel(gtab, radial_order=radial_order, laplacian_weighting=0.02, anisotropic_scaling=False) mapfit = mapm.fit(S) pdf = mapfit.pdf(r_points) pdf[r_points[:, 2] == 0.] /= 2 # for antipodal symmetry on z-plane point_volume = (radius_max / (gridsize // 2)) ** 3 integral = pdf.sum() * point_volume * 2 assert_almost_equal(integral, 1.0, 3) odf = mapfit.odf(sphere, s=0) odf_sum = odf.sum() / sphere.vertices.shape[0] * (4 * np.pi) assert_almost_equal(odf_sum, 1.0, 2) def test_mapmri_compare_fitted_pdf_with_multi_tensor(radial_order=6): gtab = get_gtab_taiwan_dsi() l1, l2, l3 = [0.0015, 0.0003, 0.0003] S, _ = generate_signal_crossing(gtab, l1, l2, l3) radius_max = 0.02 # 40 microns gridsize = 10 r_points = mapmri.create_rspace(gridsize, radius_max) # test MAPMRI fitting mapm = MapmriModel(gtab, radial_order=radial_order, laplacian_weighting=0.0001) mapfit = mapm.fit(S) # compare the mapmri pdf with the ground truth multi_tensor pdf mevals = np.array(([l1, l2, l3], [l1, l2, l3])) angl = [(0, 0), (60, 0)] pdf_mt = multi_tensor_pdf(r_points, mevals=mevals, angles=angl, fractions=[50, 50]) pdf_map = mapfit.pdf(r_points) nmse_pdf = np.sqrt(np.sum((pdf_mt - pdf_map) ** 2)) / (pdf_mt.sum()) assert_almost_equal(nmse_pdf, 0.0, 2) def test_mapmri_metrics_anisotropic(radial_order=6): gtab = get_gtab_taiwan_dsi() l1, l2, l3 = [0.0015, 0.0003, 0.0003] S, _ = generate_signal_crossing(gtab, l1, l2, l3, angle2=0) # test MAPMRI q-space indices mapm = MapmriModel(gtab, radial_order=radial_order, laplacian_regularization=False) mapfit = mapm.fit(S) tau = 1 / (4 * np.pi ** 2) # ground truth indices estimated from the DTI tensor rtpp_gt = 1. / (2 * np.sqrt(np.pi * l1 * tau)) rtap_gt = ( 1. / (2 * np.sqrt(np.pi * l2 * tau)) * 1. / (2 * np.sqrt(np.pi * l3 * tau)) ) rtop_gt = rtpp_gt * rtap_gt msd_gt = 2 * (l1 + l2 + l3) * tau qiv_gt = ( (64 * np.pi ** (7 / 2.) 
* (l1 * l2 * l3 * tau ** 3) ** (3 / 2.)) / ((l2 * l3 + l1 * (l2 + l3)) * tau ** 2) ) assert_almost_equal(mapfit.rtap(), rtap_gt, 5) assert_almost_equal(mapfit.rtpp(), rtpp_gt, 5) assert_almost_equal(mapfit.rtop(), rtop_gt, 5) assert_almost_equal(mapfit.ng(), 0., 5) assert_almost_equal(mapfit.ng_parallel(), 0., 5) assert_almost_equal(mapfit.ng_perpendicular(), 0., 5) assert_almost_equal(mapfit.msd(), msd_gt, 5) assert_almost_equal(mapfit.qiv(), qiv_gt, 5) def test_mapmri_metrics_isotropic(radial_order=6): gtab = get_gtab_taiwan_dsi() l1, l2, l3 = [0.0003, 0.0003, 0.0003] # isotropic diffusivities S = single_tensor(gtab, evals=np.r_[l1, l2, l3]) # test MAPMRI q-space indices mapm = MapmriModel(gtab, radial_order=radial_order, laplacian_regularization=False, anisotropic_scaling=False) mapfit = mapm.fit(S) tau = 1 / (4 * np.pi ** 2) # ground truth indices estimated from the DTI tensor rtpp_gt = 1. / (2 * np.sqrt(np.pi * l1 * tau)) rtap_gt = ( 1. / (2 * np.sqrt(np.pi * l2 * tau)) * 1. / (2 * np.sqrt(np.pi * l3 * tau)) ) rtop_gt = rtpp_gt * rtap_gt msd_gt = 2 * (l1 + l2 + l3) * tau qiv_gt = ( (64 * np.pi ** (7 / 2.) * (l1 * l2 * l3 * tau ** 3) ** (3 / 2.)) / ((l2 * l3 + l1 * (l2 + l3)) * tau ** 2) ) assert_almost_equal(mapfit.rtap(), rtap_gt, 5) assert_almost_equal(mapfit.rtpp(), rtpp_gt, 5) assert_almost_equal(mapfit.rtop(), rtop_gt, 4) assert_almost_equal(mapfit.msd(), msd_gt, 5) assert_almost_equal(mapfit.qiv(), qiv_gt, 5) def test_mapmri_laplacian_anisotropic(radial_order=6): gtab = get_gtab_taiwan_dsi() l1, l2, l3 = [0.0015, 0.0003, 0.0003] S = single_tensor(gtab, evals=np.r_[l1, l2, l3]) mapm = MapmriModel(gtab, radial_order=radial_order, laplacian_regularization=False) mapfit = mapm.fit(S) tau = 1 / (4 * np.pi ** 2) # ground truth norm of laplacian of tensor norm_of_laplacian_gt = ( (3 * (l1 ** 2 + l2 ** 2 + l3 ** 2) + 2 * l2 * l3 + 2 * l1 * (l2 + l3)) * (np.pi ** (5 / 2.) * tau) / (np.sqrt(2 * l1 * l2 * l3 * tau)) ) # check if estimated laplacian corresponds with ground truth laplacian_matrix = mapmri.mapmri_laplacian_reg_matrix( mapm.ind_mat, mapfit.mu, mapm.S_mat, mapm.T_mat, mapm.U_mat) coef = mapfit._mapmri_coef norm_of_laplacian = np.dot(np.dot(coef, laplacian_matrix), coef) assert_almost_equal(norm_of_laplacian, norm_of_laplacian_gt) def test_mapmri_laplacian_isotropic(radial_order=6): gtab = get_gtab_taiwan_dsi() l1, l2, l3 = [0.0003, 0.0003, 0.0003] # isotropic diffusivities S = single_tensor(gtab, evals=np.r_[l1, l2, l3]) mapm = MapmriModel(gtab, radial_order=radial_order, laplacian_regularization=False, anisotropic_scaling=False) mapfit = mapm.fit(S) tau = 1 / (4 * np.pi ** 2) # ground truth norm of laplacian of tensor norm_of_laplacian_gt = ( (3 * (l1 ** 2 + l2 ** 2 + l3 ** 2) + 2 * l2 * l3 + 2 * l1 * (l2 + l3)) * (np.pi ** (5 / 2.) 
* tau) / (np.sqrt(2 * l1 * l2 * l3 * tau)) ) # check if estimated laplacian corresponds with ground truth laplacian_matrix = mapmri.mapmri_isotropic_laplacian_reg_matrix( radial_order, mapfit.mu[0]) coef = mapfit._mapmri_coef norm_of_laplacian = np.dot(np.dot(coef, laplacian_matrix), coef) assert_almost_equal(norm_of_laplacian, norm_of_laplacian_gt) def test_signal_fitting_equality_anisotropic_isotropic(radial_order=6): gtab = get_gtab_taiwan_dsi() l1, l2, l3 = [0.0015, 0.0003, 0.0003] S, _ = generate_signal_crossing(gtab, l1, l2, l3, angle2=60) gridsize = 17 radius_max = 0.02 r_points = mapmri.create_rspace(gridsize, radius_max) tenmodel = dti.TensorModel(gtab) evals = tenmodel.fit(S).evals tau = 1 / (4 * np.pi ** 2) # estimate isotropic scale factor u0 = mapmri.isotropic_scale_factor(evals * 2 * tau) mu = np.array([u0, u0, u0]) qvals = np.sqrt(gtab.bvals / tau) / (2 * np.pi) q = gtab.bvecs * qvals[:, None] M_aniso = mapmri.mapmri_phi_matrix(radial_order, mu, q) K_aniso = mapmri.mapmri_psi_matrix(radial_order, mu, r_points) M_iso = mapmri.mapmri_isotropic_phi_matrix(radial_order, u0, q) K_iso = mapmri.mapmri_isotropic_psi_matrix(radial_order, u0, r_points) coef_aniso = np.dot(np.linalg.pinv(M_aniso), S) coef_iso = np.dot(np.linalg.pinv(M_iso), S) # test if anisotropic and isotropic implementation produce equal results # if the same isotropic scale factors are used s_fitted_aniso = np.dot(M_aniso, coef_aniso) s_fitted_iso = np.dot(M_iso, coef_iso) assert_array_almost_equal(s_fitted_aniso, s_fitted_iso) # the same test for the PDF pdf_fitted_aniso = np.dot(K_aniso, coef_aniso) pdf_fitted_iso = np.dot(K_iso, coef_iso) assert_array_almost_equal(pdf_fitted_aniso / pdf_fitted_iso, np.ones_like(pdf_fitted_aniso), 3) # test if the implemented version also produces the same result mapm = MapmriModel(gtab, radial_order=radial_order, laplacian_regularization=False, anisotropic_scaling=False) s_fitted_implemented_isotropic = mapm.fit(S).fitted_signal() # normalize non-implemented fitted signal with b0 value s_fitted_aniso_norm = s_fitted_aniso / s_fitted_aniso.max() assert_array_almost_equal(s_fitted_aniso_norm, s_fitted_implemented_isotropic) # test if norm of signal laplacians are the same laplacian_matrix_iso = mapmri.mapmri_isotropic_laplacian_reg_matrix( radial_order, mu[0]) ind_mat = mapmri.mapmri_index_matrix(radial_order) S_mat, T_mat, U_mat = mapmri.mapmri_STU_reg_matrices(radial_order) laplacian_matrix_aniso = mapmri.mapmri_laplacian_reg_matrix( ind_mat, mu, S_mat, T_mat, U_mat) norm_aniso = np.dot(coef_aniso, np.dot(coef_aniso, laplacian_matrix_aniso)) norm_iso = np.dot(coef_iso, np.dot(coef_iso, laplacian_matrix_iso)) assert_almost_equal(norm_iso, norm_aniso) def test_mapmri_isotropic_design_matrix_separability(radial_order=6): gtab = get_gtab_taiwan_dsi() tau = 1 / (4 * np.pi ** 2) qvals = np.sqrt(gtab.bvals / tau) / (2 * np.pi) q = gtab.bvecs * qvals[:, None] mu = 0.0003 # random value M = mapmri.mapmri_isotropic_phi_matrix(radial_order, mu, q) M_independent = mapmri.mapmri_isotropic_M_mu_independent(radial_order, q) M_dependent = mapmri.mapmri_isotropic_M_mu_dependent(radial_order, mu, qvals) M_reconstructed = M_independent * M_dependent assert_array_almost_equal(M, M_reconstructed) def test_estimate_radius_with_rtap(radius_gt=5e-3): gtab = get_gtab_taiwan_dsi() tau = 1 / (4 * np.pi ** 2) # we estimate the infinite diffusion time case for a perfectly reflecting # cylinder using the Callaghan model E = cylinders_and_ball_soderman(gtab, tau, radii=[radius_gt], snr=None, angles=[(0, 90)], 
                                    fractions=[100])[0]
    # estimate radius using anisotropic MAP-MRI.
    mapmod = mapmri.MapmriModel(gtab, radial_order=6,
                                laplacian_regularization=True,
                                laplacian_weighting=0.01)
    mapfit = mapmod.fit(E)
    radius_estimated = np.sqrt(1 / (np.pi * mapfit.rtap()))
    assert_almost_equal(radius_estimated, radius_gt, 5)

    # estimate radius using isotropic MAP-MRI.
    # note that the radial order is higher and the precision is lower due to
    # less accurate signal extrapolation.
    mapmod = mapmri.MapmriModel(gtab, radial_order=8,
                                laplacian_regularization=True,
                                laplacian_weighting=0.01,
                                anisotropic_scaling=False)
    mapfit = mapmod.fit(E)
    radius_estimated = np.sqrt(1 / (np.pi * mapfit.rtap()))
    assert_almost_equal(radius_estimated, radius_gt, 4)


@np.testing.dec.skipif(not mapmri.have_cvxpy)
def test_positivity_constraint(radial_order=6):
    gtab = get_gtab_taiwan_dsi()
    l1, l2, l3 = [0.0015, 0.0003, 0.0003]
    S, _ = generate_signal_crossing(gtab, l1, l2, l3, angle2=60)
    S_noise = add_noise(S, snr=20, S0=100.)

    gridsize = 20
    max_radius = 15e-3  # 15 microns maximum radius
    r_grad = mapmri.create_rspace(gridsize, max_radius)

    # the positivity constraint does not make the pdf completely positive
    # but greatly decreases the amount of negativity in the constrained points.
    # we test if the amount of negative pdf has decreased by more than 90%
    mapmod_no_constraint = MapmriModel(gtab, radial_order=radial_order,
                                       laplacian_regularization=False,
                                       positivity_constraint=False)
    mapfit_no_constraint = mapmod_no_constraint.fit(S_noise)
    pdf = mapfit_no_constraint.pdf(r_grad)
    pdf_negative_no_constraint = pdf[pdf < 0].sum()

    mapmod_constraint = MapmriModel(gtab, radial_order=radial_order,
                                    laplacian_regularization=False,
                                    positivity_constraint=True,
                                    pos_grid=gridsize,
                                    pos_radius='adaptive')
    mapfit_constraint = mapmod_constraint.fit(S_noise)
    pdf = mapfit_constraint.pdf(r_grad)
    pdf_negative_constraint = pdf[pdf < 0].sum()

    assert_equal((pdf_negative_constraint / pdf_negative_no_constraint) < 0.1,
                 True)

    # the same for isotropic scaling
    mapmod_no_constraint = MapmriModel(gtab, radial_order=radial_order,
                                       laplacian_regularization=False,
                                       positivity_constraint=False,
                                       anisotropic_scaling=False)
    mapfit_no_constraint = mapmod_no_constraint.fit(S_noise)
    pdf = mapfit_no_constraint.pdf(r_grad)
    pdf_negative_no_constraint = pdf[pdf < 0].sum()

    mapmod_constraint = MapmriModel(gtab, radial_order=radial_order,
                                    laplacian_regularization=False,
                                    positivity_constraint=True,
                                    anisotropic_scaling=False,
                                    pos_grid=gridsize,
                                    pos_radius='adaptive')
    mapfit_constraint = mapmod_constraint.fit(S_noise)
    pdf = mapfit_constraint.pdf(r_grad)
    pdf_negative_constraint = pdf[pdf < 0].sum()

    assert_equal((pdf_negative_constraint / pdf_negative_no_constraint) < 0.1,
                 True)


def test_laplacian_regularization(radial_order=6):
    gtab = get_gtab_taiwan_dsi()
    l1, l2, l3 = [0.0015, 0.0003, 0.0003]
    S, _ = generate_signal_crossing(gtab, l1, l2, l3, angle2=60)
    S_noise = add_noise(S, snr=20, S0=100.)
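# --- Illustrative sketch (not part of the original dipy test suite) --------
# test_positivity_constraint above scores a fit by the amount of negative
# propagator mass left on the evaluation grid.  The hypothetical helper
# below just names that quantity, reusing the fit.pdf(r_grad) call shown in
# that test.
def _negative_pdf_mass(mapmri_fit, r_grad):
    pdf = mapmri_fit.pdf(r_grad)
    return pdf[pdf < 0].sum()
# ---------------------------------------------------------------------------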
weight_array = np.linspace(0, .3, 301) mapmod_unreg = MapmriModel(gtab, radial_order=radial_order, laplacian_regularization=False, laplacian_weighting=weight_array) mapmod_laplacian_array = MapmriModel(gtab, radial_order=radial_order, laplacian_regularization=True, laplacian_weighting=weight_array) mapmod_laplacian_gcv = MapmriModel(gtab, radial_order=radial_order, laplacian_regularization=True, laplacian_weighting="GCV") # test the Generalized Cross Validation # test if GCV gives very low if there is no noise mapfit_laplacian_array = mapmod_laplacian_array.fit(S) assert_equal(mapfit_laplacian_array.lopt < 0.01, True) # test if GCV gives higher values if there is noise mapfit_laplacian_array = mapmod_laplacian_array.fit(S_noise) lopt_array = mapfit_laplacian_array.lopt assert_equal(lopt_array > 0.01, True) # test if continuous GCV gives the same the one based on an array mapfit_laplacian_gcv = mapmod_laplacian_gcv.fit(S_noise) lopt_gcv = mapfit_laplacian_gcv.lopt assert_almost_equal(lopt_array, lopt_gcv, 2) # test if laplacian reduced the norm of the laplacian in the reconstruction mu = mapfit_laplacian_gcv.mu laplacian_matrix = mapmri.mapmri_laplacian_reg_matrix( mapmod_laplacian_gcv.ind_mat, mu, mapmod_laplacian_gcv.S_mat, mapmod_laplacian_gcv.T_mat, mapmod_laplacian_gcv.U_mat) coef_unreg = mapmod_unreg.fit(S_noise)._mapmri_coef coef_laplacian = mapfit_laplacian_gcv._mapmri_coef laplacian_norm_unreg = np.dot( coef_unreg, np.dot(coef_unreg, laplacian_matrix)) laplacian_norm_laplacian = np.dot( coef_laplacian, np.dot(coef_laplacian, laplacian_matrix)) assert_equal(laplacian_norm_laplacian < laplacian_norm_unreg, True) # the same for isotropic scaling mapmod_unreg = MapmriModel(gtab, radial_order=radial_order, laplacian_regularization=False, laplacian_weighting=weight_array, anisotropic_scaling=False) mapmod_laplacian_array = MapmriModel(gtab, radial_order=radial_order, laplacian_regularization=True, laplacian_weighting=weight_array, anisotropic_scaling=False) mapmod_laplacian_gcv = MapmriModel(gtab, radial_order=radial_order, laplacian_regularization=True, laplacian_weighting="GCV", anisotropic_scaling=False) # test the Generalized Cross Validation # test if GCV gives zero if there is no noise mapfit_laplacian_array = mapmod_laplacian_array.fit(S) assert_equal(mapfit_laplacian_array.lopt < 0.01, True) # test if GCV gives higher values if there is noise mapfit_laplacian_array = mapmod_laplacian_array.fit(S_noise) lopt_array = mapfit_laplacian_array.lopt assert_equal(lopt_array > 0.01, True) # test if continuous GCV gives the same the one based on an array mapfit_laplacian_gcv = mapmod_laplacian_gcv.fit(S_noise) lopt_gcv = mapfit_laplacian_gcv.lopt assert_almost_equal(lopt_array, lopt_gcv, 2) # test if laplacian reduced the norm of the laplacian in the reconstruction mu = mapfit_laplacian_gcv.mu laplacian_matrix = mapmri.mapmri_isotropic_laplacian_reg_matrix( radial_order, mu[0]) coef_unreg = mapmod_unreg.fit(S_noise)._mapmri_coef coef_laplacian = mapfit_laplacian_gcv._mapmri_coef laplacian_norm_unreg = np.dot( coef_unreg, np.dot(coef_unreg, laplacian_matrix)) laplacian_norm_laplacian = np.dot( coef_laplacian, np.dot(coef_laplacian, laplacian_matrix)) assert_equal(laplacian_norm_laplacian < laplacian_norm_unreg, True) def test_mapmri_odf(radial_order=6): gtab = get_gtab_taiwan_dsi() # load symmetric 724 sphere sphere = get_sphere('symmetric724') # load icosahedron sphere l1, l2, l3 = [0.0015, 0.0003, 0.0003] data, golden_directions = generate_signal_crossing(gtab, l1, l2, l3, angle2=90) 
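# --- Illustrative sketch (not part of the original dipy test suite) --------
# The ODF checks below compare recovered peaks with the simulated sticks via
# angular_similarity.  An equivalent, sometimes easier to read quantity is
# the worst-case angular error in degrees; the helper name
# _max_angular_error_deg is hypothetical.
def _max_angular_error_deg(golden_directions, recovered_directions):
    import numpy as np
    # peaks are antipodally symmetric, so compare absolute cosines
    cos = np.abs(np.dot(golden_directions, recovered_directions.T))
    cos = np.clip(cos, 0., 1.)
    # best match for every golden direction, then the worst of those matches
    return np.degrees(np.arccos(cos.max(axis=1))).max()
# ---------------------------------------------------------------------------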
mapmod = MapmriModel(gtab, radial_order=radial_order, laplacian_regularization=True, laplacian_weighting=0.01) # symmetric724 sphere2 = create_unit_sphere(5) mapfit = mapmod.fit(data) odf = mapfit.odf(sphere) directions, _, _ = peak_directions(odf, sphere, .35, 25) assert_equal(len(directions), 2) assert_almost_equal( angular_similarity(directions, golden_directions), 2, 1) # 5 subdivisions odf = mapfit.odf(sphere2) directions, _, _ = peak_directions(odf, sphere2, .35, 25) assert_equal(len(directions), 2) assert_almost_equal( angular_similarity(directions, golden_directions), 2, 1) sb_dummies = sticks_and_ball_dummies(gtab) for sbd in sb_dummies: data, golden_directions = sb_dummies[sbd] asmfit = mapmod.fit(data) odf = asmfit.odf(sphere2) directions, _, _ = peak_directions(odf, sphere2, .35, 25) if len(directions) <= 3: assert_equal(len(directions), len(golden_directions)) if len(directions) > 3: assert_equal(gfa(odf) < 0.1, True) # for the isotropic implementation check if the odf spherical harmonics # actually represent the discrete sphere function. mapmod = MapmriModel(gtab, radial_order=radial_order, laplacian_regularization=True, laplacian_weighting=0.01, anisotropic_scaling=False) mapfit = mapmod.fit(data) odf = mapfit.odf(sphere) odf_sh = mapfit.odf_sh() odf_from_sh = sh_to_sf(odf_sh, sphere, radial_order, basis_type=None) assert_almost_equal(odf, odf_from_sh, 10) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/reconst/tests/test_multi_voxel.py000066400000000000000000000131471317371701200223140ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import from functools import reduce import numpy as np import numpy.testing as npt from dipy.reconst.multi_voxel import _squash, multi_voxel_fit, CallableArray from dipy.core.sphere import unit_icosahedron def test_squash(): A = np.ones((3, 3), dtype=float) B = np.asarray(A, object) npt.assert_array_equal(A, _squash(B)) npt.assert_equal(_squash(B).dtype, A.dtype) B[2, 2] = None A[2, 2] = 0 npt.assert_array_equal(A, _squash(B)) npt.assert_equal(_squash(B).dtype, A.dtype) for ijk in np.ndindex(*B.shape): B[ijk] = np.ones((2,)) A = np.ones((3, 3, 2)) npt.assert_array_equal(A, _squash(B)) npt.assert_equal(_squash(B).dtype, A.dtype) B[2, 2] = None A[2, 2] = 0 npt.assert_array_equal(A, _squash(B)) npt.assert_equal(_squash(B).dtype, A.dtype) # sub-arrays have different shapes ( (3,) and (2,) ) B[0, 0] = np.ones((3,)) npt.assert_(_squash(B) is B) # Check dtypes for arrays and scalars arr_arr = np.zeros((2,), dtype=object) scalar_arr = np.zeros((2,), dtype=object) numeric_types = sum( [np.sctypes[t] for t in ('int', 'uint', 'float', 'complex')], [np.bool_]) for dt0 in numeric_types: arr_arr[0] = np.zeros((3,), dtype=dt0) scalar_arr[0] = dt0(0) for dt1 in numeric_types: arr_arr[1] = np.zeros((3,), dtype=dt1) npt.assert_equal(_squash(arr_arr).dtype, reduce(np.add, arr_arr).dtype) scalar_arr[1] = dt0(1) npt.assert_equal(_squash(scalar_arr).dtype, reduce(np.add, scalar_arr).dtype) # Check masks and Nones arr = np.ones((3, 4), dtype=float) obj_arr = arr.astype(object) arr[1, 1] = 99 obj_arr[1, 1] = None npt.assert_array_equal(_squash(obj_arr, mask=None, fill=99), arr) msk = arr == 1 npt.assert_array_equal(_squash(obj_arr, mask=msk, fill=99), arr) msk[1, 1] = 1 # unmask None - object array back npt.assert_array_equal(_squash(obj_arr, mask=msk, fill=99), obj_arr) msk[1, 1] = 0 # remask, back to fill again npt.assert_array_equal(_squash(obj_arr, mask=msk, fill=99), arr) obj_arr[2, 3] = None # add another 
unmasked None, object again npt.assert_array_equal(_squash(obj_arr, mask=msk, fill=99), obj_arr) # Check array of arrays obj_arrs = np.zeros((3,), dtype=object) for i in range(3): obj_arrs[i] = np.ones((4, 5)) arr_arrs = np.ones((3, 4, 5)) # No Nones npt.assert_array_equal(_squash(obj_arrs, mask=None, fill=99), arr_arrs) # None, implicit masking obj_masked = obj_arrs.copy() obj_masked[1] = None arr_masked = arr_arrs.copy() arr_masked[1] = 99 npt.assert_array_equal(_squash(obj_masked, mask=None, fill=99), arr_masked) msk = np.array([1, 0, 1], dtype=np.bool_) # explicit mask npt.assert_array_equal(_squash(obj_masked, mask=msk, fill=99), arr_masked) msk[1] = True # unmask None, object array back npt.assert_array_equal(_squash(obj_masked, mask=msk, fill=99), obj_masked) def test_CallableArray(): callarray = CallableArray((2, 3), dtype=object) # Test without Nones callarray[:] = np.arange expected = np.empty([2, 3, 4]) expected[:] = range(4) npt.assert_array_equal(callarray(4), expected) # Test with Nones callarray[0, 0] = None expected[0, 0] = 0 npt.assert_array_equal(callarray(4), expected) def test_multi_voxel_fit(): class SillyModel(object): @multi_voxel_fit def fit(self, data, mask=None): return SillyFit(model, data) def predict(self, S0): return np.ones(10) * S0 class SillyFit(object): def __init__(self, model, data): self.model = model self.data = data model_attr = 2. def odf(self, sphere): return np.ones(len(sphere.phi)) @property def directions(self): n = np.random.randint(0, 10) return np.zeros((n, 3)) def predict(self, S0): return np.ones(self.data.shape) * S0 # Test the single voxel case model = SillyModel() single_voxel = np.zeros(64) fit = model.fit(single_voxel) npt.assert_equal(type(fit), SillyFit) # Test without a mask many_voxels = np.zeros((2, 3, 4, 64)) fit = model.fit(many_voxels) expected = np.empty((2, 3, 4)) expected[:] = 2. npt.assert_array_equal(fit.model_attr, expected) expected = np.ones((2, 3, 4, 12)) npt.assert_array_equal(fit.odf(unit_icosahedron), expected) npt.assert_equal(fit.directions.shape, (2, 3, 4)) S0 = 100. 
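# --- Illustrative sketch (not part of the original dipy test suite) --------
# SillyModel/SillyFit above show the pattern that real reconstruction models
# follow: decorate the single-voxel fit method with multi_voxel_fit and the
# decorator handles looping over a 4D volume and applying the mask.  A
# minimal, hypothetical model written the same way could look like this
# (MeanModel / MeanFit are made up for illustration):
from dipy.reconst.multi_voxel import multi_voxel_fit


class MeanFit(object):
    def __init__(self, mean):
        self.mean = mean


class MeanModel(object):
    @multi_voxel_fit
    def fit(self, data, mask=None):
        return MeanFit(data.mean())

# Fitting a (2, 3, 4, 10) volume, MeanModel().fit(vol).mean would then be
# expected to come back as a (2, 3, 4) array of per-voxel means, in the same
# way fit.model_attr is checked above.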
npt.assert_equal(fit.predict(S0=S0), np.ones(many_voxels.shape) * S0) # Test with a mask mask = np.zeros((3, 3, 3)).astype('bool') mask[0, 0] = 1 mask[1, 1] = 1 mask[2, 2] = 1 data = np.zeros((3, 3, 3, 64)) fit = model.fit(data, mask) expected = np.zeros((3, 3, 3)) expected[0, 0] = 2 expected[1, 1] = 2 expected[2, 2] = 2 npt.assert_array_equal(fit.model_attr, expected) odf = fit.odf(unit_icosahedron) npt.assert_equal(odf.shape, (3, 3, 3, 12)) npt.assert_array_equal(odf[~mask], 0) npt.assert_array_equal(odf[mask], 1) predicted = np.zeros(data.shape) predicted[mask] = S0 npt.assert_equal(fit.predict(S0=S0), predicted) # Test fit.shape npt.assert_equal(fit.shape, (3, 3, 3)) # Test indexing into a fit npt.assert_equal(type(fit[0, 0, 0]), SillyFit) npt.assert_equal(fit[:2, :2, :2].shape, (2, 2, 2)) dipy-0.13.0/dipy/reconst/tests/test_odf.py000066400000000000000000000046541317371701200205200ustar00rootroot00000000000000import numpy as np from numpy.testing import (run_module_suite, assert_equal, assert_almost_equal, assert_) from dipy.reconst.odf import (OdfFit, OdfModel, minmax_normalize, gfa) from dipy.core.subdivide_octahedron import create_unit_hemisphere from dipy.sims.voxel import multi_tensor, multi_tensor_odf from dipy.data import get_sphere from dipy.core.gradients import gradient_table, GradientTable _sphere = create_unit_hemisphere(4) _odf = (_sphere.vertices * [1, 2, 3]).sum(-1) _gtab = GradientTable(np.ones((64, 3))) class SimpleOdfModel(OdfModel): sphere = _sphere def fit(self, data): fit = SimpleOdfFit(self, data) return fit class SimpleOdfFit(OdfFit): def odf(self, sphere=None): if sphere is None: sphere = self.model.sphere # Use ascontiguousarray to work around a bug in NumPy return np.ascontiguousarray((sphere.vertices * [1, 2, 3]).sum(-1)) def test_OdfFit(): m = SimpleOdfModel(_gtab) f = m.fit(None) odf = f.odf(_sphere) assert_equal(len(odf), len(_sphere.theta)) def test_minmax_normalize(): bvalue = 3000 S0 = 1 SNR = 100 sphere = get_sphere('symmetric362') bvecs = np.concatenate(([[0, 0, 0]], sphere.vertices)) bvals = np.zeros(len(bvecs)) + bvalue bvals[0] = 0 gtab = gradient_table(bvals, bvecs) evals = np.array(([0.0017, 0.0003, 0.0003], [0.0017, 0.0003, 0.0003])) S, sticks = multi_tensor(gtab, evals, S0, angles=[(0, 0), (90, 0)], fractions=[50, 50], snr=SNR) odf = multi_tensor_odf(sphere.vertices, evals, angles=[(0, 0), (90, 0)], fractions=[50, 50]) odf2 = minmax_normalize(odf) assert_equal(odf2.max(), 1) assert_equal(odf2.min(), 0) odf3 = np.empty(odf.shape) odf3 = minmax_normalize(odf, odf3) assert_equal(odf3.max(), 1) assert_equal(odf3.min(), 0) def test_gfa(): g = gfa(np.ones(100)) assert_equal(g, 0) g = gfa(np.ones((2, 100))) assert_equal(g, np.array([0, 0])) # The following series follows the rule (sqrt(n-1)/((n-1)^2)) g = gfa(np.hstack([np.ones((9)), [0]])) assert_almost_equal(g, np.sqrt(9./81)) g = gfa(np.hstack([np.ones((99)), [0]])) assert_almost_equal(g, np.sqrt(99./(99.**2))) # All-zeros returns a nan with no warning: g = gfa(np.zeros(10)) assert_(np.isnan(g)) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/reconst/tests/test_peak_finding.py000066400000000000000000000122711317371701200223600ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import import numpy as np import numpy.testing as npt from dipy.reconst.recspeed import (local_maxima, remove_similar_vertices, search_descending) from dipy.data import get_sphere, get_data from dipy.core.sphere import unique_edges, HemiSphere from dipy.sims.voxel import 
all_tensor_evecs def test_local_maxima(): sphere = get_sphere('symmetric724') vertices, faces = sphere.vertices, sphere.faces edges = unique_edges(faces) # Check that the first peak is == max(odf) odf = abs(vertices.sum(-1)) peak_values, peak_index = local_maxima(odf, edges) npt.assert_equal(max(odf), peak_values[0]) npt.assert_equal(max(odf), odf[peak_index[0]]) # Create an artificial odf with a few peaks odf = np.zeros(len(vertices)) odf[1] = 1. odf[143] = 143. odf[505] = 505. peak_values, peak_index = local_maxima(odf, edges) npt.assert_array_equal(peak_values, [505, 143, 1]) npt.assert_array_equal(peak_index, [505, 143, 1]) # Check that neighboring points can both be peaks odf = np.zeros(len(vertices)) point1, point2 = edges[0] odf[[point1, point2]] = 1. peak_values, peak_index = local_maxima(odf, edges) npt.assert_array_equal(peak_values, [1., 1.]) npt.assert_(point1 in peak_index) npt.assert_(point2 in peak_index) # Repeat with a hemisphere hemisphere = HemiSphere(xyz=vertices, faces=faces) vertices, edges = hemisphere.vertices, hemisphere.edges # Check that the first peak is == max(odf) odf = abs(vertices.sum(-1)) peak_values, peak_index = local_maxima(odf, edges) npt.assert_equal(max(odf), peak_values[0]) npt.assert_equal(max(odf), odf[peak_index[0]]) # Create an artificial odf with a few peaks odf = np.zeros(len(vertices)) odf[1] = 1. odf[143] = 143. odf[300] = 300. peak_value, peak_index = local_maxima(odf, edges) npt.assert_array_equal(peak_value, [300, 143, 1]) npt.assert_array_equal(peak_index, [300, 143, 1]) # Check that neighboring points can both be peaks odf = np.zeros(len(vertices)) point1, point2 = edges[0] odf[[point1, point2]] = 1. peak_values, peak_index = local_maxima(odf, edges) npt.assert_array_equal(peak_values, [1., 1.]) npt.assert_(point1 in peak_index) npt.assert_(point2 in peak_index) # Should raise an error if odf has nans odf[20] = np.nan npt.assert_raises(ValueError, local_maxima, odf, edges) # Should raise an error if edge values are too large to index odf edges[0, 0] = 9999 odf[20] = 0 npt.assert_raises(IndexError, local_maxima, odf, edges) def test_remove_similar_peaks(): vertices = np.array([[1., 0., 0.], [0., 1., 0.], [0., 0., 1.], [1.1, 1., 0.], [0., 2., 1.], [2., 1., 0.], [1., 0., 0.]]) norms = np.sqrt((vertices*vertices).sum(-1)) vertices = vertices/norms[:, None] # Return unique vertices uv = remove_similar_vertices(vertices, .01) npt.assert_array_equal(uv, vertices[:6]) # Return vertices with mapping and indices uv, mapping, index = remove_similar_vertices(vertices, .01, return_mapping=True, return_index=True) npt.assert_array_equal(uv, vertices[:6]) npt.assert_array_equal(mapping, list(range(6)) + [0]) npt.assert_array_equal(index, range(6)) # Test mapping with different angles uv, mapping = remove_similar_vertices(vertices, .01, return_mapping=True) npt.assert_array_equal(uv, vertices[:6]) npt.assert_array_equal(mapping, list(range(6)) + [0]) uv, mapping = remove_similar_vertices(vertices, 30, return_mapping=True) npt.assert_array_equal(uv, vertices[:4]) npt.assert_array_equal(mapping, list(range(4)) + [1, 0, 0]) uv, mapping = remove_similar_vertices(vertices, 60, return_mapping=True) npt.assert_array_equal(uv, vertices[:3]) npt.assert_array_equal(mapping, list(range(3)) + [0, 1, 0, 0]) # Test index with different angles uv, index = remove_similar_vertices(vertices, .01, return_index=True) npt.assert_array_equal(uv, vertices[:6]) npt.assert_array_equal(index, range(6)) uv, index = remove_similar_vertices(vertices, 30, return_index=True) 
    npt.assert_array_equal(uv, vertices[:4])
    npt.assert_array_equal(index, range(4))
    uv, index = remove_similar_vertices(vertices, 60, return_index=True)
    npt.assert_array_equal(uv, vertices[:3])
    npt.assert_array_equal(index, range(3))


def test_search_descending():
    a = np.linspace(10., 1., 10)

    npt.assert_equal(search_descending(a, 1.), 1)
    npt.assert_equal(search_descending(a, .89), 2)
    npt.assert_equal(search_descending(a, .79), 3)

    # Test small array
    npt.assert_equal(search_descending(a[:1], 1.), 1)
    npt.assert_equal(search_descending(a[:1], 0.), 1)
    npt.assert_equal(search_descending(a[:1], .5), 1)

    # Test very small array
    npt.assert_equal(search_descending(a[:0], 1.), 0)


if __name__ == '__main__':
    import nose
    nose.runmodule()
dipy-0.13.0/dipy/reconst/tests/test_peakdf.py000066400000000000000000000045571317371701200212000ustar00rootroot00000000000000import numpy as np
import numpy.testing as npt

from dipy.direction.peaks import default_sphere, peaks_from_model


def test_PeaksAndMetricsDirectionGetter():

    class SillyModel(object):

        def fit(self, data, mask=None):
            return SillyFit(self)

    class SillyFit(object):

        def __init__(self, model):
            self.model = model

        def odf(self, sphere):
            odf = np.zeros(sphere.theta.shape)
            r = np.random.randint(0, len(odf))
            odf[r] = 1
            return odf

    def get_direction(dg, point, dir):
        newdir = dir.copy()
        state = dg.get_direction(point, newdir)
        return (state, np.array(newdir))

    data = np.random.random((3, 4, 5, 2))
    peaks = peaks_from_model(SillyModel(), data, default_sphere,
                             relative_peak_threshold=.5,
                             min_separation_angle=25)
    peaks._initialize()

    up = np.zeros(3)
    up[2] = 1.
    down = -up

    for i in range(3-1):
        for j in range(4-1):
            for k in range(5-1):
                point = np.array([i, j, k], dtype=float)

                # Test that the angle threshold rejects points
                peaks.ang_thr = 0.
                state, nd = get_direction(peaks, point, up)
                npt.assert_equal(state, 1)

                # Here we leverage the fact that we know Hemispheres project
                # all their vertices into the z >= 0 half of the sphere.
                peaks.ang_thr = 90.
state, nd = get_direction(peaks, point, up) npt.assert_equal(state, 0) expected_dir = peaks.peak_dirs[i, j, k, 0] npt.assert_array_almost_equal(nd, expected_dir) state, nd = get_direction(peaks, point, down) npt.assert_array_almost_equal(nd, -expected_dir) # Check that we can get directions at non-integer points point += np.random.random(3) state, nd = get_direction(peaks, point, up) npt.assert_equal(state, 0) # Check that points are rounded to get initial direction point -= .5 id = peaks.initial_direction(point) # id should be a (1, 3) array npt.assert_array_almost_equal(id, [expected_dir]) if __name__ == "__main__": npt.run_module_suite() dipy-0.13.0/dipy/reconst/tests/test_reco_utils.py000066400000000000000000000044661317371701200221210ustar00rootroot00000000000000""" Testing reconstruction utilities """ import numpy as np from dipy.reconst.recspeed import (adj_to_countarrs, argmax_from_countarrs) from nose.tools import assert_true, assert_false, \ assert_equal, assert_raises from numpy.testing import assert_array_equal, assert_array_almost_equal def test_adj_countarrs(): adj = [[0, 1, 2], [2, 3], [4, 5, 6, 7]] counts, inds = adj_to_countarrs(adj) assert_array_equal(counts, [3, 2, 4]) assert_equal(counts.dtype.type, np.uint32) assert_array_equal(inds, [0, 1, 2, 2, 3, 4, 5, 6, 7]) assert_equal(inds.dtype.type, np.uint32) def test_argmax_from_countarrs(): # basic case vals = np.arange(10, dtype=np.float) vertinds = np.arange(10, dtype=np.uint32) adj_counts = np.ones((10,), dtype=np.uint32) adj_inds_raw = np.arange(10, dtype=np.uint32)[::-1] # when contigous - OK adj_inds = adj_inds_raw.copy() inds = argmax_from_countarrs(vals, vertinds, adj_counts, adj_inds) # yield assert_array_equal(inds, [5, 6, 7, 8, 9]) # test for errors - first - not contiguous # # The tests below cause odd errors and segfaults with numpy SVN # vintage June 2010 (sometime after 1.4.0 release) - see # http://groups.google.com/group/cython-users/browse_thread/thread/624c696293b7fe44?pli=1 """ yield assert_raises(ValueError, argmax_from_countarrs, vals, vertinds, adj_counts, adj_inds_raw) # too few vertices yield assert_raises(ValueError, argmax_from_countarrs, vals, vertinds[:-1], adj_counts, adj_inds) # adj_inds too short yield assert_raises(IndexError, argmax_from_countarrs, vals, vertinds, adj_counts, adj_inds[:-1]) # vals too short yield assert_raises(IndexError, argmax_from_countarrs, vals[:-1], vertinds, adj_counts, adj_inds) """ dipy-0.13.0/dipy/reconst/tests/test_sfm.py000066400000000000000000000131521317371701200205260ustar00rootroot00000000000000import numpy as np import numpy.testing as npt import nibabel as nib import dipy.reconst.sfm as sfm import dipy.data as dpd import dipy.core.gradients as grad import dipy.sims.voxel as sims import dipy.core.optimize as opt import dipy.reconst.cross_validation as xval def test_design_matrix(): data, gtab = dpd.dsi_voxels() sphere = dpd.get_sphere() # Make it with NNLS, so that it gets tested regardless of sklearn sparse_fascicle_model = sfm.SparseFascicleModel(gtab, sphere, solver='NNLS') npt.assert_equal(sparse_fascicle_model.design_matrix.shape, (np.sum(~gtab.b0s_mask), sphere.vertices.shape[0])) @npt.dec.skipif(not sfm.has_sklearn) def test_sfm(): fdata, fbvals, fbvecs = dpd.get_data() data = nib.load(fdata).get_data() gtab = grad.gradient_table(fbvals, fbvecs) for iso in [sfm.ExponentialIsotropicModel, None]: sfmodel = sfm.SparseFascicleModel(gtab, isotropic=iso) sffit1 = sfmodel.fit(data[0, 0, 0]) sphere = dpd.get_sphere() odf1 = sffit1.odf(sphere) pred1 = 
sffit1.predict(gtab) mask = np.ones(data.shape[:-1]) sffit2 = sfmodel.fit(data, mask) pred2 = sffit2.predict(gtab) odf2 = sffit2.odf(sphere) sffit3 = sfmodel.fit(data) pred3 = sffit3.predict(gtab) odf3 = sffit3.odf(sphere) npt.assert_almost_equal(pred3, pred2, decimal=2) npt.assert_almost_equal(pred3[0, 0, 0], pred1, decimal=2) npt.assert_almost_equal(odf3[0, 0, 0], odf1, decimal=2) npt.assert_almost_equal(odf3[0, 0, 0], odf2[0, 0, 0], decimal=2) # Fit zeros and you will get back zeros npt.assert_almost_equal( sfmodel.fit(np.zeros(data[0, 0, 0].shape)).beta, np.zeros(sfmodel.design_matrix[0].shape[-1])) @npt.dec.skipif(not sfm.has_sklearn) def test_predict(): SNR = 1000 S0 = 100 _, fbvals, fbvecs = dpd.get_data('small_64D') bvals = np.load(fbvals) bvecs = np.load(fbvecs) gtab = grad.gradient_table(bvals, bvecs) mevals = np.array(([0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003])) angles = [(0, 0), (60, 0)] S, sticks = sims.multi_tensor(gtab, mevals, S0, angles=angles, fractions=[10, 90], snr=SNR) sfmodel = sfm.SparseFascicleModel(gtab, response=[0.0015, 0.0003, 0.0003]) sffit = sfmodel.fit(S) pred = sffit.predict() npt.assert_(xval.coeff_of_determination(pred, S) > 97) # Should be possible to predict using a different gtab: new_gtab = grad.gradient_table(bvals[::2], bvecs[::2]) new_pred = sffit.predict(new_gtab) npt.assert_(xval.coeff_of_determination(new_pred, S[::2]) > 97) def test_sfm_background(): fdata, fbvals, fbvecs = dpd.get_data() data = nib.load(fdata).get_data() gtab = grad.gradient_table(fbvals, fbvecs) to_fit = data[0, 0, 0] to_fit[gtab.b0s_mask] = 0 sfmodel = sfm.SparseFascicleModel(gtab, solver='NNLS') sffit = sfmodel.fit(to_fit) npt.assert_equal(sffit.beta, np.zeros_like(sffit.beta)) def test_sfm_stick(): fdata, fbvals, fbvecs = dpd.get_data() data = nib.load(fdata).get_data() gtab = grad.gradient_table(fbvals, fbvecs) sfmodel = sfm.SparseFascicleModel(gtab, solver='NNLS', response=[0.001, 0, 0]) sffit1 = sfmodel.fit(data[0, 0, 0]) sphere = dpd.get_sphere() odf1 = sffit1.odf(sphere) pred1 = sffit1.predict(gtab) SNR = 1000 S0 = 100 mevals = np.array(([0.001, 0, 0], [0.001, 0, 0])) angles = [(0, 0), (60, 0)] S, sticks = sims.multi_tensor(gtab, mevals, S0, angles=angles, fractions=[50, 50], snr=SNR) sfmodel = sfm.SparseFascicleModel(gtab, solver='NNLS', response=[0.001, 0, 0]) sffit = sfmodel.fit(S) pred = sffit.predict() npt.assert_(xval.coeff_of_determination(pred, S) > 96) def test_sfm_sklearnlinearsolver(): class SillySolver(opt.SKLearnLinearSolver): def fit(self, X, y): self.coef_ = np.ones(X.shape[-1]) class EvenSillierSolver(object): def fit(self, X, y): self.coef_ = np.ones(X.shape[-1]) fdata, fbvals, fbvecs = dpd.get_data() gtab = grad.gradient_table(fbvals, fbvecs) sfmodel = sfm.SparseFascicleModel(gtab, solver=SillySolver()) npt.assert_(isinstance(sfmodel.solver, SillySolver)) npt.assert_raises(ValueError, sfm.SparseFascicleModel, gtab, solver=EvenSillierSolver()) @npt.dec.skipif(not sfm.has_sklearn) def test_exponential_iso(): fdata, fbvals, fbvecs = dpd.get_data() data_dti = nib.load(fdata).get_data() gtab_dti = grad.gradient_table(fbvals, fbvecs) data_multi, gtab_multi = dpd.dsi_deconv_voxels() for data, gtab in zip([data_dti, data_multi], [gtab_dti, gtab_multi]): sfmodel = sfm.SparseFascicleModel( gtab, isotropic=sfm.ExponentialIsotropicModel) sffit1 = sfmodel.fit(data[0, 0, 0]) sphere = dpd.get_sphere() odf1 = sffit1.odf(sphere) pred1 = sffit1.predict(gtab) SNR = 1000 S0 = 100 mevals = np.array(([0.0015, 0.0005, 0.0005], [0.0015, 0.0005, 0.0005])) angles = 
[(0, 0), (60, 0)] S, sticks = sims.multi_tensor(gtab, mevals, S0, angles=angles, fractions=[50, 50], snr=SNR) sffit = sfmodel.fit(S) pred = sffit.predict() npt.assert_(xval.coeff_of_determination(pred, S) > 96) dipy-0.13.0/dipy/reconst/tests/test_shm.py000066400000000000000000000421561317371701200205360ustar00rootroot00000000000000"""Test spherical harmonic models and the tools associated with those models""" import warnings import numpy as np import numpy.linalg as npl from nose.tools import assert_equal, assert_raises, assert_true from numpy.testing import assert_array_equal, assert_array_almost_equal import numpy.testing as npt from scipy.special import sph_harm as sph_harm_sp from dipy.core.sphere import hemi_icosahedron from dipy.core.gradients import gradient_table from dipy.sims.voxel import single_tensor from dipy.direction.peaks import peak_directions from dipy.reconst.shm import sf_to_sh, sh_to_sf from dipy.reconst.interpolate import NearestNeighborInterpolator from dipy.sims.voxel import multi_tensor_odf from dipy.data import mrtrix_spherical_functions from dipy.reconst import odf from dipy.reconst.shm import (real_sph_harm, real_sym_sh_basis, real_sym_sh_mrtrix, sph_harm_ind_list, order_from_ncoef, OpdtModel, normalize_data, hat, lcr_matrix, smooth_pinv, bootstrap_data_array, bootstrap_data_voxel, ResidualBootstrapWrapper, CsaOdfModel, QballModel, SphHarmFit, spherical_harmonics, anisotropic_power, calculate_max_order) def test_order_from_ncoeff(): """ """ # Just try some out: for sh_order in [2, 4, 6, 8, 12, 24]: m, n = sph_harm_ind_list(sh_order) n_coef = m.shape[0] npt.assert_equal(order_from_ncoef(n_coef), sh_order) def test_sph_harm_ind_list(): m_list, n_list = sph_harm_ind_list(8) assert_equal(m_list.shape, n_list.shape) assert_equal(m_list.shape, (45,)) assert_true(np.all(np.abs(m_list) <= n_list)) assert_array_equal(n_list % 2, 0) assert_raises(ValueError, sph_harm_ind_list, 1) def test_real_sph_harm(): # Tests derived from tables in # http://en.wikipedia.org/wiki/Table_of_spherical_harmonics # where real spherical harmonic $Y^m_n$ is defined to be: # Real($Y^m_n$) * sqrt(2) if m > 0 # $Y^m_n$ if m == 0 # Imag($Y^m_n$) * sqrt(2) if m < 0 rsh = real_sph_harm pi = np.pi exp = np.exp sqrt = np.sqrt sin = np.sin cos = np.cos assert_array_almost_equal(rsh(0, 0, 0, 0), 0.5 / sqrt(pi)) assert_array_almost_equal(rsh(-2, 2, pi / 5, pi / 3), 0.25 * sqrt(15. / (2. * pi)) * (sin(pi / 5.)) ** 2. * cos(0 + 2. * pi / 3) * sqrt(2)) assert_array_almost_equal(rsh(2, 2, pi / 5, pi / 3), -1 * 0.25 * sqrt(15. / (2. * pi)) * (sin(pi / 5.)) ** 2. * sin(0 - 2. * pi / 3) * sqrt(2)) assert_array_almost_equal(rsh(-2, 2, pi / 2, pi), 0.25 * sqrt(15 / (2. * pi)) * cos(2. * pi) * sin(pi / 2.) ** 2. * sqrt(2)) assert_array_almost_equal(rsh(2, 4, pi / 3., pi / 4.), -1 * (3. / 8.) * sqrt(5. / (2. * pi)) * sin(0 - 2. * pi / 4.) * sin(pi / 3.) ** 2. * (7. * cos(pi / 3.) ** 2. - 1) * sqrt(2)) assert_array_almost_equal(rsh(-4, 4, pi / 6., pi / 8.), (3. / 16.) * sqrt(35. / (2. * pi)) * cos(0 + 4. * pi / 8.) * sin(pi / 6.) ** 4. * sqrt(2)) assert_array_almost_equal(rsh(4, 4, pi / 6., pi / 8.), -1 * (3. / 16.) * sqrt(35. / (2. * pi)) * sin(0 - 4. * pi / 8.) * sin(pi / 6.) ** 4. 
* sqrt(2)) aa = np.ones((3, 1, 1, 1)) bb = np.ones((1, 4, 1, 1)) cc = np.ones((1, 1, 5, 1)) dd = np.ones((1, 1, 1, 6)) assert_equal(rsh(aa, bb, cc, dd).shape, (3, 4, 5, 6)) def test_real_sym_sh_mrtrix(): coef, expected, sphere = mrtrix_spherical_functions() basis, m, n = real_sym_sh_mrtrix(8, sphere.theta, sphere.phi) func = np.dot(coef, basis.T) assert_array_almost_equal(func, expected, 4) def test_real_sym_sh_basis(): # This test should do for now # The mrtrix basis should be the same as re-ordering and re-scaling the # fibernav basis new_order = [0, 5, 4, 3, 2, 1, 14, 13, 12, 11, 10, 9, 8, 7, 6] sphere = hemi_icosahedron.subdivide(2) basis, m, n = real_sym_sh_mrtrix(4, sphere.theta, sphere.phi) expected = basis[:, new_order] expected *= np.where(m == 0, 1., np.sqrt(2)) fibernav_basis, m, n = real_sym_sh_basis(4, sphere.theta, sphere.phi) assert_array_almost_equal(fibernav_basis, expected) def test_smooth_pinv(): hemi = hemi_icosahedron.subdivide(2) m, n = sph_harm_ind_list(4) B = real_sph_harm(m, n, hemi.theta[:, None], hemi.phi[:, None]) L = np.zeros(len(m)) C = smooth_pinv(B, L) D = np.dot(npl.inv(np.dot(B.T, B)), B.T) assert_array_almost_equal(C, D) L = n * (n + 1) * .05 C = smooth_pinv(B, L) L = np.diag(L) D = np.dot(npl.inv(np.dot(B.T, B) + L * L), B.T) assert_array_almost_equal(C, D) L = np.arange(len(n)) * .05 C = smooth_pinv(B, L) L = np.diag(L) D = np.dot(npl.inv(np.dot(B.T, B) + L * L), B.T) assert_array_almost_equal(C, D) def test_normalize_data(): sig = np.arange(1, 66)[::-1] where_b0 = np.zeros(65, 'bool') where_b0[0] = True d = normalize_data(sig, where_b0, 1) assert_raises(ValueError, normalize_data, sig, where_b0, out=sig) norm_sig = normalize_data(sig, where_b0, min_signal=1) assert_array_almost_equal(norm_sig, sig / 65.) norm_sig = normalize_data(sig, where_b0, min_signal=5) assert_array_almost_equal(norm_sig[-5:], 5 / 65.) where_b0[[0, 1]] = [True, True] norm_sig = normalize_data(sig, where_b0, min_signal=1) assert_array_almost_equal(norm_sig, sig / 64.5) norm_sig = normalize_data(sig, where_b0, min_signal=5) assert_array_almost_equal(norm_sig[-5:], 5 / 64.5) sig = sig * np.ones((2, 3, 1)) where_b0[[0, 1]] = [True, False] norm_sig = normalize_data(sig, where_b0, min_signal=1) assert_array_almost_equal(norm_sig, sig / 65.) norm_sig = normalize_data(sig, where_b0, min_signal=5) assert_array_almost_equal(norm_sig[..., -5:], 5 / 65.) where_b0[[0, 1]] = [True, True] norm_sig = normalize_data(sig, where_b0, min_signal=1) assert_array_almost_equal(norm_sig, sig / 64.5) norm_sig = normalize_data(sig, where_b0, min_signal=5) assert_array_almost_equal(norm_sig[..., -5:], 5 / 64.5) def make_fake_signal(): hemisphere = hemi_icosahedron.subdivide(2) bvecs = np.concatenate(([[0, 0, 0]], hemisphere.vertices)) bvals = np.zeros(len(bvecs)) + 2000 bvals[0] = 0 gtab = gradient_table(bvals, bvecs) evals = np.array([[2.1, .2, .2], [.2, 2.1, .2]]) * 10 ** -3 evecs0 = np.eye(3) sq3 = np.sqrt(3) / 2. 
evecs1 = np.array([[sq3, .5, 0], [.5, sq3, 0], [0, 0, 1.]]) evecs1 = evecs0 a = evecs0[0] b = evecs1[1] S1 = single_tensor(gtab, .55, evals[0], evecs0) S2 = single_tensor(gtab, .45, evals[1], evecs1) return S1 + S2, gtab, np.vstack([a, b]) class TestQballModel(object): model = QballModel def test_single_voxel_fit(self): signal, gtab, expected = make_fake_signal() sphere = hemi_icosahedron.subdivide(4) model = self.model(gtab, sh_order=4, min_signal=1e-5, assume_normed=True) fit = model.fit(signal) odf = fit.odf(sphere) assert_equal(odf.shape, sphere.phi.shape) directions, _, _ = peak_directions(odf, sphere) # Check the same number of directions n = len(expected) assert_equal(len(directions), n) # Check directions are unit vectors cos_similarity = (directions * directions).sum(-1) assert_array_almost_equal(cos_similarity, np.ones(n)) # Check the directions == expected or -expected cos_similarity = (directions * expected).sum(-1) assert_array_almost_equal(abs(cos_similarity), np.ones(n)) # Test normalize data model = self.model(gtab, sh_order=4, min_signal=1e-5, assume_normed=False) fit = model.fit(signal * 5) odf_with_norm = fit.odf(sphere) assert_array_almost_equal(odf, odf_with_norm) def test_mulit_voxel_fit(self): signal, gtab, expected = make_fake_signal() sphere = hemi_icosahedron nd_signal = np.vstack([signal, signal]) model = self.model(gtab, sh_order=4, min_signal=1e-5, assume_normed=True) fit = model.fit(nd_signal) odf = fit.odf(sphere) assert_equal(odf.shape, (2,) + sphere.phi.shape) # Test fitting with mask, where mask is False odf should be 0 fit = model.fit(nd_signal, mask=[False, True]) odf = fit.odf(sphere) assert_array_equal(odf[0], 0.) def test_sh_order(self): signal, gtab, expected = make_fake_signal() model = self.model(gtab, sh_order=4, min_signal=1e-5) assert_equal(model.B.shape[1], 15) assert_equal(max(model.n), 4) model = self.model(gtab, sh_order=6, min_signal=1e-5) assert_equal(model.B.shape[1], 28) assert_equal(max(model.n), 6) def test_gfa(self): signal, gtab, expected = make_fake_signal() signal = np.ones((2, 3, 4, 1)) * signal sphere = hemi_icosahedron.subdivide(3) model = self.model(gtab, 6, min_signal=1e-5) fit = model.fit(signal) gfa_shm = fit.gfa gfa_odf = odf.gfa(fit.odf(sphere)) assert_array_almost_equal(gfa_shm, gfa_odf, 3) # gfa should be 0 if all coefficients are 0 (masked areas) mask = np.zeros(signal.shape[:-1]) fit = model.fit(signal, mask) assert_array_equal(fit.gfa, 0) def test_SphHarmFit(): coef = np.zeros((3, 4, 5, 45)) mask = np.zeros((3, 4, 5), dtype=bool) fit = SphHarmFit(None, coef, mask) item = fit[0, 0, 0] assert_equal(item.shape, ()) slice = fit[0] assert_equal(slice.shape, (4, 5)) slice = fit[:, :, 0] assert_equal(slice.shape, (3, 4)) class TestOpdtModel(TestQballModel): model = OpdtModel class TestCsaOdfModel(TestQballModel): model = CsaOdfModel def test_hat_and_lcr(): hemi = hemi_icosahedron.subdivide(3) m, n = sph_harm_ind_list(8) B = real_sph_harm(m, n, hemi.theta[:, None], hemi.phi[:, None]) H = hat(B) B_hat = np.dot(H, B) assert_array_almost_equal(B, B_hat) R = lcr_matrix(H) d = np.arange(len(hemi.theta)) r = d - np.dot(H, d) lev = np.sqrt(1 - H.diagonal()) r /= lev r -= r.mean() r2 = np.dot(R, d) assert_array_almost_equal(r, r2) r3 = np.dot(d, R.T) assert_array_almost_equal(r, r3) def test_bootstrap_array(): B = np.array([[4, 5, 7, 4, 2.], [4, 6, 2, 3, 6.]]) H = hat(B.T) R = np.zeros((5, 5)) d = np.arange(1, 6) dhat = np.dot(H, d) assert_array_almost_equal(bootstrap_data_voxel(dhat, H, R), dhat) 
assert_array_almost_equal(bootstrap_data_array(dhat, H, R), dhat) H = np.zeros((5, 5)) def test_ResidualBootstrapWrapper(): B = np.array([[4, 5, 7, 4, 2.], [4, 6, 2, 3, 6.]]) B = B.T H = hat(B) d = np.arange(10) / 8. d.shape = (2, 5) dhat = np.dot(d, H) signal_object = NearestNeighborInterpolator(dhat, (1,)) ms = .2 where_dwi = np.ones(len(H), dtype=bool) boot_obj = ResidualBootstrapWrapper(signal_object, B, where_dwi, ms) assert_array_almost_equal(boot_obj[0], dhat[0].clip(ms, 1)) assert_array_almost_equal(boot_obj[1], dhat[1].clip(ms, 1)) dhat = np.column_stack([[.6, .7], dhat]) signal_object = NearestNeighborInterpolator(dhat, (1,)) where_dwi = np.concatenate([[False], where_dwi]) boot_obj = ResidualBootstrapWrapper(signal_object, B, where_dwi, ms) assert_array_almost_equal(boot_obj[0], dhat[0].clip(ms, 1)) assert_array_almost_equal(boot_obj[1], dhat[1].clip(ms, 1)) def test_sf_to_sh(): # Subdividing a hemi_icosahedron twice produces 81 unique points, which # is more than enough to fit a order 8 (45 coefficients) spherical harmonic sphere = hemi_icosahedron.subdivide(2) mevals = np.array(([0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003])) angles = [(0, 0), (90, 0)] odf = multi_tensor_odf(sphere.vertices, mevals, angles, [50, 50]) # 1D case with the 3 bases functions odf_sh = sf_to_sh(odf, sphere, 8) odf2 = sh_to_sf(odf_sh, sphere, 8) assert_array_almost_equal(odf, odf2, 2) odf_sh = sf_to_sh(odf, sphere, 8, "mrtrix") odf2 = sh_to_sf(odf_sh, sphere, 8, "mrtrix") assert_array_almost_equal(odf, odf2, 2) odf_sh = sf_to_sh(odf, sphere, 8, "fibernav") odf2 = sh_to_sf(odf_sh, sphere, 8, "fibernav") assert_array_almost_equal(odf, odf2, 2) # 2D case odf2d = np.vstack((odf2, odf)) odf2d_sh = sf_to_sh(odf2d, sphere, 8) odf2d_sf = sh_to_sf(odf2d_sh, sphere, 8) assert_array_almost_equal(odf2d, odf2d_sf, 2) def test_faster_sph_harm(): sh_order = 8 m, n = sph_harm_ind_list(sh_order) theta = np.array([1.61491146, 0.76661665, 0.11976141, 1.20198246, 1.74066314, 1.5925956, 2.13022055, 0.50332859, 1.19868988, 0.78440679, 0.50686938, 0.51739718, 1.80342999, 0.73778957, 2.28559395, 1.29569064, 1.86877091, 0.39239191, 0.54043037, 1.61263047, 0.72695314, 1.90527318, 1.58186125, 0.23130073, 2.51695237, 0.99835604, 1.2883426, 0.48114057, 1.50079318, 1.07978624, 1.9798903, 2.36616966, 2.49233299, 2.13116602, 1.36801518, 1.32932608, 0.95926683, 1.070349, 0.76355762, 2.07148422, 1.50113501, 1.49823314, 0.89248164, 0.22187079, 1.53805373, 1.9765295, 1.13361568, 1.04908355, 1.68737368, 1.91732452, 1.01937457, 1.45839, 0.49641525, 0.29087155, 0.52824641, 1.29875871, 1.81023541, 1.17030475, 2.24953206, 1.20280498, 0.76399964, 2.16109722, 0.79780421, 0.87154509]) phi = np.array([-1.5889514, -3.11092733, -0.61328674, -2.4485381, 2.88058822, 2.02165946, -1.99783366, 2.71235211, 1.41577992, -2.29413676, -2.24565773, -1.55548635, 2.59318232, -1.84672472, -2.33710739, 2.12111948, 1.87523722, -1.05206575, -2.85381987, -2.22808984, 2.3202034, -2.19004474, -1.90358372, 2.14818373, 3.1030696, -2.86620183, -2.19860123, -0.45468447, -3.0034923, 1.73345011, -2.51716288, 2.49961525, -2.68782986, 2.69699056, 1.78566133, -1.59119705, -2.53378963, -2.02476738, 1.36924987, 2.17600517, 2.38117241, 2.99021511, -1.4218007, -2.44016802, -2.52868164, 3.01531658, 2.50093627, -1.70745826, -2.7863931, -2.97359741, 2.17039906, 2.68424643, 1.77896086, 0.45476215, 0.99734418, -2.73107896, 2.28815009, 2.86276506, 3.09450274, -3.09857384, -1.06955885, -2.83826831, 1.81932195, 2.81296654]) sh = spherical_harmonics(m, n, theta[:, None], phi[:, 
None]) sh2 = sph_harm_sp(m, n, theta[:, None], phi[:, None]) assert_array_almost_equal(sh, sh2, 8) def test_anisotropic_power(): for n_coeffs in [6, 15, 28, 45, 66, 91]: for norm_factor in [0.0005, 0.00001]: # Create some really simple cases: coeffs = np.ones((3, n_coeffs)) max_order = calculate_max_order(coeffs.shape[-1]) # For the case where all coeffs == 1, the ap is simply log of the # number of even orders up to the maximal order: analytic = (np.log(len(range(2, max_order + 2, 2))) - np.log(norm_factor)) answers = [analytic] * 3 apvals = anisotropic_power(coeffs, norm_factor=norm_factor) assert_array_almost_equal(apvals, answers) # Test that this works for single voxel arrays as well: assert_array_almost_equal( anisotropic_power(coeffs[1], norm_factor=norm_factor), answers[1]) # Test that even when we look at an all-zeros voxel, this # avoids a log-of-zero warning: with warnings.catch_warnings(record=True) as w: assert_equal(anisotropic_power(np.zeros(6)), 0) assert len(w) == 0 def test_calculate_max_order(): """Based on the table in: http://jdtournier.github.io/mrtrix-0.2/tractography/preprocess.html """ orders = [2, 4, 6, 8, 10, 12] n_coeffs = [6, 15, 28, 45, 66, 91] for o, n in zip(orders, n_coeffs): assert_equal(calculate_max_order(n), o) if __name__ == "__main__": import nose nose.runmodule() dipy-0.13.0/dipy/reconst/tests/test_shore.py000066400000000000000000000050601317371701200210600ustar00rootroot00000000000000# Tests for shore fitting from math import factorial import numpy as np from scipy.special import genlaguerre, gamma from dipy.data import get_gtab_taiwan_dsi from dipy.reconst.shore import ShoreModel from dipy.sims.voxel import MultiTensor from numpy.testing import (assert_almost_equal, assert_equal, run_module_suite, dec) from dipy.utils.optpkg import optional_package cvxpy, have_cvxpy, _ = optional_package("cvxpy") needs_cvxpy = dec.skipif(not have_cvxpy) # Object to hold module global data class _C(object): pass data = _C() def setup(): data.gtab = get_gtab_taiwan_dsi() data.mevals = np.array(([0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003])) data.angl = [(0, 0), (60, 0)] data.S, sticks = MultiTensor( data.gtab, data.mevals, S0=100.0, angles=data.angl, fractions=[50, 50], snr=None) data.radial_order = 6 data.zeta = 700 data.lambdaN = 1e-12 data.lambdaL = 1e-12 @needs_cvxpy def test_shore_positive_constrain(): asm = ShoreModel(data.gtab, radial_order=data.radial_order, zeta=data.zeta, lambdaN=data.lambdaN, lambdaL=data.lambdaL, constrain_e0=True, positive_constraint=True, pos_grid=11, pos_radius=20e-03) asmfit = asm.fit(data.S) eap = asmfit.pdf_grid(11, 20e-03) assert_equal(eap[eap < 0].sum(), 0) def test_shore_fitting_no_constrain_e0(): asm = ShoreModel(data.gtab, radial_order=data.radial_order, zeta=data.zeta, lambdaN=data.lambdaN, lambdaL=data.lambdaL) asmfit = asm.fit(data.S) assert_almost_equal(compute_e0(asmfit), 1) @needs_cvxpy def test_shore_fitting_constrain_e0(): asm = ShoreModel(data.gtab, radial_order=data.radial_order, zeta=data.zeta, lambdaN=data.lambdaN, lambdaL=data.lambdaL, constrain_e0=True) asmfit = asm.fit(data.S) assert_almost_equal(compute_e0(asmfit), 1) def compute_e0(shorefit): signal_0 = 0 for n in range(int(shorefit.model.radial_order / 2) + 1): signal_0 += (shorefit.shore_coeff[n] * (genlaguerre(n, 0.5)(0) * ((factorial(n)) / (2 * np.pi * (shorefit.model.zeta ** 1.5) * gamma(n + 1.5))) ** 0.5)) return signal_0 if __name__ == '__main__': run_module_suite() 
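# --- Illustrative usage sketch (not part of the original dipy test suite) --
# End-to-end use of ShoreModel as exercised by test_shore.py above and by
# test_shore_metrics.py / test_shore_odf.py below: simulate a crossing
# signal, fit the model, and read the analytical q-space indices.  Parameter
# values are copied from those tests; the function name
# _shore_indices_example is hypothetical.
def _shore_indices_example():
    import numpy as np
    from dipy.data import get_gtab_taiwan_dsi
    from dipy.reconst.shore import ShoreModel
    from dipy.sims.voxel import MultiTensor

    gtab = get_gtab_taiwan_dsi()
    mevals = np.array(([0.0015, 0.0003, 0.0003],
                       [0.0015, 0.0003, 0.0003]))
    S, _ = MultiTensor(gtab, mevals, S0=100.0, angles=[(0, 0), (60, 0)],
                       fractions=[50, 50], snr=None)

    asm = ShoreModel(gtab, radial_order=6, zeta=700,
                     lambdaN=1e-8, lambdaL=1e-8)
    asmfit = asm.fit(S)

    # scalar indices checked in test_shore_metrics.py below
    return asmfit.rtop_signal(), asmfit.rtop_pdf(), asmfit.msd()
# ---------------------------------------------------------------------------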
dipy-0.13.0/dipy/reconst/tests/test_shore_metrics.py000066400000000000000000000067771317371701200226260ustar00rootroot00000000000000import numpy as np from dipy.data import get_gtab_taiwan_dsi from numpy.testing import (assert_almost_equal, assert_equal, run_module_suite) from dipy.reconst.shore import (ShoreModel, shore_matrix, shore_indices, shore_order) from dipy.sims.voxel import ( MultiTensor, all_tensor_evecs, multi_tensor_odf, single_tensor_odf, multi_tensor_rtop, multi_tensor_msd, multi_tensor_pdf) from dipy.data import get_sphere from scipy.special import genlaguerre def test_shore_metrics(): gtab = get_gtab_taiwan_dsi() mevals = np.array(([0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003])) angl = [(0, 0), (60, 0)] S, sticks = MultiTensor(gtab, mevals, S0=100.0, angles=angl, fractions=[50, 50], snr=None) # test shore_indices n = 7 l = 6 m = -4 radial_order, c = shore_order(n, l, m) n2, l2, m2 = shore_indices(radial_order, c) assert_equal(n, n2) assert_equal(l, l2) assert_equal(m, m2) radial_order = 6 c = 41 n, l, m = shore_indices(radial_order, c) radial_order2, c2 = shore_order(n, l, m) assert_equal(radial_order, radial_order2) assert_equal(c, c2) # since we are testing without noise we can use higher order and lower # lambdas, with respect to the default. radial_order = 8 zeta = 700 lambdaN = 1e-12 lambdaL = 1e-12 asm = ShoreModel(gtab, radial_order=radial_order, zeta=zeta, lambdaN=lambdaN, lambdaL=lambdaL) asmfit = asm.fit(S) c_shore = asmfit.shore_coeff cmat = shore_matrix(radial_order, zeta, gtab) S_reconst = np.dot(cmat, c_shore) # test the signal reconstruction S = S / S[0] nmse_signal = np.sqrt(np.sum((S - S_reconst) ** 2)) / (S.sum()) assert_almost_equal(nmse_signal, 0.0, 4) # test if the analytical integral of the pdf is equal to one integral = 0 for n in range(int((radial_order)/2 + 1)): integral += c_shore[n] * (np.pi**(-1.5) * zeta ** (-1.5) * genlaguerre(n, 0.5)(0)) ** 0.5 assert_almost_equal(integral, 1.0, 10) # test if the integral of the pdf calculated on a discrete grid is equal to # one pdf_discrete = asmfit.pdf_grid(17, 40e-3) integral = pdf_discrete.sum() assert_almost_equal(integral, 1.0, 1) # compare the shore pdf with the ground truth multi_tensor pdf sphere = get_sphere('symmetric724') v = sphere.vertices radius = 10e-3 pdf_shore = asmfit.pdf(v * radius) pdf_mt = multi_tensor_pdf(v * radius, mevals=mevals, angles=angl, fractions=[50, 50]) nmse_pdf = np.sqrt(np.sum((pdf_mt - pdf_shore) ** 2)) / (pdf_mt.sum()) assert_almost_equal(nmse_pdf, 0.0, 2) # compare the shore rtop with the ground truth multi_tensor rtop rtop_shore_signal = asmfit.rtop_signal() rtop_shore_pdf = asmfit.rtop_pdf() assert_almost_equal(rtop_shore_signal, rtop_shore_pdf, 9) rtop_mt = multi_tensor_rtop([.5, .5], mevals=mevals) assert_equal(rtop_mt / rtop_shore_signal < 1.10 and rtop_mt / rtop_shore_signal > 0.95, True) # compare the shore msd with the ground truth multi_tensor msd msd_mt = multi_tensor_msd([.5, .5], mevals=mevals) msd_shore = asmfit.msd() assert_equal(msd_mt / msd_shore < 1.05 and msd_mt / msd_shore > 0.95, True) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/reconst/tests/test_shore_odf.py000066400000000000000000000053321317371701200217120ustar00rootroot00000000000000import numpy as np from dipy.data import get_sphere, get_3shell_gtab, get_isbi2013_2shell_gtab from dipy.reconst.shore import ShoreModel from dipy.reconst.shm import sh_to_sf from dipy.direction.peaks import peak_directions from dipy.reconst.odf import gfa from numpy.testing import 
(assert_equal, assert_almost_equal, run_module_suite) from dipy.sims.voxel import SticksAndBall from dipy.core.subdivide_octahedron import create_unit_sphere from dipy.core.sphere_stats import angular_similarity from dipy.reconst.tests.test_dsi import sticks_and_ball_dummies def test_shore_odf(): gtab = get_isbi2013_2shell_gtab() # load symmetric 724 sphere sphere = get_sphere('symmetric724') # load icosahedron sphere sphere2 = create_unit_sphere(5) data, golden_directions = SticksAndBall(gtab, d=0.0015, S0=100, angles=[(0, 0), (90, 0)], fractions=[50, 50], snr=None) asm = ShoreModel(gtab, radial_order=6, zeta=700, lambdaN=1e-8, lambdaL=1e-8) # symmetric724 asmfit = asm.fit(data) odf = asmfit.odf(sphere) odf_sh = asmfit.odf_sh() odf_from_sh = sh_to_sf(odf_sh, sphere, 6, basis_type=None) assert_almost_equal(odf, odf_from_sh, 10) directions, _, _ = peak_directions(odf, sphere, .35, 25) assert_equal(len(directions), 2) assert_almost_equal( angular_similarity(directions, golden_directions), 2, 1) # 5 subdivisions odf = asmfit.odf(sphere2) directions, _, _ = peak_directions(odf, sphere2, .35, 25) assert_equal(len(directions), 2) assert_almost_equal( angular_similarity(directions, golden_directions), 2, 1) sb_dummies = sticks_and_ball_dummies(gtab) for sbd in sb_dummies: data, golden_directions = sb_dummies[sbd] asmfit = asm.fit(data) odf = asmfit.odf(sphere2) directions, _ , _ = peak_directions(odf, sphere2, .35, 25) if len(directions) <= 3: assert_equal(len(directions), len(golden_directions)) if len(directions) > 3: assert_equal(gfa(odf) < 0.1, True) def test_multivox_shore(): gtab = get_3shell_gtab() data = np.random.random([20, 30, 1, gtab.gradients.shape[0]]) radial_order = 4 zeta = 700 asm = ShoreModel(gtab, radial_order=radial_order, zeta=zeta, lambdaN=1e-8, lambdaL=1e-8) asmfit = asm.fit(data) c_shore = asmfit.shore_coeff assert_equal(c_shore.shape[0:3], data.shape[0:3]) assert_equal(np.alltrue(np.isreal(c_shore)), True) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/reconst/tests/test_vec_val_vect.py000066400000000000000000000024261317371701200224030ustar00rootroot00000000000000import numpy as np from numpy.random import randn from numpy.testing import assert_almost_equal, dec from dipy.reconst.vec_val_sum import vec_val_vect def make_vecs_vals(shape): return randn(*(shape)), randn(*(shape[:-2] + shape[-1:])) try: np.einsum except AttributeError: with_einsum = dec.skipif(True, "Need einsum for benchmark") else: def with_einsum(f): return f @with_einsum def test_vec_val_vect(): for shape0 in ((10,), (100,), (10, 12), (12, 10, 5)): for shape1 in ((3, 3), (4, 3), (3, 4)): shape = shape0 + shape1 evecs, evals = make_vecs_vals(shape) res1 = np.einsum('...ij,...j,...kj->...ik', evecs, evals, evecs) assert_almost_equal(res1, vec_val_vect(evecs, evals)) def dumb_sum(vecs, vals): N, rows, cols = vecs.shape res2 = np.zeros((N, rows, rows)) for i in range(N): Q = vecs[i] L = vals[i] res2[i] = np.dot(Q, np.dot(np.diag(L), Q.T)) return res2 def test_vec_val_vect_dumber(): for shape0 in ((10,), (100,)): for shape1 in ((3, 3), (4, 3), (3, 4)): shape = shape0 + shape1 evecs, evals = make_vecs_vals(shape) res1 = dumb_sum(evecs, evals) assert_almost_equal(res1, vec_val_vect(evecs, evals)) dipy-0.13.0/dipy/reconst/utils.py000066400000000000000000000033141317371701200166770ustar00rootroot00000000000000import numpy as np def dki_design_matrix(gtab): r""" Constructs B design matrix for DKI Parameters --------- gtab : GradientTable Measurement directions. 
Returns ------- B : array (N, 22) Design matrix or B matrix for the DKI model B[j, :] = (Bxx, Bxy, Bzz, Bxz, Byz, Bzz, Bxxxx, Byyyy, Bzzzz, Bxxxy, Bxxxz, Bxyyy, Byyyz, Bxzzz, Byzzz, Bxxyy, Bxxzz, Byyzz, Bxxyz, Bxyyz, Bxyzz, BlogS0) """ b = gtab.bvals bvec = gtab.bvecs B = np.zeros((len(b), 22)) B[:, 0] = -b * bvec[:, 0] * bvec[:, 0] B[:, 1] = -2 * b * bvec[:, 0] * bvec[:, 1] B[:, 2] = -b * bvec[:, 1] * bvec[:, 1] B[:, 3] = -2 * b * bvec[:, 0] * bvec[:, 2] B[:, 4] = -2 * b * bvec[:, 1] * bvec[:, 2] B[:, 5] = -b * bvec[:, 2] * bvec[:, 2] B[:, 6] = b * b * bvec[:, 0]**4 / 6 B[:, 7] = b * b * bvec[:, 1]**4 / 6 B[:, 8] = b * b * bvec[:, 2]**4 / 6 B[:, 9] = 4 * b * b * bvec[:, 0]**3 * bvec[:, 1] / 6 B[:, 10] = 4 * b * b * bvec[:, 0]**3 * bvec[:, 2] / 6 B[:, 11] = 4 * b * b * bvec[:, 1]**3 * bvec[:, 0] / 6 B[:, 12] = 4 * b * b * bvec[:, 1]**3 * bvec[:, 2] / 6 B[:, 13] = 4 * b * b * bvec[:, 2]**3 * bvec[:, 0] / 6 B[:, 14] = 4 * b * b * bvec[:, 2]**3 * bvec[:, 1] / 6 B[:, 15] = b * b * bvec[:, 0]**2 * bvec[:, 1]**2 B[:, 16] = b * b * bvec[:, 0]**2 * bvec[:, 2]**2 B[:, 17] = b * b * bvec[:, 1]**2 * bvec[:, 2]**2 B[:, 18] = 2 * b * b * bvec[:, 0]**2 * bvec[:, 1] * bvec[:, 2] B[:, 19] = 2 * b * b * bvec[:, 1]**2 * bvec[:, 0] * bvec[:, 2] B[:, 20] = 2 * b * b * bvec[:, 2]**2 * bvec[:, 0] * bvec[:, 1] B[:, 21] = np.ones(len(b)) return B dipy-0.13.0/dipy/reconst/vec_val_sum.pyx000066400000000000000000000053341317371701200202360ustar00rootroot00000000000000import numpy as np cimport numpy as cnp cimport cython @cython.boundscheck(False) @cython.wraparound(False) def vec_val_vect(vecs, vals): """ Vectorize `vecs`.diag(`vals`).`vecs`.T for last 2 dimensions of `vecs` Parameters ---------- vecs : shape (..., M, N) array containing tensor in last two dimensions; M, N usually equal to (3, 3) vals : shape (..., N) array diagonal values carried in last dimension, ``...`` shape above must match that for `vecs` Returns ------- res : shape (..., M, M) array For all the dimensions ellided by ``...``, loops to get (M, N) ``vec`` matrix, and (N,) ``vals`` vector, and calculates ``vec.dot(np.diag(val).dot(vec.T)``. 
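# --- Illustrative sketch, not part of the original dipy sources ---
# dki_design_matrix() above builds the (N, 22) B matrix row by row from the
# b-values and b-vectors of a gradient table.  A minimal, hedged usage sketch;
# the bvals/bvecs below are toy values chosen only for illustration:
import numpy as np
from dipy.core.gradients import gradient_table
from dipy.reconst.utils import dki_design_matrix

bvals = np.array([0., 1000., 1000., 1000.])
bvecs = np.array([[0., 0., 0.],
                  [1., 0., 0.],
                  [0., 1., 0.],
                  [0., 0., 1.]])
gtab = gradient_table(bvals, bvecs)
B = dki_design_matrix(gtab)
assert B.shape == (4, 22)  # 6 diffusion + 15 kurtosis terms + log(S0) column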
Raises ------ ValueError : non-matching ``...`` dimensions of `vecs`, `vals` ValueError : non-matching ``N`` dimensions of `vecs`, `vals` Examples -------- Make a 3D array where the first dimension is only 1 >>> vecs = np.arange(9).reshape((1, 3, 3)) >>> vals = np.arange(3).reshape((1, 3)) >>> vec_val_vect(vecs, vals) array([[[ 9., 24., 39.], [ 24., 66., 108.], [ 39., 108., 177.]]]) That's the same as the 2D case (apart from the float casting): >>> vecs = np.arange(9).reshape((3, 3)) >>> vals = np.arange(3) >>> np.dot(vecs, np.dot(np.diag(vals), vecs.T)) array([[ 9, 24, 39], [ 24, 66, 108], [ 39, 108, 177]]) """ vecs = np.asarray(vecs) vals = np.asarray(vals) cdef: cnp.npy_intp t, N, ndim, rows, cols, r, c, in_r_out_c double [:, :, :] vecr double [:, :] valr double [:, :] vec double [:, :] out_vec double [:] val double [:, :, :] out double row_c # Avoid negative indexing to avoid errors with False boundscheck decorator # and Cython > 0.18 ndim = vecs.ndim common_shape = vecs.shape[:(ndim-2)] rows, cols = vecs.shape[ndim-2], vecs.shape[ndim-1] if vals.shape != common_shape + (cols,): raise ValueError('dimensions do not match') N = np.prod(common_shape) vecr = np.array(vecs.reshape((N, rows, cols)), dtype=float) valr = np.array(vals.reshape((N, cols)), dtype=float) out = np.zeros((N, rows, rows)) with nogil: for t in range(N): # loop over the early dimensions vec = vecr[t] val = valr[t] out_vec = out[t] for r in range(rows): for c in range(cols): row_c = vec[r, c] * val[c] for in_r_out_c in range(rows): out_vec[r, in_r_out_c] += row_c * vec[in_r_out_c, c] return np.reshape(out, (common_shape + (rows, rows))) dipy-0.13.0/dipy/segment/000077500000000000000000000000001317371701200151515ustar00rootroot00000000000000dipy-0.13.0/dipy/segment/__init__.py000066400000000000000000000000001317371701200172500ustar00rootroot00000000000000dipy-0.13.0/dipy/segment/benchmarks/000077500000000000000000000000001317371701200172665ustar00rootroot00000000000000dipy-0.13.0/dipy/segment/benchmarks/__init__.py000066400000000000000000000000001317371701200213650ustar00rootroot00000000000000dipy-0.13.0/dipy/segment/benchmarks/bench_quickbundles.py000066400000000000000000000070651317371701200235000ustar00rootroot00000000000000""" Benchmarks for QuickBundles Run all benchmarks with:: import dipy.segment as dipysegment dipysegment.bench() If you have doctests enabled by default in nose (with a noserc file or environment variable), and you have a numpy version <= 1.6.1, this will also run the doctests, let's hope they pass. 
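# --- Illustrative sketch, not part of the original dipy sources ---
# vec_val_vect() above vectorizes vecs.diag(vals).vecs.T over the last two
# axes; the test module earlier checks it against np.einsum.  A small, hedged
# sketch of that same equivalence on a stack of random 3x3 eigen-systems:
import numpy as np
from dipy.reconst.vec_val_sum import vec_val_vect

rng = np.random.RandomState(0)
evecs = rng.randn(10, 3, 3)   # ten 3x3 "eigenvector" matrices
evals = rng.randn(10, 3)      # ten sets of three "eigenvalues"
res = vec_val_vect(evecs, evals)
ref = np.einsum('...ij,...j,...kj->...ik', evecs, evals, evecs)
assert np.allclose(res, ref)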
Run this benchmark with: nosetests -s --match '(?:^|[\\b_\\.//-])[Bb]ench' bench_quickbundles.py """ import numpy as np import nibabel as nib from dipy.data import get_data import dipy.tracking.streamline as streamline_utils from dipy.segment.metric import Metric from dipy.segment.quickbundles import QuickBundles as QB_Old from dipy.segment.clustering import QuickBundles as QB_New from nose.tools import assert_equal from dipy.testing import assert_arrays_equal from numpy.testing import assert_array_equal, measure class MDFpy(Metric): def are_compatible(self, shape1, shape2): return shape1 == shape2 def dist(self, features1, features2): dist = np.sqrt(np.sum((features1-features2)**2, axis=1)) dist = np.sum(dist/len(features1)) return dist def bench_quickbundles(): dtype = "float32" repeat = 10 nb_points = 18 streams, hdr = nib.trackvis.read(get_data('fornix')) fornix = [s[0].astype(dtype) for s in streams] fornix = streamline_utils.set_number_of_points(fornix, nb_points) # Create eight copies of the fornix to be clustered (one in each octant). streamlines = [] streamlines += [s + np.array([100, 100, 100], dtype) for s in fornix] streamlines += [s + np.array([100, -100, 100], dtype) for s in fornix] streamlines += [s + np.array([100, 100, -100], dtype) for s in fornix] streamlines += [s + np.array([100, -100, -100], dtype) for s in fornix] streamlines += [s + np.array([-100, 100, 100], dtype) for s in fornix] streamlines += [s + np.array([-100, -100, 100], dtype) for s in fornix] streamlines += [s + np.array([-100, 100, -100], dtype) for s in fornix] streamlines += [s + np.array([-100, -100, -100], dtype) for s in fornix] # The expected number of clusters of the fornix using threshold=10 is 4. threshold = 10. expected_nb_clusters = 4*8 print("Timing QuickBundles 1.0 vs. 2.0") qb = QB_Old(streamlines, threshold, pts=None) qb1_time = measure("QB_Old(streamlines, threshold, nb_points)", repeat) print("QuickBundles time: {0:.4}sec".format(qb1_time)) assert_equal(qb.total_clusters, expected_nb_clusters) sizes1 = [qb.partitions()[i]['N'] for i in range(qb.total_clusters)] indices1 = [qb.partitions()[i]['indices'] for i in range(qb.total_clusters)] qb2 = QB_New(threshold) qb2_time = measure("clusters = qb2.cluster(streamlines)", repeat) print("QuickBundles2 time: {0:.4}sec".format(qb2_time)) print("Speed up of {0}x".format(qb1_time/qb2_time)) clusters = qb2.cluster(streamlines) sizes2 = map(len, clusters) indices2 = map(lambda c: c.indices, clusters) assert_equal(len(clusters), expected_nb_clusters) assert_array_equal(sizes2, sizes1) assert_arrays_equal(indices2, indices1) qb = QB_New(threshold, metric=MDFpy()) qb3_time = measure("clusters = qb.cluster(streamlines)", repeat) print("QuickBundles2_python time: {0:.4}sec".format(qb3_time)) print("Speed up of {0}x".format(qb1_time/qb3_time)) clusters = qb.cluster(streamlines) sizes3 = map(len, clusters) indices3 = map(lambda c: c.indices, clusters) assert_equal(len(clusters), expected_nb_clusters) assert_array_equal(sizes3, sizes1) assert_arrays_equal(indices3, indices1) dipy-0.13.0/dipy/segment/clustering.py000066400000000000000000000402641317371701200177100ustar00rootroot00000000000000import operator import numpy as np from abc import ABCMeta, abstractmethod from dipy.segment.metric import Metric from dipy.segment.metric import ResampleFeature from dipy.segment.metric import AveragePointwiseEuclideanMetric class Identity: """ Provides identity indexing functionality. This can replace any class supporting indexing used for referencing (e.g. 
list, tuple). Indexing an instance of this class will return the index provided instead of the element. It does not support slicing. """ def __getitem__(self, idx): return idx class Cluster(object): """ Provides functionalities for interacting with a cluster. Useful container to retrieve index of elements grouped together. If a reference to the data is provided to `cluster_map`, elements will be returned instead of their index when possible. Parameters ---------- cluster_map : `ClusterMap` object Reference to the set of clusters this cluster is being part of. id : int Id of this cluster in its associated `cluster_map` object. refdata : list (optional) Actual elements that clustered indices refer to. Notes ----- A cluster does not contain actual data but instead knows how to retrieve them using its `ClusterMap` object. """ def __init__(self, id=0, indices=None, refdata=Identity()): self.id = id self.refdata = refdata self.indices = indices if indices is not None else [] def __len__(self): return len(self.indices) def __getitem__(self, idx): """ Gets element(s) through indexing. If a reference to the data was provided (via refdata property) elements will be returned instead of their index. Parameters ---------- idx : int, slice or list Index of the element(s) to get. Returns ------- `Cluster` object(s) When `idx` is a int, returns a single element. When `idx` is either a slice or a list, returns a list of elements. """ if isinstance(idx, int) or isinstance(idx, np.integer): return self.refdata[self.indices[idx]] elif type(idx) is slice: return [self.refdata[i] for i in self.indices[idx]] elif type(idx) is list: return [self[i] for i in idx] msg = "Index must be a int or a slice! Not '{0}'".format(type(idx)) raise TypeError(msg) def __iter__(self): return (self[i] for i in range(len(self))) def __str__(self): return "[" + ", ".join(map(str, self.indices)) + "]" def __repr__(self): return "Cluster(" + str(self) + ")" def __eq__(self, other): return isinstance(other, Cluster) and self.indices == other.indices def __ne__(self, other): return not self == other def __cmp__(self, other): raise TypeError("Cannot compare Cluster objects.") def assign(self, *indices): """ Assigns indices to this cluster. Parameters ---------- *indices : list of indices Indices to add to this cluster. """ self.indices += indices class ClusterCentroid(Cluster): """ Provides functionalities for interacting with a cluster. Useful container to retrieve the indices of elements grouped together and the cluster's centroid. If a reference to the data is provided to `cluster_map`, elements will be returned instead of their index when possible. Parameters ---------- cluster_map : `ClusterMapCentroid` object Reference to the set of clusters this cluster is being part of. id : int Id of this cluster in its associated `cluster_map` object. refdata : list (optional) Actual elements that clustered indices refer to. Notes ----- A cluster does not contain actual data but instead knows how to retrieve them using its `ClusterMapCentroid` object. """ def __init__(self, centroid, id=0, indices=None, refdata=Identity()): super(ClusterCentroid, self).__init__(id, indices, refdata) self.centroid = centroid.copy() self.new_centroid = centroid.copy() def __eq__(self, other): return (isinstance(other, ClusterCentroid) and np.all(self.centroid == other.centroid) and super(ClusterCentroid, self).__eq__(other)) def assign(self, id_datum, features): """ Assigns a data point to this cluster. 
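# --- Illustrative sketch, not part of the original dipy sources ---
# A Cluster only stores indices; when `refdata` is provided, indexing the
# cluster returns the referenced elements instead of the indices.  Hedged
# sketch with a toy data list:
import numpy as np
from dipy.segment.clustering import Cluster

data = [np.array([[0., 0, 0]]), np.array([[1., 1, 1]]), np.array([[2., 2, 2]])]
cluster = Cluster(id=0, indices=[0, 2], refdata=data)
print(len(cluster))   # 2
print(cluster[1])     # -> data[2], i.e. array([[ 2.,  2.,  2.]])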
Parameters ---------- id_datum : int Index of the data point to add to this cluster. features : 2D array Data point's features to modify this cluster's centroid. """ N = len(self) self.new_centroid = ((self.new_centroid * N) + features) / (N+1.) super(ClusterCentroid, self).assign(id_datum) def update(self): """ Update centroid of this cluster. Returns ------- converged : bool Tells if the centroid has moved. """ converged = np.equal(self.centroid, self.new_centroid) self.centroid = self.new_centroid.copy() return converged class ClusterMap(object): """ Provides functionalities for interacting with clustering outputs. Useful container to create, remove, retrieve and filter clusters. If `refdata` is given, elements will be returned instead of their index when using `Cluster` objects. Parameters ---------- refdata : list Actual elements that clustered indices refer to. """ def __init__(self, refdata=Identity()): self._clusters = [] self.refdata = refdata @property def clusters(self): return self._clusters @property def refdata(self): return self._refdata @refdata.setter def refdata(self, value): if value is None: value = Identity() self._refdata = value for cluster in self.clusters: cluster.refdata = self._refdata def __len__(self): return len(self.clusters) def __getitem__(self, idx): """ Gets cluster(s) through indexing. Parameters ---------- idx : int, slice, list or boolean array Index of the element(s) to get. Returns ------- `Cluster` object(s) When `idx` is a int, returns a single `Cluster` object. When `idx`is either a slice, list or boolean array, returns a list of `Cluster` objects. """ if isinstance(idx, np.ndarray) and idx.dtype == np.bool: return [self.clusters[i] for i, take_it in enumerate(idx) if take_it] elif type(idx) is slice: return [self.clusters[i] for i in range(*idx.indices(len(self)))] elif type(idx) is list: return [self.clusters[i] for i in idx] return self.clusters[idx] def __iter__(self): return iter(self.clusters) def __str__(self): return "[" + ", ".join(map(str, self)) + "]" def __repr__(self): return "ClusterMap(" + str(self) + ")" def _richcmp(self, other, op): """ Compares this cluster map with another cluster map or an integer. Two `ClusterMap` objects are equal if they contain the same clusters. When comparing a `ClusterMap` object with an integer, the comparison will be performed on the size of the clusters instead. Parameters ---------- other : `ClusterMap` object or int Object to compare to. op : rich comparison operators (see module `operator`) Valid operators are: lt, le, eq, ne, gt or ge. Returns ------- bool or 1D array (bool) When comparing to another `ClusterMap` object, it returns whether the two `ClusterMap` objects contain the same clusters or not. When comparing to an integer the comparison is performed on the clusters sizes, it returns an array of boolean. 
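# --- Illustrative sketch, not part of the original dipy sources ---
# ClusterCentroid.assign() maintains a running mean: the updated centroid is
# (old * N + features) / (N + 1), and update() copies it into `centroid`.
# A small, hedged sketch of that behaviour on toy features:
import numpy as np
from dipy.segment.clustering import ClusterCentroid

cluster = ClusterCentroid(np.zeros((1, 3), dtype=np.float32))
cluster.assign(0, np.array([[1., 0., 0.]], dtype=np.float32))
cluster.assign(1, np.array([[0., 1., 0.]], dtype=np.float32))
cluster.update()          # copy the running mean into cluster.centroid
print(cluster.centroid)   # ~[[0.5, 0.5, 0.]]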
""" if isinstance(other, ClusterMap): if op is operator.eq: return isinstance(other, ClusterMap) \ and len(self) == len(other) \ and self.clusters == other.clusters elif op is operator.ne: return not self == other raise NotImplementedError( "Can only check if two ClusterMap instances are equal or not.") elif isinstance(other, int): return np.array([op(len(cluster), other) for cluster in self]) msg = ("ClusterMap only supports comparison with a int or another" " instance of Clustermap.") raise NotImplementedError(msg) def __eq__(self, other): return self._richcmp(other, operator.eq) def __ne__(self, other): return self._richcmp(other, operator.ne) def __lt__(self, other): return self._richcmp(other, operator.lt) def __le__(self, other): return self._richcmp(other, operator.le) def __gt__(self, other): return self._richcmp(other, operator.gt) def __ge__(self, other): return self._richcmp(other, operator.ge) def add_cluster(self, *clusters): """ Adds one or multiple clusters to this cluster map. Parameters ---------- *clusters : `Cluster` object, ... Cluster(s) to be added in this cluster map. """ for cluster in clusters: self.clusters.append(cluster) cluster.refdata = self.refdata def remove_cluster(self, *clusters): """ Remove one or multiple clusters from this cluster map. Parameters ---------- *clusters : `Cluster` object, ... Cluster(s) to be removed from this cluster map. """ for cluster in clusters: self.clusters.remove(cluster) def clear(self): """ Remove all clusters from this cluster map. """ del self.clusters[:] def size(self): """ Gets number of clusters contained in this cluster map. """ return len(self) def clusters_sizes(self): """ Gets the size of every cluster contained in this cluster map. Returns ------- list of int Sizes of every cluster in this cluster map. """ return list(map(len, self)) def get_large_clusters(self, min_size): """ Gets clusters which contains at least `min_size` elements. Parameters ---------- min_size : int Minimum number of elements a cluster needs to have to be selected. Returns ------- list of `Cluster` objects Clusters having at least `min_size` elements. """ return self[self >= min_size] def get_small_clusters(self, max_size): """ Gets clusters which contains at most `max_size` elements. Parameters ---------- max_size : int Maximum number of elements a cluster can have to be selected. Returns ------- list of `Cluster` objects Clusters having at most `max_size` elements. """ return self[self <= max_size] class ClusterMapCentroid(ClusterMap): """ Provides functionalities for interacting with clustering outputs that have centroids. Allows to retrieve easely the centroid of every cluster. Also, it is a useful container to create, remove, retrieve and filter clusters. If `refdata` is given, elements will be returned instead of their index when using `ClusterCentroid` objects. Parameters ---------- refdata : list Actual elements that clustered indices refer to. """ @property def centroids(self): return [cluster.centroid for cluster in self.clusters] class Clustering(object): __metaclass__ = ABCMeta @abstractmethod def cluster(self, data, ordering=None): """ Clusters `data`. Subclasses will perform their clustering algorithm here. Parameters ---------- data : list of N-dimensional arrays Each array represents a data point. ordering : iterable of indices, optional Specifies the order in which data points will be clustered. Returns ------- `ClusterMap` object Result of the clustering. """ msg = "Subclass has to define method 'cluster(data, ordering)'!" 
raise NotImplementedError(msg) class QuickBundles(Clustering): r""" Clusters streamlines using QuickBundles [Garyfallidis12]_. Given a list of streamlines, the QuickBundles algorithm sequentially assigns each streamline to its closest bundle in $\mathcal{O}(Nk)$ where $N$ is the number of streamlines and $k$ is the final number of bundles. If for a given streamline its closest bundle is farther than `threshold`, a new bundle is created and the streamline is assigned to it except if the number of bundles has already exceeded `max_nb_clusters`. Parameters ---------- threshold : float The maximum distance from a bundle for a streamline to be still considered as part of it. metric : str or `Metric` object (optional) The distance metric to use when comparing two streamlines. By default, the Minimum average Direct-Flip (MDF) distance [Garyfallidis12]_ is used and streamlines are automatically resampled so they have 12 points. max_nb_clusters : int Limits the creation of bundles. Examples -------- >>> from dipy.segment.clustering import QuickBundles >>> from dipy.data import get_data >>> from nibabel import trackvis as tv >>> streams, hdr = tv.read(get_data('fornix')) >>> streamlines = [i[0] for i in streams] >>> # Segment fornix with a treshold of 10mm and streamlines resampled >>> # to 12 points. >>> qb = QuickBundles(threshold=10.) >>> clusters = qb.cluster(streamlines) >>> len(clusters) 4 >>> list(map(len, clusters)) [61, 191, 47, 1] >>> # Resampling streamlines differently is done explicitly as follows. >>> # Note this has an impact on the speed and the accuracy (tradeoff). >>> from dipy.segment.metric import ResampleFeature >>> from dipy.segment.metric import AveragePointwiseEuclideanMetric >>> feature = ResampleFeature(nb_points=2) >>> metric = AveragePointwiseEuclideanMetric(feature) >>> qb = QuickBundles(threshold=10., metric=metric) >>> clusters = qb.cluster(streamlines) >>> len(clusters) 4 >>> list(map(len, clusters)) [58, 142, 72, 28] References ---------- .. [Garyfallidis12] Garyfallidis E. et al., QuickBundles a method for tractography simplification, Frontiers in Neuroscience, vol 6, no 175, 2012. """ def __init__(self, threshold, metric="MDF_12points", max_nb_clusters=np.iinfo('i4').max): self.threshold = threshold self.max_nb_clusters = max_nb_clusters if isinstance(metric, Metric): self.metric = metric elif metric == "MDF_12points": feature = ResampleFeature(nb_points=12) self.metric = AveragePointwiseEuclideanMetric(feature) else: raise ValueError("Unknown metric: {0}".format(metric)) def cluster(self, streamlines, ordering=None): """ Clusters `streamlines` into bundles. Performs quickbundles algorithm using predefined metric and threshold. Parameters ---------- streamlines : list of 2D arrays Each 2D array represents a sequence of 3D points (points, 3). ordering : iterable of indices Specifies the order in which data points will be clustered. Returns ------- `ClusterMapCentroid` object Result of the clustering. 
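# --- Illustrative sketch, not part of the original dipy sources ---
# The ClusterMap comparison operators defined earlier act on cluster sizes,
# which is what get_large_clusters()/get_small_clusters() rely on.  A hedged
# sketch combining QuickBundles with size-based filtering, on three tiny
# synthetic "streamlines" (two close together, one far away):
import numpy as np
from dipy.segment.clustering import QuickBundles

streamlines = [np.array([[0., 0, 0], [1, 0, 0], [2, 0, 0]], dtype=np.float32),
               np.array([[0., 1, 0], [1, 1, 0], [2, 1, 0]], dtype=np.float32),
               np.array([[0., 0, 50], [1, 0, 50], [2, 0, 50]], dtype=np.float32)]
qb = QuickBundles(threshold=5.)
clusters = qb.cluster(streamlines)
print(len(clusters))                  # expected: 2 bundles
big = clusters.get_large_clusters(2)  # clusters with at least 2 members
print([len(c) for c in big])          # expected: [2]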
""" from dipy.segment.clustering_algorithms import quickbundles cluster_map = quickbundles(streamlines, self.metric, threshold=self.threshold, max_nb_clusters=self.max_nb_clusters, ordering=ordering) cluster_map.refdata = streamlines return cluster_map dipy-0.13.0/dipy/segment/clustering_algorithms.pyx000066400000000000000000000075661317371701200223410ustar00rootroot00000000000000# distutils: language = c # cython: wraparound=False, cdivision=True, boundscheck=False import itertools import numpy as np from cythonutils cimport Data2D, shape2tuple from metricspeed cimport Metric from clusteringspeed cimport ClustersCentroid, Centroid, QuickBundles from dipy.segment.clustering import ClusterMapCentroid, ClusterCentroid DTYPE = np.float32 DEF BIGGEST_DOUBLE = 1.7976931348623157e+308 # np.finfo('f8').max DEF BIGGEST_FLOAT = 3.4028235e+38 # np.finfo('f4').max DEF BIGGEST_INT = 2147483647 # np.iinfo('i4').max def clusters_centroid2clustermap_centroid(ClustersCentroid clusters_list): """ Converts a `ClustersCentroid` object (Cython) to a `ClusterMapCentroid` object (Python). Only basic functionalities are provided with a `Clusters` object. To have more flexibility, one should use `ClusterMap` object, hence this conversion function. Parameters ---------- clusters_list : `ClustersCentroid` object Result of the clustering contained in a Cython's object. Returns ------- `ClusterMapCentroid` object Result of the clustering contained in a Python's object. """ clusters = ClusterMapCentroid() for i in range(clusters_list._nb_clusters): centroid = np.asarray(clusters_list.centroids[i].features) indices = np.asarray( clusters_list.clusters_indices[i]).tolist() clusters.add_cluster(ClusterCentroid(id=i, centroid=centroid, indices=indices)) return clusters def peek(iterable): """ Returns the first element of an iterable and the iterator. """ iterable = iter(iterable) first = next(iterable, None) iterator = itertools.chain([first], iterable) return first, iterator def quickbundles(streamlines, Metric metric, double threshold, long max_nb_clusters=BIGGEST_INT, ordering=None): """ Clusters streamlines using QuickBundles. Parameters ---------- streamlines : list of 2D arrays List of streamlines to cluster. metric : `Metric` object Tells how to compute the distance between two streamlines. threshold : double The maximum distance from a cluster for a streamline to be still considered as part of it. max_nb_clusters : int, optional Limits the creation of bundles. (Default: inf) ordering : iterable of indices, optional Iterate through `data` using the given ordering. Returns ------- `ClusterMapCentroid` object Result of the clustering. References ---------- .. [Garyfallidis12] Garyfallidis E. et al., QuickBundles a method for tractography simplification, Frontiers in Neuroscience, vol 6, no 175, 2012. 
""" # Threshold of np.inf is not supported, set it to 'biggest_double' threshold = min(threshold, BIGGEST_DOUBLE) # Threshold of -np.inf is not supported, set it to 0 threshold = max(threshold, 0) if ordering is None: ordering = xrange(len(streamlines)) # Check if `ordering` or `streamlines` are empty first_idx, ordering = peek(ordering) if first_idx is None or len(streamlines) == 0: return ClusterMapCentroid() features_shape = shape2tuple(metric.feature.c_infer_shape(streamlines[first_idx].astype(DTYPE))) cdef QuickBundles qb = QuickBundles(features_shape, metric, threshold, max_nb_clusters) cdef int idx for idx in ordering: streamline = streamlines[idx] if not streamline.flags.writeable or streamline.dtype != DTYPE: streamline = streamline.astype(DTYPE) cluster_id = qb.assignment_step(streamline, idx) # The update step is performed right after the assignement step instead # of after all streamlines have been assigned like k-means algorithm. qb.update_step(cluster_id) return clusters_centroid2clustermap_centroid(qb.clusters) dipy-0.13.0/dipy/segment/clusteringspeed.pxd000066400000000000000000000026341317371701200210730ustar00rootroot00000000000000from cythonutils cimport Data2D, Shape, shape2tuple, tuple2shape from metricspeed cimport Metric cdef struct Centroid: Data2D features int size cdef struct NearestCluster: int id double dist cdef class Clusters: cdef int _nb_clusters cdef int** clusters_indices cdef int* clusters_size cdef void c_assign(Clusters self, int id_cluster, int id_element, Data2D element) nogil except * cdef int c_create_cluster(Clusters self) nogil except -1 cdef int c_size(Clusters self) nogil cdef class ClustersCentroid(Clusters): cdef Centroid* centroids cdef Centroid* _updated_centroids cdef Shape _centroid_shape cdef float eps cdef void c_assign(ClustersCentroid self, int id_cluster, int id_element, Data2D element) nogil except * cdef int c_create_cluster(ClustersCentroid self) nogil except -1 cdef int c_update(ClustersCentroid self, int id_cluster) nogil except -1 cdef class QuickBundles(object): cdef Shape features_shape cdef Data2D features cdef Data2D features_flip cdef ClustersCentroid clusters cdef Metric metric cdef double threshold cdef int max_nb_clusters cdef NearestCluster find_nearest_cluster(QuickBundles self, Data2D features) nogil except * cdef int assignment_step(QuickBundles self, Data2D datum, int datum_id) nogil except -1 cdef void update_step(QuickBundles self, int cluster_id) nogil except * dipy-0.13.0/dipy/segment/clusteringspeed.pyx000066400000000000000000000303001317371701200211070ustar00rootroot00000000000000# distutils: language = c # cython: wraparound=False, cdivision=True, boundscheck=False import numpy as np cimport numpy as cnp from libc.math cimport fabs from libc.stdlib cimport calloc, realloc, free from libc.string cimport memset from cythonutils cimport Data2D, Shape, shape2tuple, tuple2shape, same_shape DTYPE = np.float32 DEF BIGGEST_DOUBLE = 1.7976931348623157e+308 # np.finfo('f8').max DEF BIGGEST_INT = 2147483647 # np.iinfo('i4').max cdef class Clusters: """ Provides Cython functionalities to interact with clustering outputs. This class allows to create clusters and assign elements to them. Assignements of a cluster are represented as a list of element indices. """ def __init__(Clusters self): self._nb_clusters = 0 self.clusters_indices = NULL self.clusters_size = NULL def __dealloc__(Clusters self): """ Deallocates memory created with `c_create_cluster` and `c_assign`. 
""" for i in range(self._nb_clusters): free(self.clusters_indices[i]) self.clusters_indices[i] = NULL free(self.clusters_indices) self.clusters_indices = NULL free(self.clusters_size) self.clusters_size = NULL cdef int c_size(Clusters self) nogil: """ Returns the number of clusters. """ return self._nb_clusters cdef void c_assign(Clusters self, int id_cluster, int id_element, Data2D element) nogil except *: """ Assigns an element to a cluster. Parameters ---------- id_cluster : int Index of the cluster to which the element will be assigned. id_element : int Index of the element to assign. element : 2d array (float) Data of the element to assign. """ cdef cnp.npy_intp C = self.clusters_size[id_cluster] self.clusters_indices[id_cluster] = realloc(self.clusters_indices[id_cluster], (C+1)*sizeof(int)) self.clusters_indices[id_cluster][C] = id_element self.clusters_size[id_cluster] += 1 cdef int c_create_cluster(Clusters self) nogil except -1: """ Creates a cluster and adds it at the end of the list. Returns ------- id_cluster : int Index of the new cluster. """ self.clusters_indices = realloc(self.clusters_indices, (self._nb_clusters+1)*sizeof(int*)) self.clusters_indices[self._nb_clusters] = calloc(0, sizeof(int)) self.clusters_size = realloc(self.clusters_size, (self._nb_clusters+1)*sizeof(int)) self.clusters_size[self._nb_clusters] = 0 self._nb_clusters += 1 return self._nb_clusters - 1 cdef class ClustersCentroid(Clusters): """ Provides Cython functionalities to interact with clustering outputs having the notion of cluster's centroid. This class allows to create clusters, assign elements to them and update their centroid. Parameters ---------- centroid_shape : int, tuple of int Information about the shape of the centroid. eps : float, optional Consider the centroid has not changed if the changes per dimension are less than this epsilon. (Default: 1e-6) """ def __init__(ClustersCentroid self, centroid_shape, float eps=1e-6, *args, **kwargs): Clusters.__init__(self, *args, **kwargs) if isinstance(centroid_shape, int): centroid_shape = (1, centroid_shape) if not isinstance(centroid_shape, tuple): raise ValueError("'centroid_shape' must be a tuple or a int.") self._centroid_shape = tuple2shape(centroid_shape) self.centroids = NULL self._updated_centroids = NULL self.eps = eps def __dealloc__(ClustersCentroid self): """ Deallocates memory created with `c_create_cluster` and `c_assign`. Notes ----- The `__dealloc__` method of the superclass is automatically called: http://docs.cython.org/src/userguide/special_methods.html#finalization-method-dealloc """ cdef cnp.npy_intp i for i in range(self._nb_clusters): free(&(self.centroids[i].features[0, 0])) free(&(self._updated_centroids[i].features[0, 0])) self.centroids[i].features = None # Necessary to decrease refcount self._updated_centroids[i].features = None # Necessary to decrease refcount free(self.centroids) self.centroids = NULL free(self._updated_centroids) self._updated_centroids = NULL cdef void c_assign(ClustersCentroid self, int id_cluster, int id_element, Data2D element) nogil except *: """ Assigns an element to a cluster. In addition of keeping element's index, an updated version of the cluster's centroid is computed. The centroid is the average of all elements in a cluster. Parameters ---------- id_cluster : int Index of the cluster to which the element will be assigned. id_element : int Index of the element to assign. element : 2d array (float) Data of the element to assign. 
""" cdef Data2D updated_centroid = self._updated_centroids[id_cluster].features cdef cnp.npy_intp C = self.clusters_size[id_cluster] cdef cnp.npy_intp n, d cdef cnp.npy_intp N = updated_centroid.shape[0], D = updated_centroid.shape[1] for n in range(N): for d in range(D): updated_centroid[n, d] = ((updated_centroid[n, d] * C) + element[n, d]) / (C+1) Clusters.c_assign(self, id_cluster, id_element, element) cdef int c_update(ClustersCentroid self, cnp.npy_intp id_cluster) nogil except -1: """ Update the centroid of a cluster. Parameters ---------- id_cluster : int Index of the cluster of which its centroid will be updated. Returns ------- int Tells whether the centroid has changed or not, i.e. converged. """ cdef Data2D centroid = self.centroids[id_cluster].features cdef Data2D updated_centroid = self._updated_centroids[id_cluster].features cdef cnp.npy_intp N = updated_centroid.shape[0], D = centroid.shape[1] cdef cnp.npy_intp n, d cdef int converged = 1 for n in range(N): for d in range(D): converged &= fabs(centroid[n, d] - updated_centroid[n, d]) < self.eps centroid[n, d] = updated_centroid[n, d] return converged cdef int c_create_cluster(ClustersCentroid self) nogil except -1: """ Creates a cluster and adds it at the end of the list. Returns ------- id_cluster : int Index of the new cluster. """ self.centroids = realloc(self.centroids, (self._nb_clusters+1)*sizeof(Centroid)) # Zero-initialize the Centroid structure memset(&self.centroids[self._nb_clusters], 0, sizeof(Centroid)) self._updated_centroids = realloc(self._updated_centroids, (self._nb_clusters+1)*sizeof(Centroid)) # Zero-initialize the new Centroid structure memset(&self._updated_centroids[self._nb_clusters], 0, sizeof(Centroid)) with gil: self.centroids[self._nb_clusters].features = calloc(self._centroid_shape.size, sizeof(float)) self._updated_centroids[self._nb_clusters].features = calloc(self._centroid_shape.size, sizeof(float)) return Clusters.c_create_cluster(self) cdef class QuickBundles(object): def __init__(QuickBundles self, features_shape, Metric metric, double threshold, int max_nb_clusters=BIGGEST_INT): self.metric = metric self.features_shape = tuple2shape(features_shape) self.threshold = threshold self.max_nb_clusters = max_nb_clusters self.clusters = ClustersCentroid(features_shape) self.features = np.empty(features_shape, dtype=DTYPE) self.features_flip = np.empty(features_shape, dtype=DTYPE) cdef NearestCluster find_nearest_cluster(QuickBundles self, Data2D features) nogil except *: """ Finds the nearest cluster of a datum given its `features` vector. Parameters ---------- features : 2D array Features of a datum. Returns ------- `NearestCluster` object Nearest cluster to `features` according to the given metric. """ cdef: cnp.npy_intp k double dist NearestCluster nearest_cluster nearest_cluster.id = -1 nearest_cluster.dist = BIGGEST_DOUBLE for k in range(self.clusters.c_size()): dist = self.metric.c_dist(self.clusters.centroids[k].features, features) # Keep track of the nearest cluster if dist < nearest_cluster.dist: nearest_cluster.dist = dist nearest_cluster.id = k return nearest_cluster cdef int assignment_step(QuickBundles self, Data2D datum, int datum_id) nogil except -1: """ Compute the assignment step of the QuickBundles algorithm. It will assign a datum to its closest cluster according to a given metric. If the distance between the datum and its closest cluster is greater than the specified threshold, a new cluster is created and the datum is assigned to it. 
Parameters ---------- datum : 2D array The datum to assign. datum_id : int ID of the datum, usually its index. Returns ------- int Index of the cluster the datum has been assigned to. """ cdef: Data2D features_to_add = self.features NearestCluster nearest_cluster, nearest_cluster_flip Shape features_shape = self.metric.feature.c_infer_shape(datum) # Check if datum is compatible with the metric if not same_shape(features_shape, self.features_shape): with gil: raise ValueError("All features do not have the same shape! QuickBundles requires this to compute centroids!") # Check if datum is compatible with the metric if not self.metric.c_are_compatible(features_shape, self.features_shape): with gil: raise ValueError("Data features' shapes must be compatible according to the metric used!") # Find nearest cluster to datum self.metric.feature.c_extract(datum, self.features) nearest_cluster = self.find_nearest_cluster(self.features) # Find nearest cluster to s_i_flip if metric is not order invariant if not self.metric.feature.is_order_invariant: self.metric.feature.c_extract(datum[::-1], self.features_flip) nearest_cluster_flip = self.find_nearest_cluster(self.features_flip) # If we found a lower distance using a flipped datum, # add the flipped version instead if nearest_cluster_flip.dist < nearest_cluster.dist: nearest_cluster.id = nearest_cluster_flip.id nearest_cluster.dist = nearest_cluster_flip.dist features_to_add = self.features_flip # Check if distance with the nearest cluster is below some threshold # or if we already have the maximum number of clusters. # If the former or the latter is true, assign datum to its nearest cluster # otherwise create a new cluster and assign the datum to it. if not (nearest_cluster.dist < self.threshold or self.clusters.c_size() >= self.max_nb_clusters): nearest_cluster.id = self.clusters.c_create_cluster() self.clusters.c_assign(nearest_cluster.id, datum_id, features_to_add) return nearest_cluster.id cdef void update_step(QuickBundles self, int cluster_id) nogil except *: """ Compute the update step of the QuickBundles algorithm. It will update the centroid of a cluster given its index. Parameters ---------- cluster_id : int ID of the cluster to update. """ self.clusters.c_update(cluster_id) dipy-0.13.0/dipy/segment/cythonutils.pxd000066400000000000000000000013231317371701200202520ustar00rootroot00000000000000# distutils: language = c # cython: wraparound=False, cdivision=True, boundscheck=False cdef extern from "cythonutils.h": enum: MAX_NDIM ctypedef float[:] Data1D ctypedef float[:,:] Data2D ctypedef float[:,:,:] Data3D ctypedef float[:,:,:,:] Data4D ctypedef float[:,:,:,:,:] Data5D ctypedef float[:,:,:,:,:,:] Data6D ctypedef float[:,:,:,:,:,:,:] Data7D ctypedef fused Data: Data1D Data2D Data3D Data4D Data5D Data6D Data7D cdef struct Shape: Py_ssize_t ndim Py_ssize_t dims[MAX_NDIM] Py_ssize_t size cdef Shape shape_from_memview(Data data) nogil cdef Shape tuple2shape(dims) except * cdef shape2tuple(Shape shape) cdef int same_shape(Shape shape1, Shape shape2) nogil dipy-0.13.0/dipy/segment/cythonutils.pyx000066400000000000000000000045161317371701200203060ustar00rootroot00000000000000# distutils: language = c # cython: wraparound=False, cdivision=True, boundscheck=False import numpy as np cdef Shape shape_from_memview(Data data) nogil: """ Retrieves shape from a memoryview object. 
Parameters ---------- data : memoryview object (float) array for which the shape informations are retrieved Returns ------- shape : `Shape` struct structure containing informations about the shape of `data` """ cdef Shape shape cdef int i shape.ndim = 0 shape.size = 1 for i in range(MAX_NDIM): shape.dims[i] = data.shape[i] if shape.dims[i] > 0: shape.size *= shape.dims[i] shape.ndim += 1 return shape cdef Shape tuple2shape(dims) except *: """ Converts a Python's tuple into a `Shape` Cython's struct. Parameters ---------- dims : tuple of int size of each dimension Returns ------- shape : `Shape` struct structure containing shape informations obtained from `dims` """ assert len(dims) < MAX_NDIM cdef Shape shape cdef int i shape.ndim = len(dims) shape.size = np.prod(dims) for i in range(shape.ndim): shape.dims[i] = dims[i] return shape cdef shape2tuple(Shape shape): """ Converts a `Shape` Cython's struct into a Python's tuple. Parameters ---------- shape : `Shape` struct structure containing shape informations Returns ------- dims : tuple of int size of each dimension """ cdef int i dims = [] for i in range(shape.ndim): dims.append(shape.dims[i]) return tuple(dims) cdef int same_shape(Shape shape1, Shape shape2) nogil: """ Checks if two shapes are the same. Two shapes are equals if they have the same number of dimensions and that each dimension's size matches. Parameters ---------- shape1 : `Shape` struct structure containing shape informations shape2 : `Shape` struct structure containing shape informations Returns ------- same_shape : int (0 or 1) tells if the shape are equals """ """ """ cdef int i cdef int same_shape = True same_shape &= shape1.ndim == shape2.ndim for i in range(shape1.ndim): same_shape &= shape1.dims[i] == shape2.dims[i] return same_shape dipy-0.13.0/dipy/segment/featurespeed.pxd000066400000000000000000000015051317371701200203430ustar00rootroot00000000000000from cythonutils cimport Data2D, Shape cimport numpy as cnp cdef class Feature(object): cdef int is_order_invariant cdef Shape c_infer_shape(Feature self, Data2D datum) nogil except * cdef void c_extract(Feature self, Data2D datum, Data2D out) nogil except * cpdef infer_shape(Feature self, datum) cpdef extract(Feature self, datum) cdef class CythonFeature(Feature): pass # The IdentityFeature class returns the datum as-is. This is useful for metric # that does not require any pre-processing. cdef class IdentityFeature(CythonFeature): pass # The ResampleFeature class returns the datum resampled. This is useful for # metric like SumPointwiseEuclideanMetric that does require a consistent # number of points between datum. cdef class ResampleFeature(CythonFeature): cdef cnp.npy_intp nb_points dipy-0.13.0/dipy/segment/featurespeed.pyx000066400000000000000000000311301317371701200203650ustar00rootroot00000000000000# distutils: language = c # cython: wraparound=False, cdivision=True, boundscheck=False import numpy as np cimport numpy as cnp from cythonutils cimport tuple2shape, shape2tuple, shape_from_memview from dipy.tracking.streamlinespeed cimport c_set_number_of_points, c_length cdef class Feature(object): """ Extracts features from a sequential datum. A sequence of N-dimensional points is represented as a 2D array with shape (nb_points, nb_dimensions). Parameters ---------- is_order_invariant : bool (optional) tells if this feature is invariant to the sequence's ordering. This means starting from either extremities produces the same features. 
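# --- Illustrative sketch, not part of the original dipy sources ---
# Feature subclasses only need to provide infer_shape() and extract().  A
# hedged, pure-Python sketch of a center-of-mass feature built on this API
# (dipy ships an optimized CenterOfMassFeature; this is for illustration only):
import numpy as np
from dipy.segment.featurespeed import Feature

class CenterOfMassPy(Feature):
    def __init__(self):
        super(CenterOfMassPy, self).__init__(is_order_invariant=True)

    def infer_shape(self, datum):
        return (1, datum.shape[1])

    def extract(self, datum):
        return np.mean(datum, axis=0, keepdims=True)

streamline = np.array([[0., 0, 0], [1, 1, 1], [2, 2, 2]], dtype=np.float32)
print(CenterOfMassPy().extract(streamline))   # [[1., 1., 1.]]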
(Default: True) Notes ----- When subclassing `Feature`, one only needs to override the `extract` and `infer_shape` methods. """ def __init__(Feature self, is_order_invariant=True): # By default every feature is order invariant. self.is_order_invariant = is_order_invariant property is_order_invariant: """ Is this feature invariant to the sequence's ordering """ def __get__(Feature self): return bool(self.is_order_invariant) def __set__(self, int value): self.is_order_invariant = bool(value) cdef Shape c_infer_shape(Feature self, Data2D datum) nogil except *: """ Cython version of `Feature.infer_shape`. """ with gil: shape = self.infer_shape(np.asarray(datum)) if np.asarray(shape).ndim == 0: return tuple2shape((1, shape)) elif len(shape) == 1: return tuple2shape((1,) + shape) elif len(shape) == 2: return tuple2shape(shape) else: raise TypeError("Only scalar, 1D or 2D array features are supported!") cdef void c_extract(Feature self, Data2D datum, Data2D out) nogil except *: """ Cython version of `Feature.extract`. """ cdef Data2D c_features with gil: features = np.asarray(self.extract(np.asarray(datum))).astype(np.float32) if features.ndim == 0: features = features[np.newaxis, np.newaxis] elif features.ndim == 1: features = features[np.newaxis] elif features.ndim == 2: pass else: raise TypeError("Only scalar, 1D or 2D array features are supported!") c_features = features out[:] = c_features cpdef infer_shape(Feature self, datum): """ Infers the shape of features extracted from a sequential datum. Parameters ---------- datum : 2D array Sequence of N-dimensional points. Returns ------- int, 1-tuple or 2-tuple Shape of the features. """ raise NotImplementedError("Feature's subclasses must implement method `infer_shape(self, datum)`!") cpdef extract(Feature self, datum): """ Extracts features from a sequential datum. Parameters ---------- datum : 2D array Sequence of N-dimensional points. Returns ------- 2D array Features extracted from `datum`. """ raise NotImplementedError("Feature's subclasses must implement method `extract(self, datum)`!") cdef class CythonFeature(Feature): """ Extracts features from a sequential datum. A sequence of N-dimensional points is represented as a 2D array with shape (nb_points, nb_dimensions). Parameters ---------- is_order_invariant : bool, optional Tells if this feature is invariant to the sequence's ordering (Default: True). Notes ----- By default, when inheriting from `CythonFeature`, Python methods will call their C version (e.g. `CythonFeature.extract` -> `self.c_extract`). """ cpdef infer_shape(CythonFeature self, datum): """ Infers the shape of features extracted from a sequential datum. Parameters ---------- datum : 2D array Sequence of N-dimensional points. Returns ------- tuple Shape of the features. Notes ----- This method calls its Cython version `self.c_infer_shape` accordingly. """ if not datum.flags.writeable or datum.dtype is not np.float32: datum = datum.astype(np.float32) return shape2tuple(self.c_infer_shape(datum)) cpdef extract(CythonFeature self, datum): """ Extracts features from a sequential datum. Parameters ---------- datum : 2D array Sequence of N-dimensional points. Returns ------- 2D array Features extracted from `datum`. Notes ----- This method calls its Cython version `self.c_extract` accordingly. 
""" if not datum.flags.writeable or datum.dtype is not np.float32: datum = datum.astype(np.float32) shape = shape2tuple(self.c_infer_shape(datum)) cdef Data2D out = np.empty(shape, dtype=datum.dtype) self.c_extract(datum, out) return np.asarray(out) cdef class IdentityFeature(CythonFeature): """ Extracts features from a sequential datum. A sequence of N-dimensional points is represented as a 2D array with shape (nb_points, nb_dimensions). The features being extracted are the actual sequence's points. This is useful for metric that does not require any pre-processing. """ def __init__(IdentityFeature self): super(IdentityFeature, self).__init__(is_order_invariant=False) cdef Shape c_infer_shape(IdentityFeature self, Data2D datum) nogil except *: return shape_from_memview(datum) cdef void c_extract(IdentityFeature self, Data2D datum, Data2D out) nogil except *: cdef: int N = datum.shape[0], D = datum.shape[1] int n, d for n in range(N): for d in range(D): out[n, d] = datum[n, d] cdef class ResampleFeature(CythonFeature): """ Extracts features from a sequential datum. A sequence of N-dimensional points is represented as a 2D array with shape (nb_points, nb_dimensions). The features being extracted are the points of the sequence once resampled. This is useful for metrics requiring a constant number of points for all streamlines. """ def __init__(ResampleFeature self, cnp.npy_intp nb_points): super(ResampleFeature, self).__init__(is_order_invariant=False) self.nb_points = nb_points if nb_points <= 0: raise ValueError("ResampleFeature: `nb_points` must be strictly positive: {0}".format(nb_points)) cdef Shape c_infer_shape(ResampleFeature self, Data2D datum) nogil except *: cdef Shape shape = shape_from_memview(datum) shape.dims[0] = self.nb_points return shape cdef void c_extract(ResampleFeature self, Data2D datum, Data2D out) nogil except *: c_set_number_of_points(datum, out) cdef class CenterOfMassFeature(CythonFeature): """ Extracts features from a sequential datum. A sequence of N-dimensional points is represented as a 2D array with shape (nb_points, nb_dimensions). The feature being extracted consists of one N-dimensional point representing the mean of the points, i.e. the center of mass. """ def __init__(CenterOfMassFeature self): super(CenterOfMassFeature, self).__init__(is_order_invariant=True) cdef Shape c_infer_shape(CenterOfMassFeature self, Data2D datum) nogil except *: cdef Shape shape shape.ndim = 2 shape.dims[0] = 1 shape.dims[1] = datum.shape[1] shape.size = datum.shape[1] return shape cdef void c_extract(CenterOfMassFeature self, Data2D datum, Data2D out) nogil except *: cdef int N = datum.shape[0], D = datum.shape[1] cdef int i, d for d in range(D): out[0, d] = 0 for i in range(N): for d in range(D): out[0, d] += datum[i, d] for d in range(D): out[0, d] /= N cdef class MidpointFeature(CythonFeature): """ Extracts features from a sequential datum. A sequence of N-dimensional points is represented as a 2D array with shape (nb_points, nb_dimensions). The feature being extracted consists of one N-dimensional point representing the middle point of the sequence (i.e. `nb_points//2`th point). 
""" def __init__(MidpointFeature self): super(MidpointFeature, self).__init__(is_order_invariant=False) cdef Shape c_infer_shape(MidpointFeature self, Data2D datum) nogil except *: cdef Shape shape shape.ndim = 2 shape.dims[0] = 1 shape.dims[1] = datum.shape[1] shape.size = datum.shape[1] return shape cdef void c_extract(MidpointFeature self, Data2D datum, Data2D out) nogil except *: cdef: int N = datum.shape[0], D = datum.shape[1] int mid = N/2 int d for d in range(D): out[0, d] = datum[mid, d] cdef class ArcLengthFeature(CythonFeature): """ Extracts features from a sequential datum. A sequence of N-dimensional points is represented as a 2D array with shape (nb_points, nb_dimensions). The feature being extracted consists of one scalar representing the arc length of the sequence (i.e. the sum of the length of all segments). """ def __init__(ArcLengthFeature self): super(ArcLengthFeature, self).__init__(is_order_invariant=True) cdef Shape c_infer_shape(ArcLengthFeature self, Data2D datum) nogil except *: cdef Shape shape shape.ndim = 2 shape.dims[0] = 1 shape.dims[1] = 1 shape.size = 1 return shape cdef void c_extract(ArcLengthFeature self, Data2D datum, Data2D out) nogil except *: out[0, 0] = c_length(datum) cdef class VectorOfEndpointsFeature(CythonFeature): """ Extracts features from a sequential datum. A sequence of N-dimensional points is represented as a 2D array with shape (nb_points, nb_dimensions). The feature being extracted consists of one vector in the N-dimensional space pointing from one end-point of the sequence to the other (i.e. `S[-1]-S[0]`). """ def __init__(VectorOfEndpointsFeature self): super(VectorOfEndpointsFeature, self).__init__(is_order_invariant=False) cdef Shape c_infer_shape(VectorOfEndpointsFeature self, Data2D datum) nogil except *: cdef Shape shape shape.ndim = 2 shape.dims[0] = 1 shape.dims[1] = datum.shape[1] shape.size = datum.shape[1] return shape cdef void c_extract(VectorOfEndpointsFeature self, Data2D datum, Data2D out) nogil except *: cdef: int N = datum.shape[0], D = datum.shape[1] int d for d in range(D): out[0, d] = datum[N-1, d] - datum[0, d] cpdef infer_shape(Feature feature, data): """ Infers shape of the features extracted from data. Parameters ---------- feature : `Feature` object Tells how to infer shape of the features. data : list of 2D arrays List of sequences of N-dimensional points. Returns ------- list of tuples Shapes of the features inferred from `data`. """ single_datum = False if type(data) is np.ndarray: single_datum = True data = [data] if len(data) == 0: return [] shapes = [] cdef int i for i in range(0, len(data)): datum = data[i] if data[i].flags.writeable else data[i].astype(np.float32) shapes.append(shape2tuple(feature.c_infer_shape(datum))) if single_datum: return shapes[0] else: return shapes cpdef extract(Feature feature, data): """ Extracts features from data. Parameters ---------- feature : `Feature` object Tells how to extract features from the data. datum : list of 2D arrays List of sequence of N-dimensional points. Returns ------- list of 2D arrays List of features extracted from `data`. 
""" single_datum = False if type(data) is np.ndarray: single_datum = True data = [data] if len(data) == 0: return [] shapes = infer_shape(feature, data) features = [np.empty(shape, dtype=np.float32) for shape in shapes] cdef int i for i in range(len(data)): datum = data[i] if data[i].flags.writeable else data[i].astype(np.float32) feature.c_extract(datum, features[i]) if single_datum: return features[0] else: return features dipy-0.13.0/dipy/segment/mask.py000066400000000000000000000225041317371701200164610ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import from warnings import warn import numpy as np from dipy.reconst.dti import fractional_anisotropy, color_fa from scipy.ndimage.filters import median_filter try: from skimage.filters import threshold_otsu as otsu except: from dipy.segment.threshold import otsu from scipy.ndimage import binary_dilation, generate_binary_structure def multi_median(input, median_radius, numpass): """ Applies median filter multiple times on input data. Parameters ---------- input : ndarray The input volume to apply filter on. median_radius : int Radius (in voxels) of the applied median filter numpass: int Number of pass of the median filter Returns ------- input : ndarray Filtered input volume. """ # Array representing the size of the median window in each dimension. medarr = np.ones_like(input.shape) * ((median_radius * 2) + 1) # Multi pass for i in range(0, numpass): median_filter(input, medarr, output=input) return input def applymask(vol, mask): """ Mask vol with mask. Parameters ---------- vol : ndarray Array with $V$ dimensions mask : ndarray Binary mask. Has $M$ dimensions where $M <= V$. When $M < V$, we append $V - M$ dimensions with axis length 1 to `mask` so that `mask` will broadcast against `vol`. In the typical case `vol` can be 4D, `mask` can be 3D, and we append a 1 to the mask shape which (via numpy broadcasting) has the effect of appling the 3D mask to each 3D slice in `vol` (``vol[..., 0]`` to ``vol[..., -1``). Returns ------- masked_vol : ndarray `vol` multiplied by `mask` where `mask` may have been extended to match extra dimensions in `vol` """ mask = mask.reshape(mask.shape + (vol.ndim - mask.ndim) * (1,)) return vol * mask def bounding_box(vol): """Compute the bounding box of nonzero intensity voxels in the volume. Parameters ---------- vol : ndarray Volume to compute bounding box on. Returns ------- npmins : list Array containg minimum index of each dimension npmaxs : list Array containg maximum index of each dimension """ # Find bounds on first dimension temp = vol for i in range(vol.ndim - 1): temp = temp.any(-1) mins = [temp.argmax()] maxs = [len(temp) - temp[::-1].argmax()] # Check that vol is not all 0 if mins[0] == 0 and temp[0] == 0: warn('No data found in volume to bound. Returning empty bounding box.') return [0] * vol.ndim, [0] * vol.ndim # Find bounds on remaining dimensions if vol.ndim > 1: a, b = bounding_box(vol.any(0)) mins.extend(a) maxs.extend(b) return mins, maxs def crop(vol, mins, maxs): """Crops the input volume. Parameters ---------- vol : ndarray Volume to crop. mins : array Array containg minimum index of each dimension. maxs : array Array containg maximum index of each dimension. Returns ------- vol : ndarray The cropped volume. """ return vol[tuple(slice(i, j) for i, j in zip(mins, maxs))] def median_otsu(input_volume, median_radius=4, numpass=4, autocrop=False, vol_idx=None, dilate=None): """Simple brain extraction tool method for images from DWI data. 
It uses a median filter smoothing of the input_volumes `vol_idx` and an automatic histogram Otsu thresholding technique, hence the name *median_otsu*. This function is inspired from Mrtrix's bet which has default values ``median_radius=3``, ``numpass=2``. However, from tests on multiple 1.5T and 3T data from GE, Philips, Siemens, the most robust choice is ``median_radius=4``, ``numpass=4``. Parameters ---------- input_volume : ndarray ndarray of the brain volume median_radius : int Radius (in voxels) of the applied median filter (default: 4). numpass: int Number of pass of the median filter (default: 4). autocrop: bool, optional if True, the masked input_volume will also be cropped using the bounding box defined by the masked data. Should be on if DWI is upsampled to 1x1x1 resolution. (default: False). vol_idx : None or array, optional 1D array representing indices of ``axis=3`` of a 4D `input_volume` None (the default) corresponds to ``(0,)`` (assumes first volume in 4D array). dilate : None or int, optional number of iterations for binary dilation Returns ------- maskedvolume : ndarray Masked input_volume mask : 3D ndarray The binary brain mask Notes ----- Copyright (C) 2011, the scikit-image team All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of skimage nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ if len(input_volume.shape) == 4: if vol_idx is not None: b0vol = np.mean(input_volume[..., tuple(vol_idx)], axis=3) else: b0vol = input_volume[..., 0].copy() else: b0vol = input_volume.copy() # Make a mask using a multiple pass median filter and histogram # thresholding. mask = multi_median(b0vol, median_radius, numpass) thresh = otsu(mask) mask = mask > thresh if dilate is not None: cross = generate_binary_structure(3, 1) mask = binary_dilation(mask, cross, iterations=dilate) # Auto crop the volumes using the mask as input_volume for bounding box # computing. 
if autocrop: mins, maxs = bounding_box(mask) mask = crop(mask, mins, maxs) croppedvolume = crop(input_volume, mins, maxs) maskedvolume = applymask(croppedvolume, mask) else: maskedvolume = applymask(input_volume, mask) return maskedvolume, mask def segment_from_cfa(tensor_fit, roi, threshold, return_cfa=False): """ Segment the cfa inside roi using the values from threshold as bounds. Parameters ------------- tensor_fit : TensorFit object TensorFit object roi : ndarray A binary mask, which contains the bounding box for the segmentation. threshold : array-like An iterable that defines the min and max values to use for the thresholding. The values are specified as (R_min, R_max, G_min, G_max, B_min, B_max) return_cfa : bool, optional If True, the cfa is also returned. Returns ---------- mask : ndarray Binary mask of the segmentation. cfa : ndarray, optional Array with shape = (..., 3), where ... is the shape of tensor_fit. The color fractional anisotropy, ordered as a nd array with the last dimension of size 3 for the R, G and B channels. """ FA = fractional_anisotropy(tensor_fit.evals) FA[np.isnan(FA)] = 0 FA = np.clip(FA, 0, 1) # Clamp the FA to remove degenerate tensors cfa = color_fa(FA, tensor_fit.evecs) roi = np.asarray(roi, dtype=bool) include = ((cfa >= threshold[0::2]) & (cfa <= threshold[1::2]) & roi[..., None]) mask = np.all(include, axis=-1) if return_cfa: return mask, cfa return mask def clean_cc_mask(mask): """ Cleans a segmentation of the corpus callosum so no random pixels are included. Parameters ---------- mask : ndarray Binary mask of the coarse segmentation. Returns ------- new_cc_mask : ndarray Binary mask of the cleaned segmentation. """ from scipy.ndimage.measurements import label new_cc_mask = np.zeros(mask.shape) # Flood fill algorithm to find contiguous regions. labels, numL = label(mask) volumes = [len(labels[np.where(labels == l_idx+1)]) for l_idx in np.arange(numL)] biggest_vol = np.arange(numL)[np.where(volumes == np.max(volumes))] + 1 new_cc_mask[np.where(labels == biggest_vol)] = 1 return new_cc_mask dipy-0.13.0/dipy/segment/metric.py000066400000000000000000000031111317371701200170020ustar00rootroot00000000000000from dipy.segment.featurespeed import (Feature, IdentityFeature, ResampleFeature, CenterOfMassFeature, MidpointFeature, ArcLengthFeature, VectorOfEndpointsFeature) from dipy.segment.metricspeed import (Metric, SumPointwiseEuclideanMetric, AveragePointwiseEuclideanMetric, MinimumAverageDirectFlipMetric, CosineMetric) from dipy.segment.metricspeed import (dist, distance_matrix) # Creates aliases EuclideanMetric = SumPointwiseEuclideanMetric def mdf(s1, s2): """ Computes the MDF (Minimum average Direct-Flip) distance [Garyfallidis12]_ between two streamlines. Streamlines must have the same number of points. Parameters ---------- s1 : 2D array A streamline (sequence of N-dimensional points). s2 : 2D array A streamline (sequence of N-dimensional points). Returns ------- double Distance between two streamlines. References ---------- .. [Garyfallidis12] Garyfallidis E. et al., QuickBundles a method for tractography simplification, Frontiers in Neuroscience, vol 6, no 175, 2012. 
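    Examples
    --------
    Illustrative only (the two streamlines are made up); both inputs must
    already have the same number of points (resample beforehand if needed,
    e.g. with `ResampleFeature`):

    >>> import numpy as np
    >>> s1 = np.array([[0, 0, 0], [1, 0, 0], [2, 0, 0]], dtype=np.float32)
    >>> s2 = np.array([[0, 1, 0], [1, 1, 0], [2, 1, 0]], dtype=np.float32)
    >>> d = mdf(s1, s2)  # 1.0 here: each point of s2 is one unit away from s1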
""" return dist(MinimumAverageDirectFlipMetric(), s1, s2) dipy-0.13.0/dipy/segment/metricspeed.pxd000066400000000000000000000007111317371701200201710ustar00rootroot00000000000000from cythonutils cimport Data2D, Shape from featurespeed cimport Feature cdef class Metric(object): cdef Feature feature cdef int is_order_invariant cdef double c_dist(Metric self, Data2D features1, Data2D features2) nogil except -1 cdef int c_are_compatible(Metric self, Shape shape1, Shape shape2) nogil except -1 cpdef double dist(Metric self, features1, features2) except -1 cpdef are_compatible(Metric self, shape1, shape2) dipy-0.13.0/dipy/segment/metricspeed.pyx000066400000000000000000000362021317371701200202220ustar00rootroot00000000000000# distutils: language = c # cython: wraparound=False, cdivision=True, boundscheck=False import numpy as np from libc.math cimport sqrt, acos from cythonutils cimport tuple2shape, shape2tuple, same_shape from featurespeed cimport IdentityFeature, ResampleFeature DEF biggest_double = 1.7976931348623157e+308 # np.finfo('f8').max import math cdef double PI = math.pi cdef class Metric(object): """ Computes a distance between two sequential data. A sequence of N-dimensional points is represented as a 2D array with shape (nb_points, nb_dimensions). A `feature` object can be specified in order to calculate the distance between extracted features, rather than directly between the sequential data. Parameters ---------- feature : `Feature` object, optional It is used to extract features before computing the distance. Notes ----- When subclassing `Metric`, one only needs to override the `dist` and `are_compatible` methods. """ def __init__(Metric self, Feature feature=IdentityFeature()): self.feature = feature self.is_order_invariant = self.feature.is_order_invariant property feature: """ `Feature` object used to extract features from sequential data """ def __get__(Metric self): return self.feature property is_order_invariant: """ Is this metric invariant to the sequence's ordering """ def __get__(Metric self): return bool(self.is_order_invariant) cdef int c_are_compatible(Metric self, Shape shape1, Shape shape2) nogil except -1: """ Cython version of `Metric.are_compatible`. """ with gil: return self.are_compatible(shape2tuple(shape1), shape2tuple(shape2)) cdef double c_dist(Metric self, Data2D features1, Data2D features2) nogil except -1: """ Cython version of `Metric.dist`. """ with gil: return self.dist(np.asarray(features1), np.asarray(features2)) cpdef are_compatible(Metric self, shape1, shape2): """ Checks if features can be used by `metric.dist` based on their shape. Basically this method exists so we don't have to do this check inside the `metric.dist` function (speedup). Parameters ---------- shape1 : int, 1-tuple or 2-tuple shape of the first data point's features shape2 : int, 1-tuple or 2-tuple shape of the second data point's features Returns ------- are_compatible : bool whether or not shapes are compatible """ raise NotImplementedError("Metric's subclasses must implement method `are_compatible(self, shape1, shape2)`!") cpdef double dist(Metric self, features1, features2) except -1: """ Computes a distance between two data points based on their features. Parameters ---------- features1 : 2D array Features of the first data point. features2 : 2D array Features of the second data point. Returns ------- double Distance between two data points. 
""" raise NotImplementedError("Metric's subclasses must implement method `dist(self, features1, features2)`!") cdef class CythonMetric(Metric): """ Computes a distance between two sequential data. A sequence of N-dimensional points is represented as a 2D array with shape (nb_points, nb_dimensions). A `feature` object can be specified in order to calculate the distance between extracted features, rather than directly between the sequential data. Parameters ---------- feature : `Feature` object, optional It is used to extract features before computing the distance. Notes ----- When subclassing `CythonMetric`, one only needs to override the `c_dist` and `c_are_compatible` methods. """ cpdef are_compatible(CythonMetric self, shape1, shape2): """ Checks if features can be used by `metric.dist` based on their shape. Basically this method exists so we don't have to do this check inside method `dist` (speedup). Parameters ---------- shape1 : int, 1-tuple or 2-tuple Shape of the first data point's features. shape2 : int, 1-tuple or 2-tuple Shape of the second data point's features. Returns ------- bool Whether or not shapes are compatible. Notes ----- This method calls its Cython version `self.c_are_compatible` accordingly. """ if np.asarray(shape1).ndim == 0: shape1 = (1, shape1) elif len(shape1) == 1: shape1 = (1,) + shape1 if np.asarray(shape2).ndim == 0: shape2 = (1, shape2) elif len(shape2) == 1: shape2 = (1,) + shape2 return self.c_are_compatible(tuple2shape(shape1), tuple2shape(shape2)) == 1 cpdef double dist(CythonMetric self, features1, features2) except -1: """ Computes a distance between two data points based on their features. Parameters ---------- features1 : 2D array Features of the first data point. features2 : 2D array Features of the second data point. Returns ------- double Distance between two data points. Notes ----- This method calls its Cython version `self.c_dist` accordingly. """ # If needed, we convert features to 2D arrays. features1 = np.asarray(features1) if features1.ndim == 0: features1 = features1[np.newaxis, np.newaxis] elif features1.ndim == 1: features1 = features1[np.newaxis] elif features1.ndim == 2: pass else: raise TypeError("Only scalar, 1D or 2D array features are" " supported for parameter 'features1'!") features2 = np.asarray(features2) if features2.ndim == 0: features2 = features2[np.newaxis, np.newaxis] elif features2.ndim == 1: features2 = features2[np.newaxis] elif features2.ndim == 2: pass else: raise TypeError("Only scalar, 1D or 2D array features are" " supported for parameter 'features2'!") if not self.are_compatible(features1.shape, features2.shape): raise ValueError("Features are not compatible according to this metric!") return self.c_dist(features1, features2) cdef class SumPointwiseEuclideanMetric(CythonMetric): r""" Computes the sum of pointwise Euclidean distances between two sequential data. A sequence of N-dimensional points is represented as a 2D array with shape (nb_points, nb_dimensions). A `feature` object can be specified in order to calculate the distance between the features, rather than directly between the sequential data. Parameters ---------- feature : `Feature` object, optional It is used to extract features before computing the distance. Notes ----- The distance between two 2D sequential data:: s1 s2 0* a *0 \ | \ | 1* | | b *1 | \ 2* \ c *2 is equal to $a+b+c$ where $a$ is the Euclidean distance between s1[0] and s2[0], $b$ between s1[1] and s2[1] and $c$ between s1[2] and s2[2]. 
""" cdef double c_dist(SumPointwiseEuclideanMetric self, Data2D features1, Data2D features2) nogil except -1: cdef : int N = features1.shape[0], D = features1.shape[1] int n, d double dd, dist_n, dist = 0.0 for n in range(N): dist_n = 0.0 for d in range(D): dd = features1[n, d] - features2[n, d] dist_n += dd*dd dist += sqrt(dist_n) return dist cdef int c_are_compatible(SumPointwiseEuclideanMetric self, Shape shape1, Shape shape2) nogil except -1: return same_shape(shape1, shape2) cdef class AveragePointwiseEuclideanMetric(SumPointwiseEuclideanMetric): r""" Computes the average of pointwise Euclidean distances between two sequential data. A sequence of N-dimensional points is represented as a 2D array with shape (nb_points, nb_dimensions). A `feature` object can be specified in order to calculate the distance between the features, rather than directly between the sequential data. Parameters ---------- feature : `Feature` object, optional It is used to extract features before computing the distance. Notes ----- The distance between two 2D sequential data:: s1 s2 0* a *0 \ | \ | 1* | | b *1 | \ 2* \ c *2 is equal to $(a+b+c)/3$ where $a$ is the Euclidean distance between s1[0] and s2[0], $b$ between s1[1] and s2[1] and $c$ between s1[2] and s2[2]. """ cdef double c_dist(AveragePointwiseEuclideanMetric self, Data2D features1, Data2D features2) nogil except -1: cdef int N = features1.shape[0] cdef double dist = SumPointwiseEuclideanMetric.c_dist(self, features1, features2) return dist / N cdef class MinimumAverageDirectFlipMetric(AveragePointwiseEuclideanMetric): r""" Computes the MDF distance (minimum average direct-flip) between two sequential data. A sequence of N-dimensional points is represented as a 2D array with shape (nb_points, nb_dimensions). Notes ----- The distance between two 2D sequential data:: s1 s2 0* a *0 \ | \ | 1* | | b *1 | \ 2* \ c *2 is equal to $\min((a+b+c)/3, (a'+b'+c')/3)$ where $a$ is the Euclidean distance between s1[0] and s2[0], $b$ between s1[1] and s2[1], $c$ between s1[2] and s2[2], $a'$ between s1[0] and s2[2], $b'$ between s1[1] and s2[1] and $c'$ between s1[2] and s2[0]. """ property is_order_invariant: """ Is this metric invariant to the sequence's ordering """ def __get__(MinimumAverageDirectFlipMetric self): return True # Ordering is handled in the distance computation cdef double c_dist(MinimumAverageDirectFlipMetric self, Data2D features1, Data2D features2) nogil except -1: cdef double dist_direct = AveragePointwiseEuclideanMetric.c_dist(self, features1, features2) cdef double dist_flipped = AveragePointwiseEuclideanMetric.c_dist(self, features1, features2[::-1]) return min(dist_direct, dist_flipped) cdef class CosineMetric(CythonMetric): r""" Computes the cosine distance between two vectors. A vector (i.e. a N-dimensional point) is represented as a 2D array with shape (1, nb_dimensions). Notes ----- The distance between two vectors $v_1$ and $v_2$ is equal to $\frac{1}{\pi} \arccos\left(\frac{v_1 \cdot v_2}{\|v_1\| \|v_2\|}\right)$ and is bounded within $[0,1]$. 
""" def __init__(CosineMetric self, Feature feature): super(CosineMetric, self).__init__(feature=feature) cdef int c_are_compatible(CosineMetric self, Shape shape1, Shape shape2) nogil except -1: return same_shape(shape1, shape2) != 0 and shape1.dims[0] == 1 cdef double c_dist(CosineMetric self, Data2D features1, Data2D features2) nogil except -1: cdef : int d, D = features1.shape[1] double sqr_norm_features1 = 0.0, sqr_norm_features2 = 0.0 double cos_theta = 0.0 for d in range(D): cos_theta += features1[0, d] * features2[0, d] sqr_norm_features1 += features1[0, d] * features1[0, d] sqr_norm_features2 += features2[0, d] * features2[0, d] if sqr_norm_features1 == 0.: if sqr_norm_features2 == 0.: return 0. else: return 1. cos_theta /= sqrt(sqr_norm_features1) * sqrt(sqr_norm_features2) # Make sure it's in [-1, 1], i.e. within domain of arccosine cos_theta = min(cos_theta, 1.) cos_theta = max(cos_theta, -1.) return acos(cos_theta) / PI # Normalized cosine distance cpdef distance_matrix(Metric metric, data1, data2=None): """ Computes the distance matrix between two lists of sequential data. The distance matrix is obtained by computing the pairwise distance of all tuples spawn by the Cartesian product of `data1` with `data2`. If `data2` is not provided, the Cartesian product of `data1` with itself is used instead. A sequence of N-dimensional points is represented as a 2D array with shape (nb_points, nb_dimensions). Parameters ---------- metric : `Metric` object Tells how to compute the distance between two sequential data. data1 : list of 2D arrays List of sequences of N-dimensional points. data2 : list of 2D arrays Llist of sequences of N-dimensional points. Returns ------- 2D array (double) Distance matrix. """ cdef int i, j if data2 is None: data2 = data1 shape = metric.feature.infer_shape(data1[0].astype(np.float32)) distance_matrix = np.zeros((len(data1), len(data2)), dtype=np.float64) cdef: Data2D features1 = np.empty(shape, np.float32) Data2D features2 = np.empty(shape, np.float32) for i in range(len(data1)): datum1 = data1[i] if data1[i].flags.writeable and data1[i].dtype is np.float32 else data1[i].astype(np.float32) metric.feature.c_extract(datum1, features1) for j in range(len(data2)): datum2 = data2[j] if data2[j].flags.writeable and data2[j].dtype is np.float32 else data2[j].astype(np.float32) metric.feature.c_extract(datum2, features2) distance_matrix[i, j] = metric.c_dist(features1, features2) return distance_matrix cpdef double dist(Metric metric, datum1, datum2) except -1: """ Computes a distance between `datum1` and `datum2`. A sequence of N-dimensional points is represented as a 2D array with shape (nb_points, nb_dimensions). Parameters ---------- metric : `Metric` object Tells how to compute the distance between `datum1` and `datum2`. datum1 : 2D array Sequence of N-dimensional points. datum2 : 2D array Sequence of N-dimensional points. Returns ------- double Distance between two data points. 
""" datum1 = datum1 if datum1.flags.writeable and datum1.dtype is np.float32 else datum1.astype(np.float32) datum2 = datum2 if datum2.flags.writeable and datum2.dtype is np.float32 else datum2.astype(np.float32) cdef: Shape shape1 = metric.feature.c_infer_shape(datum1) Shape shape2 = metric.feature.c_infer_shape(datum2) Data2D features1 = np.empty(shape2tuple(shape1), np.float32) Data2D features2 = np.empty(shape2tuple(shape2), np.float32) metric.feature.c_extract(datum1, features1) metric.feature.c_extract(datum2, features2) return metric.c_dist(features1, features2) dipy-0.13.0/dipy/segment/mrf.pyx000066400000000000000000000560401317371701200165040ustar00rootroot00000000000000#!python #cython: boundscheck=False #cython: wraparound=False #cython: cdivision=True import numpy as np from dipy.segment.mask import applymask from dipy.core.ndindex import ndindex from dipy.sims.voxel import add_noise cimport cython cimport numpy as cnp cdef extern from "dpy_math.h" nogil: cdef double NPY_PI cdef double NPY_INFINITY double sqrt(double) double log(double) double exp(double) double fabs(double) class ConstantObservationModel(object): r""" Observation model assuming that the intensity of each class is constant. The model parameters are the means $\mu_{k}$ and variances $\sigma_{k}$ associated with each tissue class. According to this model, the observed intensity at voxel $x$ is given by $I(x) = \mu_{k} + \eta_{k}$ where $k$ is the tissue class of voxel $x$, and $\eta_{k}$ is a Gaussian random variable with zero mean and variance $\sigma_{k}^{2}$. The observation model is responsible for computing the negative log-likelihood of observing any given intensity $z$ at each voxel $x$ assuming the voxel belongs to each class $k$. It also provides a default parameter initialization. """ def __init__(self): r""" Initializes an instance of the ConstantObservationModel class """ pass def initialize_param_uniform(self, image, nclasses): r""" Initializes the means and variances uniformly The means are initialized uniformly along the dynamic range of `image`. The variances are set to 1 for all classes Parameters ---------- image : array, 3D structural image nclasses : int, number of desired classes Returns ------- mu : array, 1 x nclasses, mean for each class sigma : array, 1 x nclasses, standard deviation for each class. Set up to 1.0 for all classes. """ cdef: double[:] mu = np.empty((nclasses,), dtype=np.float64) double[:] sigma = np.empty((nclasses,), dtype=np.float64) _initialize_param_uniform(image, mu, sigma) return np.array(mu), np.array(sigma) def seg_stats(self, input_image, seg_image, nclass): r""" Mean and standard variation for N desired tissue classes Parameters ---------- input_image : ndarray, 3D structural image seg_image : ndarray, 3D segmented image nclass : int, number of classes (3 in most cases) Returns ------- mu, std: ndarrays, 1 x nclasses dimension Mean and standard deviation for each class """ mu = np.zeros(nclass) std = np.zeros(nclass) num_vox = np.zeros(nclass) for index in ndindex(np.shape(input_image)): s = seg_image[index] v = input_image[index] for i in range(0, nclass): if s == i: mu[i] += v std[i] += v * v num_vox[i] += 1 mu = mu / num_vox std = np.sqrt(std/num_vox - mu**2) return mu, std def negloglikelihood(self, image, mu, sigmasq, nclasses): r""" Computes the gaussian negative log-likelihood of each class at each voxel of `image` assuming a gaussian distribution with means and variances given by `mu` and `sigmasq`, respectively (constant models along the full volume). 
The negative log-likelihood will be written in `nloglike`. Parameters ---------- image : ndarray, 3D gray scale structural image mu : ndarray, mean of each class sigmasq : ndarray, variance of each class nclasses : int number of classes Returns ------- nloglike : ndarray, 4D negloglikelihood for each class in each volume """ nloglike = np.zeros(image.shape + (nclasses,), dtype=np.float64) for l in range(nclasses): _negloglikelihood(image, mu, sigmasq, l, nloglike) return nloglike def prob_image(self, img, nclasses, mu, sigmasq, P_L_N): r""" Conditional probability of the label given the image Parameters ----------- img : ndarray, 3D structural gray-scale image nclasses : int, number of tissue classes mu : ndarray, 1 x nclasses, current estimate of the mean of each tissue class sigmasq : ndarray, 1 x nclasses, current estimate of the variance of each tissue class P_L_N : ndarray, 4D probability map of the label given the neighborhood. Previously computed by function prob_neighborhood Returns -------- P_L_Y : ndarray, 4D probability of the label given the input image """ P_L_Y = np.zeros_like(P_L_N) P_L_Y_norm = np.zeros_like(img) for l in range(nclasses): g = np.zeros_like(img) _prob_image(img, g, mu, sigmasq, l, P_L_N, P_L_Y) P_L_Y_norm[:, :, :] += P_L_Y[:, :, :, l] for l in range(nclasses): P_L_Y[:, :, :, l] = P_L_Y[:, :, :, l] / P_L_Y_norm return P_L_Y def update_param(self, image, P_L_Y, mu, nclasses): r""" Updates the means and the variances in each iteration for all the labels. This is for equations 25 and 26 of Zhang et. al., IEEE Trans. Med. Imag, Vol. 20, No. 1, Jan 2001. Parameters ----------- image : ndarray, 3D structural gray-scale image P_L_Y : ndarray, 4D probability map of the label given the input image computed by the expectation maximization (EM) algorithm mu : ndarray, 1 x nclasses, current estimate of the mean of each tissue class. nclasses : int, number of tissue classes Returns -------- mu_upd : ndarray, 1 x nclasses, updated mean of each tissue class var_upd : ndarray, 1 x nclasses, updated variance of each tissue class """ mu_upd = np.zeros(nclasses, dtype=np.float64) var_upd = np.zeros(nclasses, dtype=np.float64) mu_num = np.zeros(image.shape + (nclasses,), dtype=np.float64) var_num = np.zeros(image.shape + (nclasses,), dtype=np.float64) for l in range(nclasses): mu_num[..., l] = P_L_Y[..., l] * image var_num[..., l] = P_L_Y[..., l] * ((image - mu[l]) ** 2) mu_upd[l] = np.sum(mu_num[..., l]) / np.sum(P_L_Y[..., l]) var_upd[l] = np.sum(var_num[..., l]) / np.sum(P_L_Y[..., l]) return mu_upd, var_upd def update_param_new(self, image, P_L_Y, mu, nclasses): r""" Updates the means and the variances in each iteration for all the labels. This is for equations 25 and 26 of the Zhang et al. paper Parameters ----------- image : ndarray, 3D structural gray-scale image P_L_Y : ndarray, 4D probability map of the label given the input image computed by the expectation maximization (EM) algorithm mu : ndarray, 1 x nclasses, current estimate of the mean of each tissue class. 
nclasses : int, number of tissue classes Returns -------- mu_upd : ndarray, 1 x nclasses, updated mean of each tissue class var_upd : ndarray, 1 x nclasses, updated variance of each tissue class """ mu_upd = np.zeros(nclasses, dtype=np.float64) var_upd = np.zeros(nclasses, dtype=np.float64) mu_num = np.zeros(image.shape + (nclasses,), dtype=np.float64) var_num = np.zeros(image.shape + (nclasses,), dtype=np.float64) for l in range(nclasses): mu_num[..., l] = P_L_Y[..., l] * image var_num[..., l] = mu_num[..., l] * image mu_upd[l] = np.sum(mu_num[..., l]) / np.sum(P_L_Y[..., l]) var_upd[l] = (np.sum(var_num[..., l]) / np.sum(P_L_Y[..., l]) - mu_upd[l] ** 2) return mu_upd, var_upd cdef void _initialize_param_uniform(double[:,:,:] image, double[:] mu, double[:] sigma) nogil: r""" Initializes the means and standard deviations uniformly The means are initialized uniformly along the dynamic range of `image`. The standard deviations are set to 1 for all classes. Parameters ---------- image : array, 3D structural gray-scale image mu : buffer array for the mean of each tissue class sigma : buffer array for the variance of each tissue class Returns ------- mu : array, 1 x nclasses, mean of each class sigma : array, 1 x nclasses, standard deviation of each class """ cdef: cnp.npy_intp nx = image.shape[0] cnp.npy_intp ny = image.shape[1] cnp.npy_intp nz = image.shape[2] int nclasses = mu.shape[0] int i double min_val double max_val min_val = image[0,0,0] max_val = image[0,0,0] for x in range(nx): for y in range(ny): for z in range(nz): if image[x,y,z] < min_val: min_val = image[x,y,z] if image[x,y,z] > max_val: max_val = image[x,y,z] for i in range(nclasses): sigma[i] = 1.0 mu[i] = min_val + i * (max_val - min_val)/nclasses cdef void _negloglikelihood(double[:, :, :] image, double[:] mu, double[:] sigmasq, int classid, double[:, :, :, :] neglogl) nogil: r""" Computes the gaussian negative log-likelihood of each class at each voxel of `image` assuming a gaussian distribution with means and variances given by `mu` and `sigmasq`, respectively (constant models along the full volume). The negative log-likelihood will be written in `neglogl`. Parameters ---------- image : array, 3D structural gray-scale image mu : array, mean of each class sigmasq : array, variance of each class classid : int, class identifier neglogl : buffer for the neg-loglikelihood Returns ------- neglogl : array, neg-loglikelihood for the class (l = classid) """ cdef: cnp.npy_intp nx = image.shape[0] cnp.npy_intp ny = image.shape[1] cnp.npy_intp nz = image.shape[2] cnp.npy_intp l = classid cnp.npy_intp x, y, z double eps = 1e-8 # We assume images normalized to 0-1 double eps_sq = 1e-16 # Maximum precision for double. 
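    # The loop below evaluates, for every voxel, the Gaussian negative
    # log-likelihood of class `l`:
    #   (y - mu[l])**2 / (2 * sigmasq[l]) + log(sqrt(2 * pi * sigmasq[l]))
    # A (near-)zero variance is handled separately: the quadratic term is
    # replaced by a constant when the intensity matches mu[l] (within eps),
    # and the cost is set to infinity otherwise.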
for x in range(nx): for y in range(ny): for z in range(nz): if sigmasq[l] < eps_sq: if fabs(image[x, y, z] - mu[l]) < eps: neglogl[x, y, z, l] = 1 + log(sqrt(2.0 * NPY_PI * sigmasq[l])) else: neglogl[x, y, z, l] = NPY_INFINITY else: neglogl[x, y, z, l] = (((image[x, y, z] - mu[l])**2.0) / (2.0 * sigmasq[l])) neglogl[x, y, z, l] += log(sqrt(2.0 * NPY_PI * sigmasq[l])) cdef void _prob_image(double[:, :, :] image, double[:, :, :] gaussian, double[:] mu, double[:] sigmasq, int classid, double[:, :, :, :] P_L_N, double[:, :, :, :] P_L_Y) nogil: r""" Conditional probability of the label given the image Parameters ----------- image : array, 3D structural gray-scale image gaussian : array 3D buffer for the gaussian distribution that is multiplied by P_L_N to make P_L_Y mu : array, current estimate of the mean of each tissue class sigmasq : array, current estimate of the variance of each tissue class classid : int, tissue class identifier P_L_N : array, 4D probability map of the label given the neighborhood. Previously computed by function prob_neighborhood P_L_Y : array 4D buffer to hold P(L|Y) Returns -------- P_L_Y : array, 4D probability of the label given the input image P(L|Y) """ cdef: cnp.npy_intp nx = image.shape[0] cnp.npy_intp ny = image.shape[1] cnp.npy_intp nz = image.shape[2] cnp.npy_intp l = classid cnp.npy_intp x, y, z double eps = 1e-8 double eps_sq = 1e-16 for x in range(nx): for y in range(ny): for z in range(nz): if sigmasq[l] < eps_sq: if fabs(image[x, y, z] - mu[l]) < eps: gaussian[x, y, z] = 1 else: gaussian[x, y, z] = 0 else: gaussian[x, y, z] = ( (exp(-((image[x, y, z] - mu[l]) ** 2) / (2 * sigmasq[l]))) / (sqrt(2 * NPY_PI * sigmasq[l]))) P_L_Y[x, y, z, l] = gaussian[x, y, z] * P_L_N[x, y, z, l] class IteratedConditionalModes(object): def __init__(self): pass def initialize_maximum_likelihood(self, nloglike): r""" Initializes the segmentation of an image with given neg-loglikelihood Initializes the segmentation of an image with neglog-likelihood field given by `nloglike`. The class of each voxel is selected as the one with the minimum neglog-likelihood (i.e. maximum-likelihood segmentation). Parameters ---------- nloglike : ndarray, 4D shape, nloglike[x,y,z,k] is the likelihhood of class k for voxel (x, y, z) Returns -------- seg : ndarray, 3D initial segmentation """ seg = np.zeros(nloglike.shape[:3]).astype(np.int16) _initialize_maximum_likelihood(nloglike, seg) return seg def icm_ising(self, nloglike, beta, seg): r""" Executes one iteration of the ICM algorithm for MRF MAP estimation. The prior distribution of the MRF is a Gibbs distribution with the Potts/Ising model with parameter `beta`: https://en.wikipedia.org/wiki/Potts_model Parameters ---------- nloglike : ndarray, 4D shape, nloglike[x,y,z,k] is the negative log likelihood of class k at voxel (x,y,z) beta : float, positive scalar, it is the parameter of the Potts/Ising model. Determines the smoothness of the output segmentation. seg : ndarray, 3D initial segmentation. This segmentation will change by one iteration of the ICM algorithm Returns ------- new_seg : ndarray, 3D final segmentation energy : ndarray, 3D final energy """ energy = np.zeros(nloglike.shape[:3]).astype(np.float64) new_seg = np.zeros_like(seg) _icm_ising(nloglike, beta, seg, energy, new_seg) return new_seg, energy def prob_neighborhood(self, seg, beta, nclasses): r""" Conditional probability of the label given the neighborhood Equation 2.18 of the Stan Z. Li book (Stan Z. 
Li, Markov Random Field Modeling in Image Analysis, 3rd ed., Advances in Pattern Recognition Series, Springer Verlag 2009.) Parameters ----------- seg : ndarray, 3D tissue segmentation derived from the ICM model beta : float, scalar that determines the importance of the neighborhood and the spatial smoothness of the segmentation. Usually between 0 to 0.5 nclasses : int, number of tissue classes Returns -------- PLN : ndarray, 4D probability map of the label given the neighborhood of the voxel. """ cdef: double[:, :, :] P_L_N = np.zeros(seg.shape, dtype=np.float64) cnp.npy_intp classid = 0 PLN_norm = np.zeros(seg.shape, dtype=np.float64) PLN = np.zeros(seg.shape + (nclasses,), dtype=np.float64) for classid in range(nclasses): P_L_N = np.zeros(seg.shape, dtype=np.float64) _prob_class_given_neighb(seg, beta, classid, P_L_N) PLN[:, :, :, classid] = np.array(P_L_N) PLN[:, :, :, classid] = np.exp(- PLN[:, :, :, classid]) PLN_norm += PLN[:, :, :, classid] for l in range(nclasses): PLN[:, :, :, l] = PLN[:, :, :, l] / PLN_norm return PLN cdef void _initialize_maximum_likelihood(double[:,:,:,:] nloglike, cnp.npy_short[:,:,:] seg) nogil: r""" Initializes the segmentation of an image with given neg-log-likelihood. Initializes the segmentation of an image with neg-log-likelihood field given by `nloglike`. The class of each voxel is selected as the one with the minimum neg-log-likelihood (i.e. the maximum-likelihood segmentation). Parameters ---------- nloglike : array 4D nloglike[x,y,z,k] is the likelihhood of class k for voxel (x,y,z) seg : array 3D buffer for the initial segmentation Returns : seg : array, 3D initial segmentation """ cdef: cnp.npy_intp nx = nloglike.shape[0] cnp.npy_intp ny = nloglike.shape[1] cnp.npy_intp nz = nloglike.shape[2] cnp.npy_intp nclasses = nloglike.shape[3] double min_energy cnp.npy_short best_class for x in range(nx): for y in range(ny): for z in range(nz): best_class = -1 for k in range(nclasses): if (best_class == -1) or (nloglike[x, y, z, k] < min_energy): best_class = k min_energy = nloglike[x, y, z, k] seg[x, y, z] = best_class cdef void _icm_ising(double[:,:,:,:] nloglike, double beta, cnp.npy_short[:,:,:] seg, double[:,:,:] energy, cnp.npy_short[:,:,:] new_seg) nogil: r""" Executes one iteration of the ICM algorithm for MRF MAP estimation The prior distribution of the MRF is a Gibbs distribution with the Potts/Ising model with parameter `beta`: https://en.wikipedia.org/wiki/Potts_model Parameters ---------- nloglike : array, 4D nloglike[x,y,z,k] is the negative log likelihood of class k at voxel (x,y,z) beta : float, positive scalar, it is the parameter of the Potts/Ising model. Determines the smoothness of the output segmentation seg : array, 3D initial segmentation. This segmentation will change by one iteration of the ICM algorithm energy : array, 3D buffer for the energy new_seg : array, 3D buffer for the final segmentation Returns ------- energy : array, 3D map of the energy for every voxel new_seg : array, 3D new final segmentation (there is a new one after each iteration). 
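    Notes
    -----
    In terms of the quantities above, the update implemented below assigns to
    each voxel $x$ the class $k$ that minimizes

    $E_k(x) = -\log L_k(x) + \beta \sum_{y \in N_6(x)} (1 - 2\,\delta(s(y), k))$

    where $-\log L_k(x)$ is `nloglike[x, k]`, $N_6(x)$ are the six in-bounds
    nearest neighbors of $x$, $s(y)$ is the current label of neighbor $y$ and
    $\delta$ is the Kronecker delta.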
""" cdef: cnp.npy_intp nneigh = 6 cnp.npy_intp* dX = [-1, 0, 0, 0, 0, 1] cnp.npy_intp* dY = [0, -1, 0, 1, 0, 0] cnp.npy_intp* dZ = [0, 0, 1, 0, -1, 0] cnp.npy_intp nx = nloglike.shape[0] cnp.npy_intp ny = nloglike.shape[1] cnp.npy_intp nz = nloglike.shape[2] cnp.npy_intp nclasses = nloglike.shape[3] cnp.npy_intp x, y, z, xx, yy, zz, i, j, k double min_energy = NPY_INFINITY double this_energy = NPY_INFINITY cnp.npy_short best_class for x in range(nx): for y in range(ny): for z in range(nz): best_class = -1 min_energy = NPY_INFINITY for k in range(nclasses): this_energy = nloglike[x, y, z, k] for i in range(nneigh): xx = x + dX[i] if((xx < 0) or (xx >= nx)): continue yy = y + dY[i] if((yy < 0) or (yy >= ny)): continue zz = z + dZ[i] if((zz < 0) or (zz >= nz)): continue if seg[xx, yy, zz] == k: this_energy -= beta else: this_energy += beta if this_energy < min_energy: min_energy = this_energy best_class = k new_seg[x, y, z] = best_class energy[x, y, z] = min_energy cdef void _prob_class_given_neighb(cnp.npy_short[:, :, :] seg, double beta, int classid, double[:, :, :] P_L_N) nogil: r""" Conditional probability of the label given the neighborhood Equation 2.18 of the Stan Z. Li book. Parameters ----------- image : array, 3D structural gray-scale image seg : array, 3D tissue segmentation derived from the ICM model beta : float, scalar that determines the importance of the neighborhood and the spatial smoothness of the segmentation. Usually between 0 to 0.5 classid : int, tissue class identifier P_L_N : buffer array for P(L|N) Returns -------- P_L_N : array, 3D map of the probability of the label (l) given the neighborhood of the voxel P(L|N) """ cdef: cnp.npy_intp nx = seg.shape[0] cnp.npy_intp ny = seg.shape[1] cnp.npy_intp nz = seg.shape[2] cnp.npy_intp nneigh = 6 cnp.npy_intp l = classid cnp.npy_intp x, y, z, xx, yy, zz double vox_prob cnp.npy_intp* dX = [-1, 0, 0, 0, 0, 1] cnp.npy_intp* dY = [0, -1, 0, 1, 0, 0] cnp.npy_intp* dZ = [0, 0, 1, 0, -1, 0] for x in range(nx): for y in range(ny): for z in range(nz): vox_prob = 0 for i in range(nneigh): xx = x + dX[i] if((xx < 0) or (xx >= nx)): continue yy = y + dY[i] if((yy < 0) or (yy >= ny)): continue zz = z + dZ[i] if((zz < 0) or (zz >= nz)): continue if seg[xx, yy, zz] == l: vox_prob -= beta else: vox_prob += beta P_L_N[x, y, z] = vox_prob dipy-0.13.0/dipy/segment/quickbundles.py000066400000000000000000000103711317371701200202160ustar00rootroot00000000000000import numpy as np from dipy.tracking.metrics import downsample from dipy.tracking.distances import local_skeleton_clustering from dipy.tracking.distances import bundles_distances_mdf from warnings import warn deprecation_msg = ("Class 'dipy.segment.quickbundles.QuickBundles' is" " deprecated, instead use module " " 'dipy.segment.clustering.QuickBundles'.") warn(DeprecationWarning(deprecation_msg)) class QuickBundles(object): def __init__(self, tracks, dist_thr=4., pts=12): """ Highly efficient trajectory clustering [Garyfallidis12]_. Parameters ---------- tracks : sequence of (N,3) ... (M,3) arrays trajectories (or tractography or streamlines) dist_thr : float distance threshold in the space of the tracks pts : int number of points for simplifying the tracks Methods ------- clustering() returns a dict holding with the clustering result virtuals() gives the virtuals (track centroids) of the clusters exemplars() gives the exemplars (track medoids) of the clusters References ---------- .. [Garyfallidis12] Garyfallidis E. 
et al., QuickBundles a method for tractography simplification, Frontiers in Neuroscience, vol 6, no 175, 2012. """ warn(DeprecationWarning(deprecation_msg)) self.dist_thr = dist_thr self.pts = pts if pts is not None: self.tracksd = [downsample(track, self.pts) for track in tracks] else: self.tracksd = tracks self.clustering = local_skeleton_clustering(self.tracksd, self.dist_thr) self.virts = None self.exemps = None def virtuals(self): if self.virts is None: self.virts = [(self.clustering[c]['hidden'] / np.float(self.clustering[c]['N'])) for c in self.clustering] return self.virts @property def centroids(self): return self.virtuals() def exemplars(self, tracks=None): if self.exemps is None: self.exemps = [] self.exempsi = [] C = self.clustering if tracks is None: tracks = self.tracksd for c in C: cluster = [tracks[i] for i in C[c]['indices']] D = bundles_distances_mdf([C[c]['hidden']/float(C[c]['N'])], cluster) D = D.ravel() si = np.argmin(D) self.exempsi.append(si) self.exemps.append(cluster[si]) return self.exemps, self.exempsi def partitions(self): return self.clustering def clusters(self): return self.clustering def clusters_sizes(self): C = self.clustering return [C[c]['N'] for c in C] def label2cluster(self, id): return self.clustering[id] def label2tracksids(self, id): return [i for i in self.clustering[id]['indices']] def label2tracks(self, tracks, id): return [tracks[i] for i in self.clustering[id]['indices']] @property def total_clusters(self): return len(self.clustering) def downsampled_tracks(self): return self.tracksd def remove_small_clusters(self, size): """ Remove clusters with small size Parameters ----------- size : int, threshold for minimum number of tracks allowed """ C = self.clustering for c in range(len(C)): if C[c]['N'] <= size: del C[c] C2 = {} keys = C.keys() for c in range(len(C)): C2[c] = C[keys[c]] self.clustering = C2 # self.tracksd=[downsample(track,self.pts) for track in tracks] self.virts = None def remove_cluster(self, id): print('Not implemented yet') pass def remove_clusters(self, list_ids): print('Not implemented yet') pass def remove_tracks(self): print('Not implemented yet') pass def points_per_track(self): print('Not implemented yet') pass dipy-0.13.0/dipy/segment/tests/000077500000000000000000000000001317371701200163135ustar00rootroot00000000000000dipy-0.13.0/dipy/segment/tests/__init__.py000066400000000000000000000000001317371701200204120ustar00rootroot00000000000000dipy-0.13.0/dipy/segment/tests/test_adjustment.py000066400000000000000000000033501317371701200221030ustar00rootroot00000000000000import numpy as np from numpy import zeros from dipy.segment.threshold import upper_bound_by_percent, upper_bound_by_rate from numpy.testing import assert_equal, run_module_suite def test_adjustment(): imga = zeros([128, 128]) for y in range(128): for x in range(128): if y > 10 and y < 115 and x > 10 and x < 115: imga[x, y] = 100 if y > 39 and y < 88 and x > 39 and x < 88: imga[x, y] = 150 if y > 59 and y < 69 and x > 59 and x < 69: imga[x, y] = 255 high_1 = upper_bound_by_rate(imga) high_2 = upper_bound_by_percent(imga) vol1 = np.interp(imga, xp=[imga.min(), high_1], fp=[0, 255]) vol2 = np.interp(imga, xp=[imga.min(), high_2], fp=[0, 255]) count2 = (88 - 40) * (88 - 40) count1 = (114 - 10) * (114 - 10) count1_test = 0 count2_test = 0 count2_upper = (88 - 40) * (88 - 40) count1_upper = (114 - 10) * (114 - 10) count1_upper_test = 0 count2_upper_test = 0 value1 = np.unique(vol1) value2 = np.unique(vol2) for i in range(128): for j in range(128): if vol1[i][j] > 
value1[1]: count2_test = count2_test + 1 if vol1[i][j] > 0: count1_test = count1_test + 1 for i in range(128): for j in range(128): if vol2[i][j] > value2[1]: count2_upper_test = count2_upper_test + 1 if vol2[i][j] > 0: count1_upper_test = count1_upper_test + 1 assert_equal(count2, count2_test) assert_equal(count1, count1_test) assert_equal(count2_upper, count2_upper_test) assert_equal(count1_upper, count1_upper_test) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/segment/tests/test_clustering.py000066400000000000000000000625611317371701200221150ustar00rootroot00000000000000 import numpy as np import itertools import copy from dipy.segment.clustering import Cluster, ClusterCentroid from dipy.segment.clustering import ClusterMap, ClusterMapCentroid from dipy.segment.clustering import Clustering from nose.tools import assert_equal, assert_true, assert_false from numpy.testing import assert_array_equal, assert_raises, run_module_suite from dipy.testing import assert_arrays_equal features_shape = (1, 10) dtype = "float32" features = np.ones(features_shape, dtype=dtype) data = [np.arange(3*5, dtype=dtype).reshape((-1, 3)), np.arange(3*10, dtype=dtype).reshape((-1, 3)), np.arange(3*15, dtype=dtype).reshape((-1, 3)), np.arange(3*17, dtype=dtype).reshape((-1, 3)), np.arange(3*20, dtype=dtype).reshape((-1, 3))] expected_clusters = [[2, 4], [0, 3], [1]] def test_cluster_attributes_and_constructor(): cluster = Cluster() assert_equal(type(cluster), Cluster) assert_equal(cluster.id, 0) assert_array_equal(cluster.indices, []) assert_equal(len(cluster), 0) # Duplicate assert_equal(cluster, Cluster(cluster.id, cluster.indices, cluster.refdata)) assert_false(cluster != Cluster(cluster.id, cluster.indices, cluster.refdata)) # Invalid comparison assert_raises(TypeError, cluster.__cmp__, cluster) def test_cluster_assign(): cluster = Cluster() indices = [] for idx in range(1, 10): cluster.assign(idx) indices.append(idx) assert_equal(len(cluster), idx) assert_equal(type(cluster.indices), list) assert_array_equal(cluster.indices, indices) # Test add multiples indices at the same time cluster = Cluster() cluster.assign(*range(1, 10)) assert_array_equal(cluster.indices, indices) def test_cluster_iter(): indices = list(range(len(data))) np.random.shuffle(indices) # None trivial ordering # Test without specifying refdata cluster = Cluster() cluster.assign(*indices) assert_array_equal(cluster.indices, indices) assert_array_equal(list(cluster), indices) # Test with specifying refdata in ClusterMap cluster.refdata = data assert_arrays_equal(list(cluster), [data[i] for i in indices]) def test_cluster_getitem(): indices = list(range(len(data))) np.random.shuffle(indices) # None trivial ordering advanced_indices = indices + [0, 1, 2, -1, -2, -3] # Test without specifying refdata in ClusterMap cluster = Cluster() cluster.assign(*indices) # Test indexing for i in advanced_indices: assert_equal(cluster[i], indices[i]) # Test advanced indexing assert_array_equal(cluster[advanced_indices], [indices[i] for i in advanced_indices]) # Test index out of bounds assert_raises(IndexError, cluster.__getitem__, len(cluster)) assert_raises(IndexError, cluster.__getitem__, -len(cluster)-1) # Test slicing and negative indexing assert_equal(cluster[-1], indices[-1]) assert_array_equal(cluster[::2], indices[::2]) assert_arrays_equal(cluster[::-1], indices[::-1]) assert_arrays_equal(cluster[:-1], indices[:-1]) assert_arrays_equal(cluster[1:], indices[1:]) # Test with wrong indexing object assert_raises(TypeError, 
cluster.__getitem__, "wrong") # Test with specifying refdata in ClusterMap cluster.refdata = data # Test indexing for i in advanced_indices: assert_array_equal(cluster[i], data[indices[i]]) # Test advanced indexing assert_arrays_equal(cluster[advanced_indices], [data[indices[i]] for i in advanced_indices]) # Test index out of bounds assert_raises(IndexError, cluster.__getitem__, len(cluster)) assert_raises(IndexError, cluster.__getitem__, -len(cluster)-1) # Test slicing and negative indexing assert_array_equal(cluster[-1], data[indices[-1]]) assert_arrays_equal(cluster[::2], [data[i] for i in indices[::2]]) assert_arrays_equal(cluster[::-1], [data[i] for i in indices[::-1]]) assert_arrays_equal(cluster[:-1], [data[i] for i in indices[:-1]]) assert_arrays_equal(cluster[1:], [data[i] for i in indices[1:]]) # Test with wrong indexing object assert_raises(TypeError, cluster.__getitem__, "wrong") def test_cluster_str_and_repr(): indices = list(range(len(data))) np.random.shuffle(indices) # None trivial ordering # Test without specifying refdata in ClusterMap cluster = Cluster() cluster.assign(*indices) assert_equal(str(cluster), "[" + ", ".join(map(str, indices)) + "]") assert_equal(repr(cluster), "Cluster([" + ", ".join(map(str, indices)) + "])") # Test with specifying refdata in ClusterMap cluster.refdata = data assert_equal(str(cluster), "[" + ", ".join(map(str, indices)) + "]") assert_equal(repr(cluster), "Cluster([" + ", ".join(map(str, indices)) + "])") def test_cluster_centroid_attributes_and_constructor(): centroid = np.zeros(features_shape) cluster = ClusterCentroid(centroid) assert_equal(type(cluster), ClusterCentroid) assert_equal(cluster.id, 0) assert_array_equal(cluster.indices, []) assert_array_equal(cluster.centroid, np.zeros(features_shape)) assert_equal(len(cluster), 0) # Duplicate assert_equal(cluster, ClusterCentroid(centroid)) assert_false(cluster != ClusterCentroid(centroid)) assert_false(cluster == ClusterCentroid(centroid+1)) # Invalid comparison assert_raises(TypeError, cluster.__cmp__, cluster) def test_cluster_centroid_assign(): centroid = np.zeros(features_shape) cluster = ClusterCentroid(centroid) indices = [] centroid = np.zeros(features_shape, dtype=dtype) for idx in range(1, 10): cluster.assign(idx, (idx+1) * features) cluster.update() indices.append(idx) centroid = (centroid * (idx-1) + (idx+1) * features) / idx assert_equal(len(cluster), idx) assert_equal(type(cluster.indices), list) assert_array_equal(cluster.indices, indices) assert_equal(type(cluster.centroid), np.ndarray) assert_array_equal(cluster.centroid, centroid) def test_cluster_centroid_iter(): indices = list(range(len(data))) np.random.shuffle(indices) # None trivial ordering # Test without specifying refdata in ClusterCentroid centroid = np.zeros(features_shape) cluster = ClusterCentroid(centroid) for idx in indices: cluster.assign(idx, (idx+1)*features) assert_array_equal(cluster.indices, indices) assert_array_equal(list(cluster), indices) # Test with specifying refdata in ClusterCentroid cluster.refdata = data assert_arrays_equal(list(cluster), [data[i] for i in indices]) def test_cluster_centroid_getitem(): indices = list(range(len(data))) np.random.shuffle(indices) # None trivial ordering advanced_indices = indices + [0, 1, 2, -1, -2, -3] # Test without specifying refdata in ClusterCentroid centroid = np.zeros(features_shape) cluster = ClusterCentroid(centroid) for idx in indices: cluster.assign(idx, (idx+1)*features) # Test indexing for i in advanced_indices: assert_equal(cluster[i], 
indices[i]) # Test advanced indexing assert_array_equal(cluster[advanced_indices], [indices[i] for i in advanced_indices]) # Test index out of bounds assert_raises(IndexError, cluster.__getitem__, len(cluster)) assert_raises(IndexError, cluster.__getitem__, -len(cluster)-1) # Test slicing and negative indexing assert_equal(cluster[-1], indices[-1]) assert_array_equal(cluster[::2], indices[::2]) assert_arrays_equal(cluster[::-1], indices[::-1]) assert_arrays_equal(cluster[:-1], indices[:-1]) assert_arrays_equal(cluster[1:], indices[1:]) # Test with specifying refdata in ClusterCentroid cluster.refdata = data # Test indexing for i in advanced_indices: assert_array_equal(cluster[i], data[indices[i]]) # Test advanced indexing assert_arrays_equal(cluster[advanced_indices], [data[indices[i]] for i in advanced_indices]) # Test index out of bounds assert_raises(IndexError, cluster.__getitem__, len(cluster)) assert_raises(IndexError, cluster.__getitem__, -len(cluster)-1) # Test slicing and negative indexing assert_array_equal(cluster[-1], data[indices[-1]]) assert_arrays_equal(cluster[::2], [data[i] for i in indices[::2]]) assert_arrays_equal(cluster[::-1], [data[i] for i in indices[::-1]]) assert_arrays_equal(cluster[:-1], [data[i] for i in indices[:-1]]) assert_arrays_equal(cluster[1:], [data[i] for i in indices[1:]]) def test_cluster_map_attributes_and_constructor(): clusters = ClusterMap() assert_equal(len(clusters), 0) assert_array_equal(clusters.clusters, []) assert_array_equal(list(clusters), []) assert_raises(IndexError, clusters.__getitem__, 0) assert_raises(AttributeError, setattr, clusters, 'clusters', []) def test_cluster_map_add_cluster(): clusters = ClusterMap() list_of_cluster_objects = [] list_of_indices = [] for i in range(3): cluster = Cluster() list_of_cluster_objects.append(cluster) list_of_indices.append([]) for id_data in range(2 * i): list_of_indices[-1].append(id_data) cluster.assign(id_data) clusters.add_cluster(cluster) assert_equal(type(cluster), Cluster) assert_equal(len(clusters), i+1) assert_equal(cluster, clusters[-1]) assert_array_equal(list(itertools.chain(*clusters)), list(itertools.chain(*list_of_indices))) # Test adding multiple clusters at once. clusters = ClusterMap() clusters.add_cluster(*list_of_cluster_objects) assert_array_equal(list(itertools.chain(*clusters)), list(itertools.chain(*list_of_indices))) def test_cluster_map_remove_cluster(): clusters = ClusterMap() cluster1 = Cluster(indices=[1]) clusters.add_cluster(cluster1) cluster2 = Cluster(indices=[1, 2]) clusters.add_cluster(cluster2) cluster3 = Cluster(indices=[1, 2, 3]) clusters.add_cluster(cluster3) assert_equal(len(clusters), 3) clusters.remove_cluster(cluster2) assert_equal(len(clusters), 2) assert_array_equal(list(itertools.chain(*clusters)), list(itertools.chain(*[cluster1, cluster3]))) assert_equal(clusters[0], cluster1) assert_equal(clusters[1], cluster3) clusters.remove_cluster(cluster3) assert_equal(len(clusters), 1) assert_array_equal(list(itertools.chain(*clusters)), list(cluster1)) assert_equal(clusters[0], cluster1) clusters.remove_cluster(cluster1) assert_equal(len(clusters), 0) assert_array_equal(list(itertools.chain(*clusters)), []) # Test removing multiple clusters at once. 
clusters = ClusterMap() clusters.add_cluster(cluster1, cluster2, cluster3) clusters.remove_cluster(cluster3, cluster2) assert_equal(len(clusters), 1) assert_array_equal(list(itertools.chain(*clusters)), list(cluster1)) assert_equal(clusters[0], cluster1) clusters = ClusterMap() clusters.add_cluster(cluster2, cluster1, cluster3) clusters.remove_cluster(cluster1, cluster3, cluster2) assert_equal(len(clusters), 0) assert_array_equal(list(itertools.chain(*clusters)), []) def test_cluster_map_clear(): nb_clusters = 11 clusters = ClusterMap() for i in range(nb_clusters): new_cluster = Cluster(indices=range(i)) clusters.add_cluster(new_cluster) clusters.clear() assert_equal(len(clusters), 0) assert_array_equal(list(itertools.chain(*clusters)), []) def test_cluster_map_iter(): rng = np.random.RandomState(42) nb_clusters = 11 # Test without specifying refdata in ClusterMap cluster_map = ClusterMap() clusters = [] for i in range(nb_clusters): new_cluster = Cluster(indices=rng.randint(0, len(data), size=10)) cluster_map.add_cluster(new_cluster) clusters.append(new_cluster) assert_true(all([c1 is c2 for c1, c2 in zip(cluster_map.clusters, clusters)])) assert_array_equal(cluster_map, clusters) assert_array_equal(cluster_map.clusters, clusters) assert_array_equal(cluster_map, [cluster.indices for cluster in clusters]) # Set refdata cluster_map.refdata = data for c1, c2 in zip(cluster_map, clusters): assert_arrays_equal(c1, [data[i] for i in c2.indices]) # Remove refdata, i.e. back to indices cluster_map.refdata = None assert_array_equal(cluster_map, [cluster.indices for cluster in clusters]) def test_cluster_map_getitem(): nb_clusters = 11 indices = list(range(nb_clusters)) np.random.shuffle(indices) # None trivial ordering advanced_indices = indices + [0, 1, 2, -1, -2, -3] cluster_map = ClusterMap() clusters = [] for i in range(nb_clusters): new_cluster = Cluster(indices=range(i)) cluster_map.add_cluster(new_cluster) clusters.append(new_cluster) # Test indexing for i in advanced_indices: assert_equal(cluster_map[i], clusters[i]) # Test advanced indexing assert_arrays_equal(cluster_map[advanced_indices], [clusters[i] for i in advanced_indices]) # Test index out of bounds assert_raises(IndexError, cluster_map.__getitem__, len(clusters)) assert_raises(IndexError, cluster_map.__getitem__, -len(clusters)-1) # Test slicing and negative indexing assert_equal(cluster_map[-1], clusters[-1]) assert_array_equal(cluster_map[::2], clusters[::2]) assert_arrays_equal(cluster_map[::-1], clusters[::-1]) assert_arrays_equal(cluster_map[:-1], clusters[:-1]) assert_arrays_equal(cluster_map[1:], clusters[1:]) def test_cluster_map_str_and_repr(): nb_clusters = 11 cluster_map = ClusterMap() clusters = [] for i in range(nb_clusters): new_cluster = Cluster(indices=range(i)) cluster_map.add_cluster(new_cluster) clusters.append(new_cluster) expected_str = "[" + ", ".join(map(str, clusters)) + "]" assert_equal(str(cluster_map), expected_str) assert_equal(repr(cluster_map), "ClusterMap(" + expected_str + ")") def test_cluster_map_size(): nb_clusters = 11 cluster_map = ClusterMap() clusters = [Cluster() for i in range(nb_clusters)] cluster_map.add_cluster(*clusters) assert_equal(len(cluster_map), nb_clusters) assert_equal(cluster_map.size(), nb_clusters) def test_cluster_map_clusters_sizes(): rng = np.random.RandomState(42) nb_clusters = 11 # Generate random indices indices = [range(rng.randint(1, 10)) for i in range(nb_clusters)] cluster_map = ClusterMap() clusters = [Cluster(indices=indices[i]) for i in range(nb_clusters)] 
cluster_map.add_cluster(*clusters) assert_equal(cluster_map.clusters_sizes(), list(map(len, indices))) def test_cluster_map_get_small_and_large_clusters(): rng = np.random.RandomState(42) nb_clusters = 11 cluster_map = ClusterMap() # Randomly generate small clusters indices = [rng.randint(0, 10, size=i) for i in range(1, nb_clusters+1)] small_clusters = [Cluster(indices=indices[i]) for i in range(nb_clusters)] cluster_map.add_cluster(*small_clusters) # Randomly generate small clusters indices = [rng.randint(0, 10, size=i) for i in range(nb_clusters+1, 2*nb_clusters+1)] large_clusters = [Cluster(indices=indices[i]) for i in range(nb_clusters)] cluster_map.add_cluster(*large_clusters) assert_equal(len(cluster_map), 2*nb_clusters) assert_equal(len(cluster_map.get_small_clusters(nb_clusters)), len(small_clusters)) assert_arrays_equal(cluster_map.get_small_clusters(nb_clusters), small_clusters) assert_equal(len(cluster_map.get_large_clusters(nb_clusters+1)), len(large_clusters)) assert_arrays_equal(cluster_map.get_large_clusters(nb_clusters+1), large_clusters) def test_cluster_map_comparison_with_int(): clusters1_indices = range(10) clusters2_indices = range(10, 15) clusters3_indices = [15] # Build a test ClusterMap clusters = ClusterMap() cluster1 = Cluster() cluster1.assign(*clusters1_indices) clusters.add_cluster(cluster1) cluster2 = Cluster() cluster2.assign(*clusters2_indices) clusters.add_cluster(cluster2) cluster3 = Cluster() cluster3.assign(*clusters3_indices) clusters.add_cluster(cluster3) subset = clusters < 5 assert_equal(subset.sum(), 1) assert_array_equal(list(clusters[subset][0]), clusters3_indices) subset = clusters <= 5 assert_equal(subset.sum(), 2) assert_array_equal(list(clusters[subset][0]), clusters2_indices) assert_array_equal(list(clusters[subset][1]), clusters3_indices) subset = clusters == 5 assert_equal(subset.sum(), 1) assert_array_equal(list(clusters[subset][0]), clusters2_indices) subset = clusters != 5 assert_equal(subset.sum(), 2) assert_array_equal(list(clusters[subset][0]), clusters1_indices) assert_array_equal(list(clusters[subset][1]), clusters3_indices) subset = clusters > 5 assert_equal(subset.sum(), 1) assert_array_equal(list(clusters[subset][0]), clusters1_indices) subset = clusters >= 5 assert_equal(subset.sum(), 2) assert_array_equal(list(clusters[subset][0]), clusters1_indices) assert_array_equal(list(clusters[subset][1]), clusters2_indices) def test_cluster_map_comparison_with_object(): nb_clusters = 4 cluster_map = ClusterMap() # clusters = [] for i in range(nb_clusters): new_cluster = Cluster(indices=range(i)) cluster_map.add_cluster(new_cluster) # clusters.append(new_cluster) # Comparison with another ClusterMap object other_cluster_map = copy.deepcopy(cluster_map) assert_true(cluster_map == other_cluster_map) other_cluster_map = copy.deepcopy(cluster_map) assert_false(cluster_map != other_cluster_map) other_cluster_map = copy.deepcopy(cluster_map) assert_raises(NotImplementedError, cluster_map.__le__, other_cluster_map) # Comparison with an object that is not a ClusterMap or int assert_raises(NotImplementedError, cluster_map.__le__, float(42)) def test_cluster_map_centroid_attributes_and_constructor(): clusters = ClusterMapCentroid() assert_array_equal(clusters.centroids, []) assert_raises(AttributeError, setattr, clusters, 'centroids', []) def test_cluster_map_centroid_add_cluster(): clusters = ClusterMapCentroid() centroids = [] for i in range(3): cluster = ClusterCentroid(centroid=np.zeros_like(features)) centroids.append(np.zeros_like(features)) 
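# --- A minimal sketch of the ClusterCentroid assign/update cycle that the
# surrounding test verifies: after update(), the centroid is the mean of the
# feature vectors assigned so far. It assumes ClusterCentroid is importable
# from dipy.segment.clustering; the two feature vectors are made up and use
# the same 2D float32 layout as this module's `features`.
import numpy as np
from dipy.segment.clustering import ClusterCentroid

feat_a = np.array([[1., 1., 1.]], dtype="float32")
feat_b = np.array([[3., 3., 3.]], dtype="float32")

cluster = ClusterCentroid(centroid=np.zeros_like(feat_a))
cluster.assign(0, feat_a)      # element id 0 and its feature vector
cluster.assign(1, feat_b)      # element id 1 and its feature vector
cluster.update()               # fold the assigned features into the centroid
print(cluster.centroid)        # [[2. 2. 2.]] -- the mean of feat_a and feat_b
print(list(cluster.indices))   # [0, 1]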
for id_data in range(2*i): centroids[-1] = ((centroids[-1]*id_data + (id_data+1)*features) / (id_data+1)) cluster.assign(id_data, (id_data+1)*features) cluster.update() clusters.add_cluster(cluster) assert_array_equal(cluster.centroid, centroids[-1]) assert_equal(type(cluster), ClusterCentroid) assert_equal(cluster, clusters[-1]) assert_equal(type(clusters.centroids), list) assert_array_equal(list(itertools.chain(*clusters.centroids)), list(itertools.chain(*centroids))) # Check adding features of different sizes (shorter and longer) features_shape_short = (1, features_shape[1]-3) features_too_short = np.ones(features_shape_short, dtype=dtype) assert_raises(ValueError, cluster.assign, 123, features_too_short) features_shape_long = (1, features_shape[1]+3) features_too_long = np.ones(features_shape_long, dtype=dtype) assert_raises(ValueError, cluster.assign, 123, features_too_long) def test_cluster_map_centroid_remove_cluster(): clusters = ClusterMapCentroid() centroid1 = np.random.rand(*features_shape).astype(dtype) cluster1 = ClusterCentroid(centroid1, indices=[1]) clusters.add_cluster(cluster1) centroid2 = np.random.rand(*features_shape).astype(dtype) cluster2 = ClusterCentroid(centroid2, indices=[1, 2]) clusters.add_cluster(cluster2) centroid3 = np.random.rand(*features_shape).astype(dtype) cluster3 = ClusterCentroid(centroid3, indices=[1, 2, 3]) clusters.add_cluster(cluster3) assert_equal(len(clusters), 3) clusters.remove_cluster(cluster2) assert_equal(len(clusters), 2) assert_array_equal(list(itertools.chain(*clusters)), list(itertools.chain(*[cluster1, cluster3]))) assert_array_equal(clusters.centroids, np.array([centroid1, centroid3])) assert_equal(clusters[0], cluster1) assert_equal(clusters[1], cluster3) clusters.remove_cluster(cluster3) assert_equal(len(clusters), 1) assert_array_equal(list(itertools.chain(*clusters)), list(cluster1)) assert_array_equal(clusters.centroids, np.array([centroid1])) assert_equal(clusters[0], cluster1) clusters.remove_cluster(cluster1) assert_equal(len(clusters), 0) assert_array_equal(list(itertools.chain(*clusters)), []) assert_array_equal(clusters.centroids, []) def test_cluster_map_centroid_iter(): rng = np.random.RandomState(42) nb_clusters = 11 cluster_map = ClusterMapCentroid() clusters = [] for i in range(nb_clusters): new_centroid = np.zeros_like(features) new_cluster = ClusterCentroid(new_centroid, indices=rng.randint(0, len(data), size=10)) cluster_map.add_cluster(new_cluster) clusters.append(new_cluster) assert_true(all([c1 is c2 for c1, c2 in zip(cluster_map.clusters, clusters)])) assert_array_equal(cluster_map, clusters) assert_array_equal(cluster_map.clusters, clusters) assert_array_equal(cluster_map, [cluster.indices for cluster in clusters]) # Set refdata cluster_map.refdata = data for c1, c2 in zip(cluster_map, clusters): assert_arrays_equal(c1, [data[i] for i in c2.indices]) def test_cluster_map_centroid_getitem(): nb_clusters = 11 indices = list(range(len(data))) np.random.shuffle(indices) # None trivial ordering advanced_indices = indices + [0, 1, 2, -1, -2, -3] cluster_map = ClusterMapCentroid() clusters = [] for i in range(nb_clusters): centroid = np.zeros_like(features) cluster = ClusterCentroid(centroid) cluster.id = cluster_map.add_cluster(cluster) clusters.append(cluster) # Test indexing for i in advanced_indices: assert_equal(cluster_map[i], clusters[i]) # Test advanced indexing assert_arrays_equal(cluster_map[advanced_indices], [clusters[i] for i in advanced_indices]) # Test index out of bounds assert_raises(IndexError, 
cluster_map.__getitem__, len(clusters)) assert_raises(IndexError, cluster_map.__getitem__, -len(clusters)-1) # Test slicing and negative indexing assert_equal(cluster_map[-1], clusters[-1]) assert_array_equal(cluster_map[::2], clusters[::2]) assert_arrays_equal(cluster_map[::-1], clusters[::-1]) assert_arrays_equal(cluster_map[:-1], clusters[:-1]) assert_arrays_equal(cluster_map[1:], clusters[1:]) def test_cluster_map_centroid_comparison_with_int(): clusters1_indices = range(10) clusters2_indices = range(10, 15) clusters3_indices = [15] # Build a test ClusterMapCentroid centroid = np.zeros_like(features) cluster1 = ClusterCentroid(centroid.copy()) for i in clusters1_indices: cluster1.assign(i, features) cluster2 = ClusterCentroid(centroid.copy()) for i in clusters2_indices: cluster2.assign(i, features) cluster3 = ClusterCentroid(centroid.copy()) for i in clusters3_indices: cluster3.assign(i, features) # Update centroids cluster1.update() cluster2.update() cluster3.update() clusters = ClusterMapCentroid() clusters.add_cluster(cluster1) clusters.add_cluster(cluster2) clusters.add_cluster(cluster3) subset = clusters < 5 assert_equal(subset.sum(), 1) assert_array_equal(list(clusters[subset][0]), clusters3_indices) subset = clusters <= 5 assert_equal(subset.sum(), 2) assert_array_equal(list(clusters[subset][0]), clusters2_indices) assert_array_equal(list(clusters[subset][1]), clusters3_indices) subset = clusters == 5 assert_equal(subset.sum(), 1) assert_array_equal(list(clusters[subset][0]), clusters2_indices) subset = clusters != 5 assert_equal(subset.sum(), 2) assert_array_equal(list(clusters[subset][0]), clusters1_indices) assert_array_equal(list(clusters[subset][1]), clusters3_indices) subset = clusters > 5 assert_equal(subset.sum(), 1) assert_array_equal(list(clusters[subset][0]), clusters1_indices) subset = clusters >= 5 assert_equal(subset.sum(), 2) assert_array_equal(list(clusters[subset][0]), clusters1_indices) assert_array_equal(list(clusters[subset][1]), clusters2_indices) def test_subclassing_clustering(): class SubClustering(Clustering): def cluster(self, data, ordering=None): pass clustering_algo = SubClustering() assert_raises(NotImplementedError, super(SubClustering, clustering_algo).cluster, None) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/segment/tests/test_feature.py000066400000000000000000000266511317371701200213710ustar00rootroot00000000000000import sys import numpy as np import dipy.segment.metric as dipymetric from dipy.segment.featurespeed import extract from nose.tools import assert_true, assert_false, assert_equal from numpy.testing import (assert_array_equal, assert_array_almost_equal, assert_raises, run_module_suite) dtype = "float32" s1 = np.array([np.arange(10, dtype=dtype)]*3).T # 10x3 s2 = np.arange(3*10, dtype=dtype).reshape((-1, 3))[::-1] # 10x3 s3 = np.random.rand(5, 4).astype(dtype) # 5x4 s4 = np.random.rand(5, 3).astype(dtype) # 5x3 def test_identity_feature(): # Test subclassing Feature class IdentityFeature(dipymetric.Feature): def __init__(self): super(IdentityFeature, self).__init__(is_order_invariant=False) def infer_shape(self, streamline): return streamline.shape def extract(self, streamline): return streamline for feature in [dipymetric.IdentityFeature(), IdentityFeature()]: for s in [s1, s2, s3, s4]: # Test method infer_shape assert_equal(feature.infer_shape(s), s.shape) # Test method extract features = feature.extract(s) assert_equal(features.shape, s.shape) assert_array_equal(features, s) # This feature type is not order 
invariant assert_false(feature.is_order_invariant) for s in [s1, s2, s3, s4]: features = feature.extract(s) features_flip = feature.extract(s[::-1]) assert_array_equal(features_flip, s[::-1]) assert_true(np.any(np.not_equal(features, features_flip))) def test_feature_resample(): from dipy.tracking.streamline import set_number_of_points # Test subclassing Feature class ResampleFeature(dipymetric.Feature): def __init__(self, nb_points): super(ResampleFeature, self).__init__(is_order_invariant=False) self.nb_points = nb_points if nb_points <= 0: msg = ("ResampleFeature: `nb_points` must be strictly" " positive: {0}").format(nb_points) raise ValueError(msg) def infer_shape(self, streamline): return (self.nb_points, streamline.shape[1]) def extract(self, streamline): return set_number_of_points(streamline, self.nb_points) assert_raises(ValueError, dipymetric.ResampleFeature, nb_points=0) assert_raises(ValueError, ResampleFeature, nb_points=0) max_points = max(map(len, [s1, s2, s3, s4])) for nb_points in [2, 5, 2*max_points]: for feature in [dipymetric.ResampleFeature(nb_points), ResampleFeature(nb_points)]: for s in [s1, s2, s3, s4]: # Test method infer_shape assert_equal(feature.infer_shape(s), (nb_points, s.shape[1])) # Test method extract features = feature.extract(s) assert_equal(features.shape, (nb_points, s.shape[1])) assert_array_almost_equal(features, set_number_of_points(s, nb_points)) # This feature type is not order invariant assert_false(feature.is_order_invariant) for s in [s1, s2, s3, s4]: features = feature.extract(s) features_flip = feature.extract(s[::-1]) assert_array_equal(features_flip, set_number_of_points(s[::-1], nb_points)) assert_true(np.any(np.not_equal(features, features_flip))) def test_feature_center_of_mass(): # Test subclassing Feature class CenterOfMassFeature(dipymetric.Feature): def __init__(self): super(CenterOfMassFeature, self).__init__(is_order_invariant=True) def infer_shape(self, streamline): return (1, streamline.shape[1]) def extract(self, streamline): return np.mean(streamline, axis=0)[None, :] for feature in [dipymetric.CenterOfMassFeature(), CenterOfMassFeature()]: for s in [s1, s2, s3, s4]: # Test method infer_shape assert_equal(feature.infer_shape(s), (1, s.shape[1])) # Test method extract features = feature.extract(s) assert_equal(features.shape, (1, s.shape[1])) assert_array_almost_equal(features, np.mean(s, axis=0)[None, :]) # This feature type is order invariant assert_true(feature.is_order_invariant) for s in [s1, s2, s3, s4]: features = feature.extract(s) features_flip = feature.extract(s[::-1]) assert_array_almost_equal(features, features_flip) def test_feature_midpoint(): # Test subclassing Feature class MidpointFeature(dipymetric.Feature): def __init__(self): super(MidpointFeature, self).__init__(is_order_invariant=False) def infer_shape(self, streamline): return (1, streamline.shape[1]) def extract(self, streamline): return streamline[[len(streamline)//2]] for feature in [dipymetric.MidpointFeature(), MidpointFeature()]: for s in [s1, s2, s3, s4]: # Test method infer_shape assert_equal(feature.infer_shape(s), (1, s.shape[1])) # Test method extract features = feature.extract(s) assert_equal(features.shape, (1, s.shape[1])) assert_array_almost_equal(features, s[len(s)//2][None, :]) # This feature type is not order invariant assert_false(feature.is_order_invariant) for s in [s1, s2, s3, s4]: features = feature.extract(s) features_flip = feature.extract(s[::-1]) if len(s) % 2 == 0: assert_true(np.any(np.not_equal(features, features_flip))) 
else: assert_array_equal(features, features_flip) def test_feature_arclength(): from dipy.tracking.streamline import length # Test subclassing Feature class ArcLengthFeature(dipymetric.Feature): def __init__(self): super(ArcLengthFeature, self).__init__(is_order_invariant=True) def infer_shape(self, streamline): return (1, 1) def extract(self, streamline): return length(streamline)[None, None] for feature in [dipymetric.ArcLengthFeature(), ArcLengthFeature()]: for s in [s1, s2, s3, s4]: # Test method infer_shape assert_equal(feature.infer_shape(s), (1, 1)) # Test method extract features = feature.extract(s) assert_equal(features.shape, (1, 1)) assert_array_almost_equal(features, length(s)[None, None]) # This feature type is order invariant assert_true(feature.is_order_invariant) for s in [s1, s2, s3, s4]: features = feature.extract(s) features_flip = feature.extract(s[::-1]) assert_array_almost_equal(features, features_flip) def test_feature_vector_of_endpoints(): # Test subclassing Feature class VectorOfEndpointsFeature(dipymetric.Feature): def __init__(self): super(VectorOfEndpointsFeature, self).__init__(False) def infer_shape(self, streamline): return (1, streamline.shape[1]) def extract(self, streamline): return streamline[[-1]] - streamline[[0]] feature_types = [dipymetric.VectorOfEndpointsFeature(), VectorOfEndpointsFeature()] for feature in feature_types: for s in [s1, s2, s3, s4]: # Test method infer_shape assert_equal(feature.infer_shape(s), (1, s.shape[1])) # Test method extract features = feature.extract(s) assert_equal(features.shape, (1, s.shape[1])) assert_array_almost_equal(features, s[[-1]] - s[[0]]) # This feature type is not order invariant assert_false(feature.is_order_invariant) for s in [s1, s2, s3, s4]: features = feature.extract(s) features_flip = feature.extract(s[::-1]) # The flip features are simply the negative of the features. 
assert_array_almost_equal(features, -features_flip) def test_feature_extract(): # Test that features are automatically cast into float32 when # coming from Python space class CenterOfMass64bit(dipymetric.Feature): def infer_shape(self, streamline): return streamline.shape[1] def extract(self, streamline): return np.mean(streamline.astype(np.float64), axis=0) rng = np.random.RandomState(1234) nb_streamlines = 100 feature_shape = (1, 3) # One N-dimensional point feature = CenterOfMass64bit() nb_points = rng.randint(20, 30, size=(nb_streamlines,)) * 3 streamlines = [np.arange(nb).reshape((-1, 3)).astype(np.float32) for nb in nb_points] features = extract(feature, streamlines) assert_equal(len(features), len(streamlines)) assert_equal(features[0].shape, feature_shape) # Test that scalar features class ArcLengthFeature(dipymetric.Feature): def infer_shape(self, streamline): return 1 def extract(self, streamline): square_norms = np.sum((streamline[1:] - streamline[:-1]) ** 2) return np.sum(np.sqrt(square_norms)) nb_streamlines = 100 feature_shape = (1, 1) # One scalar represented as a 2D array feature = ArcLengthFeature() features = extract(feature, streamlines) assert_equal(len(features), len(streamlines)) assert_equal(features[0].shape, feature_shape) # Try if streamlines are readonly for s in streamlines: s.setflags(write=False) features = extract(feature, streamlines) def test_subclassing_feature(): class EmptyFeature(dipymetric.Feature): pass feature = EmptyFeature() assert_raises(NotImplementedError, feature.infer_shape, None) assert_raises(NotImplementedError, feature.extract, None) def test_using_python_feature_with_cython_metric(): class Identity(dipymetric.Feature): def infer_shape(self, streamline): return streamline.shape def extract(self, streamline): return streamline # Test using Python Feature with Cython Metric feature = Identity() metric = dipymetric.AveragePointwiseEuclideanMetric(feature) d1 = dipymetric.dist(metric, s1, s2) features1 = metric.feature.extract(s1) features2 = metric.feature.extract(s2) d2 = metric.dist(features1, features2) assert_equal(d1, d2) # Python 2.7 on Windows 64 bits uses long type instead of int for # constants integer. We make sure the code is robust to such behaviour # by explicitly testing it. class ArcLengthFeature(dipymetric.Feature): def infer_shape(self, streamline): if sys.version_info > (3,): return 1 # In Python 3, constant integer are of type long. 
return long(1) def extract(self, streamline): square_norms = np.sum((streamline[1:] - streamline[:-1]) ** 2) return np.sum(np.sqrt(square_norms)) # Test using Python Feature with Cython Metric feature = ArcLengthFeature() metric = dipymetric.EuclideanMetric(feature) d1 = dipymetric.dist(metric, s1, s2) features1 = metric.feature.extract(s1) features2 = metric.feature.extract(s2) d2 = metric.dist(features1, features2) assert_equal(d1, d2) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/segment/tests/test_mask.py000066400000000000000000000073721317371701200206700ustar00rootroot00000000000000import warnings import numpy as np import nibabel as nib from scipy.ndimage import generate_binary_structure, binary_dilation from scipy.ndimage.filters import median_filter from dipy.segment.mask import (otsu, bounding_box, crop, applymask, multi_median, median_otsu) from numpy.testing import (assert_equal, assert_almost_equal, run_module_suite) from dipy.data import get_data def test_mask(): vol = np.zeros((30, 30, 30)) vol[15, 15, 15] = 1 struct = generate_binary_structure(3, 1) voln = binary_dilation(vol, structure=struct, iterations=4).astype('f4') initial = np.sum(voln > 0) mask = voln.copy() thresh = otsu(mask) mask = mask > thresh initial_otsu = np.sum(mask > 0) assert_equal(initial_otsu, initial) mins, maxs = bounding_box(mask) voln_crop = crop(mask, mins, maxs) initial_crop = np.sum(voln_crop > 0) assert_equal(initial_crop, initial) applymask(voln, mask) final = np.sum(voln > 0) assert_equal(final, initial) # Test multi_median. median_test = np.arange(25).reshape(5, 5) median_control = median_test.copy() medianradius = 3 median_test = multi_median(median_test, medianradius, 3) medarr = np.ones_like(median_control.shape) * ((medianradius * 2) + 1) median_filter(median_control, medarr, output=median_control) median_filter(median_control, medarr, output=median_control) median_filter(median_control, medarr, output=median_control) assert_equal(median_test, median_control) def test_bounding_box(): vol = np.zeros((100, 100, 50), dtype=int) # Check the more usual case vol[10:90, 11:40, 5:33] = 3 mins, maxs = bounding_box(vol) assert_equal(mins, [10, 11, 5]) assert_equal(maxs, [90, 40, 33]) # Check a 2d case mins, maxs = bounding_box(vol[10]) assert_equal(mins, [11, 5]) assert_equal(maxs, [40, 33]) vol[:] = 0 with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") # Trigger a warning. 
num_warns = len(w) mins, maxs = bounding_box(vol) # Assert number of warnings has gone up by 1 assert_equal(len(w), num_warns + 1) # Check that an empty array returns zeros for both min & max assert_equal(mins, [0, 0, 0]) assert_equal(maxs, [0, 0, 0]) # Check the 2d case mins, maxs = bounding_box(vol[0]) assert_equal(len(w), num_warns + 2) assert_equal(mins, [0, 0]) assert_equal(maxs, [0, 0]) def test_median_otsu(): fname = get_data('S0_10') img = nib.load(fname) data = img.get_data() data = np.squeeze(data.astype('f8')) dummy_mask = data > data.mean() data_masked, mask = median_otsu(data, median_radius=3, numpass=2, autocrop=False, vol_idx=None, dilate=None) assert_equal(mask.sum() < dummy_mask.sum(), True) data2 = np.zeros(data.shape + (2,)) data2[..., 0] = data data2[..., 1] = data data2_masked, mask2 = median_otsu(data2, median_radius=3, numpass=2, autocrop=False, vol_idx=[0, 1], dilate=None) assert_almost_equal(mask.sum(), mask2.sum()) _, mask3 = median_otsu(data2, median_radius=3, numpass=2, autocrop=False, vol_idx=[0, 1], dilate=1) assert_equal(mask2.sum() < mask3.sum(), True) _, mask4 = median_otsu(data2, median_radius=3, numpass=2, autocrop=False, vol_idx=[0, 1], dilate=2) assert_equal(mask3.sum() < mask4.sum(), True) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/segment/tests/test_metric.py000066400000000000000000000234201317371701200212100ustar00rootroot00000000000000import numpy as np import dipy.segment.metric as dipymetric import itertools from nose.tools import (assert_true, assert_false, assert_equal) from numpy.testing import (assert_array_equal, assert_raises, run_module_suite, assert_almost_equal) def norm(x, ord=None, axis=None): if axis is not None: return np.apply_along_axis(np.linalg.norm, axis, x.astype(np.float64), ord) return np.linalg.norm(x.astype(np.float64), ord=ord) dtype = "float32" # Create wiggling streamline nb_points = 18 rng = np.random.RandomState(42) x = np.linspace(0, 10, nb_points) y = rng.rand(nb_points) z = np.sin(np.linspace(0, np.pi, nb_points)) # Bending s = np.array([x, y, z], dtype=dtype).T # Create trivial streamlines s1 = np.array([np.arange(10, dtype=dtype)]*3).T # 10x3 s2 = np.arange(3*10, dtype=dtype).reshape((-1, 3))[::-1] # 10x3 s3 = np.array([np.arange(5, dtype=dtype)]*4) # 5x4 s4 = np.array([np.arange(5, dtype=dtype)]*3) # 5x3 streamlines = [s, s1, s2, s3, s4] def test_metric_minimum_average_direct_flip(): feature = dipymetric.IdentityFeature() class MinimumAverageDirectFlipMetric(dipymetric.Metric): def __init__(self, feature): super(MinimumAverageDirectFlipMetric, self).__init__( feature=feature) @property def is_order_invariant(self): return True # Ordering is handled in the distance computation def are_compatible(self, shape1, shape2): return shape1[0] == shape2[0] def dist(self, v1, v2): def average_euclidean(x, y): return np.mean(norm(x-y, axis=1)) dist_direct = average_euclidean(v1, v2) dist_flipped = average_euclidean(v1, v2[::-1]) return min(dist_direct, dist_flipped) for metric in [MinimumAverageDirectFlipMetric(feature), dipymetric.MinimumAverageDirectFlipMetric(feature)]: # Test special cases of the MDF distance. assert_equal(metric.dist(s, s), 0.) assert_equal(metric.dist(s, s[::-1]), 0.) 
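# --- A minimal sketch of the MDF distance whose special cases are checked
# here: for two streamlines with the same number of points, dipymetric.mdf
# returns the smaller of the direct and the flipped mean point-wise Euclidean
# distance, so a streamline and its reverse are at distance zero. The two toy
# streamlines below are made up for illustration and follow this module's
# float32, Nx3 convention.
import numpy as np
import dipy.segment.metric as dipymetric

sa = np.array([[0, 0, 0], [1, 0, 0], [2, 0, 0]], dtype="float32")
sb = sa + np.array([0, 3, 0], dtype="float32")   # sa shifted by 3 along y
sa_flipped = sa[::-1].copy()                     # same path, reversed order

print(dipymetric.mdf(sa, sa_flipped))  # 0.0 -- orientation is ignored
print(dipymetric.mdf(sa, sb))          # 3.0 -- mean point-wise distance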
# Translation offset = np.array([0.8, 1.3, 5], dtype=dtype) assert_almost_equal(metric.dist(s, s+offset), norm(offset), 5) # Scaling M_scaling = np.diag([1.2, 2.8, 3]).astype(dtype) s_mean = np.mean(s, axis=0) s_zero_mean = s - s_mean s_scaled = np.dot(M_scaling, s_zero_mean.T).T + s_mean d = np.mean(norm((np.diag(M_scaling)-1)*s_zero_mean, axis=1)) assert_almost_equal(metric.dist(s, s_scaled), d, 5) # Rotation from dipy.core.geometry import rodrigues_axis_rotation rot_axis = np.array([1, 2, 3], dtype=dtype) M_rotation = rodrigues_axis_rotation(rot_axis, 60.).astype(dtype) s_mean = np.mean(s, axis=0) s_zero_mean = s - s_mean s_rotated = np.dot(M_rotation, s_zero_mean.T).T + s_mean opposite = norm(np.cross(rot_axis, s_zero_mean), axis=1) / norm(rot_axis) distances = np.sqrt(2*opposite**2 * (1 - np.cos(60.*np.pi/180.))).astype(dtype) d = np.mean(distances) assert_almost_equal(metric.dist(s, s_rotated), d, 5) # All possible pairs for s1, s2 in itertools.product(*[streamlines]*2): # Extract features since metric doesn't work # directly on streamlines f1 = metric.feature.extract(s1) f2 = metric.feature.extract(s2) # Test method are_compatible same_nb_points = f1.shape[0] == f2.shape[0] assert_equal(metric.are_compatible(f1.shape, f2.shape), same_nb_points) # Test method dist if features are compatible if metric.are_compatible(f1.shape, f2.shape): distance = metric.dist(f1, f2) if np.all(f1 == f2): assert_equal(distance, 0.) assert_almost_equal(distance, dipymetric.dist(metric, s1, s2)) assert_almost_equal(distance, dipymetric.mdf(s1, s2)) assert_true(distance >= 0.) # This metric type is order invariant assert_true(metric.is_order_invariant) # All possible pairs for s1, s2 in itertools.product(*[streamlines]*2): f1 = metric.feature.extract(s1) f2 = metric.feature.extract(s2) if not metric.are_compatible(f1.shape, f2.shape): continue f1_flip = metric.feature.extract(s1[::-1]) f2_flip = metric.feature.extract(s2[::-1]) distance = metric.dist(f1, f2) assert_almost_equal(metric.dist(f1_flip, f2_flip), distance) if not np.all(f1_flip == f2_flip): assert_true(np.allclose(metric.dist(f1, f2_flip), distance)) assert_true(np.allclose(metric.dist(f1_flip, f2), distance)) def test_metric_cosine(): feature = dipymetric.VectorOfEndpointsFeature() class CosineMetric(dipymetric.Metric): def __init__(self, feature): super(CosineMetric, self).__init__(feature=feature) def are_compatible(self, shape1, shape2): # Cosine metric works on vectors. return shape1 == shape2 and shape1[0] == 1 def dist(self, v1, v2): # Check if we have null vectors if norm(v1) == 0: return 0. if norm(v2) == 0 else 1. v1_normed = v1.astype(np.float64) / norm(v1.astype(np.float64)) v2_normed = v2.astype(np.float64) / norm(v2.astype(np.float64)) cos_theta = np.dot(v1_normed, v2_normed.T) # Make sure it's in [-1, 1], i.e. within domain of arccosine cos_theta = np.minimum(cos_theta, 1.) cos_theta = np.maximum(cos_theta, -1.) return np.arccos(cos_theta) / np.pi # Normalized cosine distance for metric in [CosineMetric(feature), dipymetric.CosineMetric(feature)]: # Test special cases of the cosine distance. v0 = np.array([[0, 0, 0]], dtype=np.float32) v1 = np.array([[1, 2, 3]], dtype=np.float32) v2 = np.array([[1, -1./2, 0]], dtype=np.float32) v3 = np.array([[-1, -2, -3]], dtype=np.float32) assert_equal(metric.dist(v0, v0), 0.) # dot-dot assert_equal(metric.dist(v0, v1), 1.) # dot-line assert_equal(metric.dist(v1, v1), 0.) # collinear assert_equal(metric.dist(v1, v2), 0.5) # orthogonal assert_equal(metric.dist(v1, v3), 1.) 
# opposite # All possible pairs for s1, s2 in itertools.product(*[streamlines]*2): # Extract features since metric doesn't # work directly on streamlines f1 = metric.feature.extract(s1) f2 = metric.feature.extract(s2) # Test method are_compatible are_vectors = f1.shape[0] == 1 and f2.shape[0] == 1 same_dimension = f1.shape[1] == f2.shape[1] assert_equal(metric.are_compatible(f1.shape, f2.shape), are_vectors and same_dimension) # Test method dist if features are compatible if metric.are_compatible(f1.shape, f2.shape): distance = metric.dist(f1, f2) if np.all(f1 == f2): assert_almost_equal(distance, 0.) assert_almost_equal(distance, dipymetric.dist(metric, s1, s2)) assert_true(distance >= 0.) assert_true(distance <= 1.) # This metric type is not order invariant assert_false(metric.is_order_invariant) # All possible pairs for s1, s2 in itertools.product(*[streamlines]*2): f1 = metric.feature.extract(s1) f2 = metric.feature.extract(s2) if not metric.are_compatible(f1.shape, f2.shape): continue f1_flip = metric.feature.extract(s1[::-1]) f2_flip = metric.feature.extract(s2[::-1]) distance = metric.dist(f1, f2) assert_almost_equal(metric.dist(f1_flip, f2_flip), distance) if not np.all(f1_flip == f2_flip): assert_false(metric.dist(f1, f2_flip) == distance) assert_false(metric.dist(f1_flip, f2) == distance) def test_subclassing_metric(): class EmptyMetric(dipymetric.Metric): pass metric = EmptyMetric() assert_raises(NotImplementedError, metric.are_compatible, None, None) assert_raises(NotImplementedError, metric.dist, None, None) def test_distance_matrix(): metric = dipymetric.SumPointwiseEuclideanMetric() for dtype in [np.int32, np.int64, np.float32, np.float64]: # Compute distances of all tuples spawn by the Cartesian product # of `data` with itself. data = (np.random.rand(4, 10, 3)*10).astype(dtype) D = dipymetric.distance_matrix(metric, data) assert_equal(D.shape, (len(data), len(data))) assert_array_equal(np.diag(D), np.zeros(len(data))) if metric.is_order_invariant: # Distance matrix should be symmetric assert_array_equal(D, D.T) for i in range(len(data)): for j in range(len(data)): assert_equal(D[i, j], dipymetric.dist(metric, data[i], data[j])) # Compute distances of all tuples spawn by the Cartesian product # of `data` with `data2`. 
data2 = (np.random.rand(3, 10, 3)*10).astype(dtype) D = dipymetric.distance_matrix(metric, data, data2) assert_equal(D.shape, (len(data), len(data2))) for i in range(len(data)): for j in range(len(data2)): assert_equal(D[i, j], dipymetric.dist(metric, data[i], data2[j])) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/segment/tests/test_mrf.py000066400000000000000000000372771317371701200205300ustar00rootroot00000000000000import numpy as np import numpy.testing as npt from dipy.data import get_data from dipy.sims.voxel import add_noise from dipy.segment.mrf import (ConstantObservationModel, IteratedConditionalModes) from dipy.segment.tissue import (TissueClassifierHMRF) # Load a coronal slice from a T1-weighted MRI fname = get_data('t1_coronal_slice') single_slice = np.load(fname) # Stack a few copies to form a 3D volume nslices = 5 image = np.zeros(shape=single_slice.shape + (nslices,)) image[..., :nslices] = single_slice[..., None] # Set up parameters nclasses = 4 beta = np.float64(0.0) max_iter = 10 background_noise = True # Making squares square = np.zeros((256, 256, 3), dtype=np.int16) square[42:213, 42:213, :] = 1 square[71:185, 71:185, :] = 2 square[99:157, 99:157, :] = 3 square_gauss = np.zeros((256, 256, 3)) + 0.001 square_gauss = add_noise(square_gauss, 10000, 1, noise_type='gaussian') square_gauss[42:213, 42:213, :] = 1 noise_1 = np.random.normal(1.001, 0.0001, size=square_gauss[42:213, 42:213, :].shape) square_gauss[42:213, 42:213, :] = square_gauss[42:213, 42:213, :] + noise_1 square_gauss[71:185, 71:185, :] = 2 noise_2 = np.random.normal(2.001, 0.0001, size=square_gauss[71:185, 71:185, :].shape) square_gauss[71:185, 71:185, :] = square_gauss[71:185, 71:185, :] + noise_2 square_gauss[99:157, 99:157, :] = 3 noise_3 = np.random.normal(3.001, 0.0001, size=square_gauss[99:157, 99:157, :].shape) square_gauss[99:157, 99:157, :] = square_gauss[99:157, 99:157, :] + noise_3 square_1 = np.zeros((256, 256, 3)) + 0.001 square_1 = add_noise(square_1, 10000, 1, noise_type='gaussian') temp_1 = np.random.random_integers(20, size=(171, 171, 3)) temp_1 = np.where(temp_1 < 20, 1, 3) square_1[42:213, 42:213, :] = temp_1 temp_2 = np.random.random_integers(20, size=(114, 114, 3)) temp_2 = np.where(temp_2 < 19, 2, 1) square_1[71:185, 71:185, :] = temp_2 temp_3 = np.random.random_integers(20, size=(58, 58, 3)) temp_3 = np.where(temp_3 < 20, 3, 1) square_1[99:157, 99:157, :] = temp_3 def test_greyscale_image(): com = ConstantObservationModel() icm = IteratedConditionalModes() mu, sigma = com.initialize_param_uniform(image, nclasses) sigmasq = sigma ** 2 npt.assert_array_almost_equal(mu, np.array([0., 0.25, 0.5, 0.75])) npt.assert_array_almost_equal(sigma, np.array([1.0, 1.0, 1.0, 1.0])) npt.assert_array_almost_equal(sigmasq, np.array([1.0, 1.0, 1.0, 1.0])) neglogl = com.negloglikelihood(image, mu, sigmasq, nclasses) npt.assert_(neglogl[100, 100, 1, 0] != neglogl[100, 100, 1, 1]) npt.assert_(neglogl[100, 100, 1, 1] != neglogl[100, 100, 1, 2]) npt.assert_(neglogl[100, 100, 1, 2] != neglogl[100, 100, 1, 3]) npt.assert_(neglogl[100, 100, 1, 1] != neglogl[100, 100, 1, 3]) initial_segmentation = icm.initialize_maximum_likelihood(neglogl) npt.assert_(initial_segmentation.max() == nclasses - 1) npt.assert_(initial_segmentation.min() == 0) PLN = icm.prob_neighborhood(initial_segmentation, beta, nclasses) print(PLN.shape) npt.assert_(np.all((PLN >= 0) & (PLN <= 1.0))) if beta == 0.0: npt.assert_almost_equal(PLN[50, 50, 1, 0], 0.25, True) npt.assert_almost_equal(PLN[50, 50, 1, 1], 0.25, True) 
npt.assert_almost_equal(PLN[50, 50, 1, 2], 0.25, True) npt.assert_almost_equal(PLN[50, 50, 1, 3], 0.25, True) npt.assert_almost_equal(PLN[147, 129, 1, 0], 0.25, True) npt.assert_almost_equal(PLN[147, 129, 1, 1], 0.25, True) npt.assert_almost_equal(PLN[147, 129, 1, 2], 0.25, True) npt.assert_almost_equal(PLN[147, 129, 1, 3], 0.25, True) npt.assert_almost_equal(PLN[61, 152, 1, 0], 0.25, True) npt.assert_almost_equal(PLN[61, 152, 1, 1], 0.25, True) npt.assert_almost_equal(PLN[61, 152, 1, 2], 0.25, True) npt.assert_almost_equal(PLN[61, 152, 1, 3], 0.25, True) npt.assert_almost_equal(PLN[100, 100, 1, 0], 0.25, True) npt.assert_almost_equal(PLN[100, 100, 1, 1], 0.25, True) npt.assert_almost_equal(PLN[100, 100, 1, 2], 0.25, True) npt.assert_almost_equal(PLN[100, 100, 1, 3], 0.25, True) PLY = com.prob_image(image, nclasses, mu, sigmasq, PLN) print(PLY) npt.assert_(np.all((PLY >= 0) & (PLY <= 1.0))) mu_upd, sigmasq_upd = com.update_param(image, PLY, mu, nclasses) print(mu) print(mu_upd) npt.assert_(mu_upd[0] != mu[0]) npt.assert_(mu_upd[1] != mu[1]) npt.assert_(mu_upd[2] != mu[2]) npt.assert_(mu_upd[3] != mu[3]) print(sigmasq) print(sigmasq_upd) npt.assert_(sigmasq_upd[0] != sigmasq[0]) npt.assert_(sigmasq_upd[1] != sigmasq[1]) npt.assert_(sigmasq_upd[2] != sigmasq[2]) npt.assert_(sigmasq_upd[3] != sigmasq[3]) icm_segmentation, energy = icm.icm_ising(neglogl, beta, initial_segmentation) npt.assert_(np.abs(np.sum(icm_segmentation)) != 0) npt.assert_(icm_segmentation.max() == nclasses - 1) npt.assert_(icm_segmentation.min() == 0) def test_greyscale_iter(): max_iter = 15 beta = np.float64(0.1) com = ConstantObservationModel() icm = IteratedConditionalModes() mu, sigma = com.initialize_param_uniform(image, nclasses) sigmasq = sigma ** 2 neglogl = com.negloglikelihood(image, mu, sigmasq, nclasses) initial_segmentation = icm.initialize_maximum_likelihood(neglogl) npt.assert_(initial_segmentation.max() == nclasses - 1) npt.assert_(initial_segmentation.min() == 0) mu, sigma = com.seg_stats(image, initial_segmentation, nclasses) sigmasq = sigma ** 2 npt.assert_(mu[0] >= 0.0) npt.assert_(mu[1] >= 0.0) npt.assert_(mu[2] >= 0.0) npt.assert_(mu[3] >= 0.0) npt.assert_(sigmasq[0] >= 0.0) npt.assert_(sigmasq[1] >= 0.0) npt.assert_(sigmasq[2] >= 0.0) npt.assert_(sigmasq[3] >= 0.0) if background_noise: zero = np.zeros_like(image) + 0.001 zero_noise = add_noise(zero, 10000, 1, noise_type='gaussian') image_gauss = np.where(image == 0, zero_noise, image) else: image_gauss = image final_segmentation = np.empty_like(image) seg_init = initial_segmentation.copy() energies = [] for i in range(max_iter): PLN = icm.prob_neighborhood(initial_segmentation, beta, nclasses) npt.assert_(np.all((PLN >= 0) & (PLN <= 1.0))) if beta == 0.0: npt.assert_almost_equal(PLN[50, 50, 1, 0], 0.25, True) npt.assert_almost_equal(PLN[50, 50, 1, 1], 0.25, True) npt.assert_almost_equal(PLN[50, 50, 1, 2], 0.25, True) npt.assert_almost_equal(PLN[50, 50, 1, 3], 0.25, True) npt.assert_almost_equal(PLN[147, 129, 1, 0], 0.25, True) npt.assert_almost_equal(PLN[147, 129, 1, 1], 0.25, True) npt.assert_almost_equal(PLN[147, 129, 1, 2], 0.25, True) npt.assert_almost_equal(PLN[147, 129, 1, 3], 0.25, True) npt.assert_almost_equal(PLN[61, 152, 1, 0], 0.25, True) npt.assert_almost_equal(PLN[61, 152, 1, 1], 0.25, True) npt.assert_almost_equal(PLN[61, 152, 1, 2], 0.25, True) npt.assert_almost_equal(PLN[61, 152, 1, 3], 0.25, True) npt.assert_almost_equal(PLN[100, 100, 1, 0], 0.25, True) npt.assert_almost_equal(PLN[100, 100, 1, 1], 0.25, True) 
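# --- A minimal end-to-end sketch of the TissueClassifierHMRF imported at the
# top of this module (from dipy.segment.tissue), mirroring what test_classify
# checks further down: a small synthetic 3D volume with a few intensity levels
# is segmented into `nclasses` tissue classes plus background. The toy volume
# and the chosen beta/max_iter values are made up for illustration.
import numpy as np
from dipy.segment.tissue import TissueClassifierHMRF

rng = np.random.RandomState(0)
toy = np.zeros((40, 40, 3))
toy[10:30, 10:30, :] = 1.0
toy[15:25, 15:25, :] = 2.0
toy += 0.05 * rng.randn(*toy.shape)      # mild Gaussian noise

hmrf = TissueClassifierHMRF(verbose=False)
seg_init, seg_final, pve = hmrf.classify(toy, nclasses=3, beta=0.1, max_iter=5)
print(seg_final.shape, pve.shape)        # (40, 40, 3) and (40, 40, 3, 3)
# seg_final holds integer labels from 0 (background) up to nclasses.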
npt.assert_almost_equal(PLN[100, 100, 1, 2], 0.25, True) npt.assert_almost_equal(PLN[100, 100, 1, 3], 0.25, True) PLY = com.prob_image(image_gauss, nclasses, mu, sigmasq, PLN) npt.assert_(np.all((PLY >= 0) & (PLY <= 1.0))) npt.assert_(PLY[50, 50, 1, 0] > PLY[50, 50, 1, 1]) npt.assert_(PLY[50, 50, 1, 0] > PLY[50, 50, 1, 2]) npt.assert_(PLY[50, 50, 1, 0] > PLY[50, 50, 1, 3]) npt.assert_(PLY[100, 100, 1, 3] > PLY[100, 100, 1, 0]) npt.assert_(PLY[100, 100, 1, 3] > PLY[100, 100, 1, 1]) npt.assert_(PLY[100, 100, 1, 3] > PLY[100, 100, 1, 2]) mu_upd, sigmasq_upd = com.update_param(image_gauss, PLY, mu, nclasses) npt.assert_(mu_upd[0] >= 0.0) npt.assert_(mu_upd[1] >= 0.0) npt.assert_(mu_upd[2] >= 0.0) npt.assert_(mu_upd[3] >= 0.0) npt.assert_(sigmasq_upd[0] >= 0.0) npt.assert_(sigmasq_upd[1] >= 0.0) npt.assert_(sigmasq_upd[2] >= 0.0) npt.assert_(sigmasq_upd[3] >= 0.0) negll = com.negloglikelihood(image_gauss, mu_upd, sigmasq_upd, nclasses) npt.assert_(negll[50, 50, 1, 0] < negll[50, 50, 1, 1]) npt.assert_(negll[50, 50, 1, 0] < negll[50, 50, 1, 2]) npt.assert_(negll[50, 50, 1, 0] < negll[50, 50, 1, 3]) npt.assert_(negll[100, 100, 1, 3] < negll[100, 100, 1, 0]) npt.assert_(negll[100, 100, 1, 3] < negll[100, 100, 1, 1]) npt.assert_(negll[100, 100, 1, 3] < negll[100, 100, 1, 2]) final_segmentation, energy = icm.icm_ising(negll, beta, initial_segmentation) print(energy[energy > -np.inf].sum()) energies.append(energy[energy > -np.inf].sum()) initial_segmentation = final_segmentation.copy() mu = mu_upd.copy() sigmasq = sigmasq_upd.copy() npt.assert_(energies[-1] < energies[0]) difference_map = np.abs(seg_init - final_segmentation) npt.assert_(np.abs(np.sum(difference_map)) != 0) def test_square_iter(): com = ConstantObservationModel() icm = IteratedConditionalModes() initial_segmentation = square mu, sigma = com.seg_stats(square_gauss, initial_segmentation, nclasses) sigmasq = sigma ** 2 npt.assert_(mu[0] >= 0.0) npt.assert_(mu[1] >= 0.0) npt.assert_(mu[2] >= 0.0) npt.assert_(mu[3] >= 0.0) npt.assert_(sigmasq[0] >= 0.0) npt.assert_(sigmasq[1] >= 0.0) npt.assert_(sigmasq[2] >= 0.0) npt.assert_(sigmasq[3] >= 0.0) final_segmentation = np.empty_like(square_gauss) seg_init = initial_segmentation.copy() energies = [] for i in range(max_iter): print('\n') print('>> Iteration: ' + str(i)) print('\n') PLN = icm.prob_neighborhood(initial_segmentation, beta, nclasses) npt.assert_(np.all((PLN >= 0) & (PLN <= 1.0))) if beta == 0.0: npt.assert_(PLN[25, 25, 1, 0] == 0.25) npt.assert_(PLN[25, 25, 1, 1] == 0.25) npt.assert_(PLN[25, 25, 1, 2] == 0.25) npt.assert_(PLN[25, 25, 1, 3] == 0.25) npt.assert_(PLN[50, 50, 1, 0] == 0.25) npt.assert_(PLN[50, 50, 1, 1] == 0.25) npt.assert_(PLN[50, 50, 1, 2] == 0.25) npt.assert_(PLN[50, 50, 1, 3] == 0.25) npt.assert_(PLN[90, 90, 1, 0] == 0.25) npt.assert_(PLN[90, 90, 1, 1] == 0.25) npt.assert_(PLN[90, 90, 1, 2] == 0.25) npt.assert_(PLN[90, 90, 1, 3] == 0.25) npt.assert_(PLN[125, 125, 1, 0] == 0.25) npt.assert_(PLN[125, 125, 1, 1] == 0.25) npt.assert_(PLN[125, 125, 1, 2] == 0.25) npt.assert_(PLN[125, 125, 1, 3] == 0.25) PLY = com.prob_image(square_gauss, nclasses, mu, sigmasq, PLN) npt.assert_(np.all((PLY >= 0) & (PLY <= 1.0))) npt.assert_(PLY[25, 25, 1, 0] > PLY[25, 25, 1, 1]) npt.assert_(PLY[25, 25, 1, 0] > PLY[25, 25, 1, 2]) npt.assert_(PLY[25, 25, 1, 0] > PLY[25, 25, 1, 3]) npt.assert_(PLY[125, 125, 1, 3] > PLY[125, 125, 1, 0]) npt.assert_(PLY[125, 125, 1, 3] > PLY[125, 125, 1, 1]) npt.assert_(PLY[125, 125, 1, 3] > PLY[125, 125, 1, 2]) mu_upd, sigmasq_upd = 
com.update_param(square_gauss, PLY, mu, nclasses) npt.assert_(mu_upd[0] >= 0.0) npt.assert_(mu_upd[1] >= 0.0) npt.assert_(mu_upd[2] >= 0.0) npt.assert_(mu_upd[3] >= 0.0) npt.assert_(sigmasq_upd[0] >= 0.0) npt.assert_(sigmasq_upd[1] >= 0.0) npt.assert_(sigmasq_upd[2] >= 0.0) npt.assert_(sigmasq_upd[3] >= 0.0) negll = com.negloglikelihood(square_gauss, mu_upd, sigmasq_upd, nclasses) npt.assert_(negll[25, 25, 1, 0] < negll[25, 25, 1, 1]) npt.assert_(negll[25, 25, 1, 0] < negll[25, 25, 1, 2]) npt.assert_(negll[25, 25, 1, 0] < negll[25, 25, 1, 3]) npt.assert_(negll[100, 100, 1, 3] < negll[125, 125, 1, 0]) npt.assert_(negll[100, 100, 1, 3] < negll[125, 125, 1, 1]) npt.assert_(negll[100, 100, 1, 3] < negll[125, 125, 1, 2]) final_segmentation, energy = icm.icm_ising(negll, beta, initial_segmentation) energies.append(energy[energy > -np.inf].sum()) initial_segmentation = final_segmentation.copy() mu = mu_upd.copy() sigmasq = sigmasq_upd.copy() difference_map = np.abs(seg_init - final_segmentation) npt.assert_(np.abs(np.sum(difference_map)) == 0.0) def test_icm_square(): com = ConstantObservationModel() icm = IteratedConditionalModes() initial_segmentation = square.copy() mu, sigma = com.seg_stats(square_1, initial_segmentation, nclasses) sigmasq = sigma ** 2 npt.assert_(mu[0] >= 0.0) npt.assert_(mu[1] >= 0.0) npt.assert_(mu[2] >= 0.0) npt.assert_(mu[3] >= 0.0) npt.assert_(sigmasq[0] >= 0.0) npt.assert_(sigmasq[1] >= 0.0) npt.assert_(sigmasq[2] >= 0.0) npt.assert_(sigmasq[3] >= 0.0) negll = com.negloglikelihood(square_1, mu, sigmasq, nclasses) final_segmentation_1 = np.empty_like(square_1) final_segmentation_2 = np.empty_like(square_1) beta = 0.0 for i in range(max_iter): print('\n') print('>> Iteration: ' + str(i)) print('\n') final_segmentation_1, energy_1 = icm.icm_ising(negll, beta, initial_segmentation) initial_segmentation = final_segmentation_1.copy() beta = 2 initial_segmentation = square.copy() for j in range(max_iter): print('\n') print('>> Iteration: ' + str(j)) print('\n') final_segmentation_2, energy_2 = icm.icm_ising(negll, beta, initial_segmentation) initial_segmentation = final_segmentation_2.copy() difference_map = np.abs(final_segmentation_1 - final_segmentation_2) npt.assert_(np.abs(np.sum(difference_map)) != 0) def test_classify(): imgseg = TissueClassifierHMRF() beta = 0.1 tolerance = 0.0001 max_iter = 10 npt.assert_(image.max() == 1.0) npt.assert_(image.min() == 0.0) # First we test without setting iterations and tolerance seg_init, seg_final, PVE = imgseg.classify(image, nclasses, beta) npt.assert_(seg_final.max() == nclasses) npt.assert_(seg_final.min() == 0.0) # Second we test it with just changing the tolerance seg_init, seg_final, PVE = imgseg.classify(image, nclasses, beta, tolerance) npt.assert_(seg_final.max() == nclasses) npt.assert_(seg_final.min() == 0.0) # Third we test it with just the iterations seg_init, seg_final, PVE = imgseg.classify(image, nclasses, beta, max_iter) npt.assert_(seg_final.max() == nclasses) npt.assert_(seg_final.min() == 0.0) # Next we test saving the history of accumulated energies from ICM imgseg = TissueClassifierHMRF(save_history=True) seg_init, seg_final, PVE = imgseg.classify(200 * image, nclasses, beta, tolerance) npt.assert_(seg_final.max() == nclasses) npt.assert_(seg_final.min() == 0.0) npt.assert_(imgseg.energies_sum[0] > imgseg.energies_sum[-1]) if __name__ == '__main__': npt.run_module_suite() dipy-0.13.0/dipy/segment/tests/test_qb.py000066400000000000000000000011721317371701200203270ustar00rootroot00000000000000import numpy as np 
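# --- A minimal QuickBundles sketch matching the behaviour exercised in this
# and the following test module: with the default metric (MDF on streamlines
# resampled to 18 points), streamlines can be clustered directly and a
# suitable threshold separates well-separated bundles. The two toy bundles
# and the threshold value are made up for illustration.
import numpy as np
from dipy.segment.clustering import QuickBundles

line = np.array([[0, 0, 0], [5, 0, 0], [10, 0, 0]], dtype="float32")
bundle_a = [line + np.array([0, i, 0], dtype="float32") for i in range(3)]
bundle_b = [line + np.array([0, i, 50], dtype="float32") for i in range(3)]
streamlines = bundle_a + bundle_b

qb = QuickBundles(threshold=5.)
clusters = qb.cluster(streamlines)
print(len(clusters))                         # 2 -- one cluster per bundle
print([list(c.indices) for c in clusters])   # [[0, 1, 2], [3, 4, 5]]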
import nibabel as nib from nose.tools import (assert_true, assert_false, assert_equal, assert_almost_equal, assert_raises) from numpy.testing import assert_array_equal, assert_array_almost_equal from dipy.data import get_data from dipy.segment.quickbundles import QuickBundles def test_qbundles(): streams, hdr = nib.trackvis.read(get_data('fornix')) T = [s[0] for s in streams] Trk = np.array(T, dtype=np.object) qb = QuickBundles(T, 10., 12) Tqb = qb.virtuals() # Tqbe,Tqbei=qb.exemplars(T) Tqbe, Tqbei = qb.exemplars() assert_equal(4, qb.total_clusters) dipy-0.13.0/dipy/segment/tests/test_quickbundles.py000066400000000000000000000154351317371701200224250ustar00rootroot00000000000000import numpy as np import itertools from nose.tools import assert_equal, assert_raises from numpy.testing import assert_array_equal, run_module_suite from dipy.testing.memory import get_type_refcount from dipy.testing import assert_arrays_equal from dipy.segment.clustering import QuickBundles import dipy.segment.metric as dipymetric from dipy.segment.clustering_algorithms import quickbundles import dipy.tracking.streamline as streamline_utils dtype = "float32" threshold = 7 data = [np.arange(3 * 5, dtype=dtype).reshape((-1, 3)) + 2 * threshold, np.arange(3 * 10, dtype=dtype).reshape((-1, 3)) + 0 * threshold, np.arange(3 * 15, dtype=dtype).reshape((-1, 3)) + 8 * threshold, np.arange(3 * 17, dtype=dtype).reshape((-1, 3)) + 2 * threshold, np.arange(3 * 20, dtype=dtype).reshape((-1, 3)) + 8 * threshold] clusters_truth = [[0, 1], [2, 4], [3]] def test_quickbundles_empty_data(): threshold = 10 metric = dipymetric.SumPointwiseEuclideanMetric() clusters = quickbundles([], metric, threshold) assert_equal(len(clusters), 0) assert_equal(len(clusters.centroids), 0) clusters = quickbundles([], metric, threshold, ordering=[]) assert_equal(len(clusters), 0) assert_equal(len(clusters.centroids), 0) def test_quickbundles_wrong_metric(): assert_raises(ValueError, QuickBundles, threshold=10., metric="WrongMetric") def test_quickbundles_shape_uncompatibility(): # QuickBundles' old default metric (AveragePointwiseEuclideanMetric, # aka MDF) requires that all streamlines have the same number of points. metric = dipymetric.AveragePointwiseEuclideanMetric() qb = QuickBundles(threshold=20., metric=metric) assert_raises(ValueError, qb.cluster, data) # QuickBundles' new default metric (AveragePointwiseEuclideanMetric, # aka MDF combined with ResampleFeature) will automatically resample # streamlines so they all have 18 points. qb = QuickBundles(threshold=20.) clusters1 = qb.cluster(data) feature = dipymetric.ResampleFeature(nb_points=18) metric = dipymetric.AveragePointwiseEuclideanMetric(feature) qb = QuickBundles(threshold=20., metric=metric) clusters2 = qb.cluster(data) assert_arrays_equal(list(itertools.chain(*clusters1)), list(itertools.chain(*clusters2))) def test_quickbundles_2D(): # Test quickbundles clustering using 2D points and the Eulidean metric. 
rng = np.random.RandomState(42) data = [] data += [rng.randn(1, 2) + np.array([0, 0]) for i in range(1)] data += [rng.randn(1, 2) + np.array([10, 10]) for i in range(2)] data += [rng.randn(1, 2) + np.array([-10, 10]) for i in range(3)] data += [rng.randn(1, 2) + np.array([10, -10]) for i in range(4)] data += [rng.randn(1, 2) + np.array([-10, -10]) for i in range(5)] data = np.array(data, dtype=dtype) clusters_truth = [[0], [1, 2], [3, 4, 5], [6, 7, 8, 9], [10, 11, 12, 13, 14]] # # Uncomment the following to visualize this test # import pylab as plt # plt.plot(*zip(*data[0:1, 0]), linestyle='None', marker='s') # plt.plot(*zip(*data[1:3, 0]), linestyle='None', marker='o') # plt.plot(*zip(*data[3:6, 0]), linestyle='None', marker='+') # plt.plot(*zip(*data[6:10, 0]), linestyle='None', marker='.') # plt.plot(*zip(*data[10:, 0]), linestyle='None', marker='*') # plt.show() # Theorically using a threshold above the following value will not # produce expected results. threshold = np.sqrt(2*(10**2))-np.sqrt(2) metric = dipymetric.SumPointwiseEuclideanMetric() ordering = np.arange(len(data)) for i in range(100): rng.shuffle(ordering) clusters = quickbundles(data, metric, threshold, ordering=ordering) # Check if clusters are the same as 'clusters_truth' for cluster in clusters: # Find the corresponding cluster in 'clusters_truth' for cluster_truth in clusters_truth: if cluster_truth[0] in cluster.indices: assert_equal(sorted(cluster.indices), sorted(cluster_truth)) # Cluster each cluster again using a small threshold for cluster in clusters: subclusters = quickbundles(data, metric, threshold=0, ordering=cluster.indices) assert_equal(len(subclusters), len(cluster)) assert_equal(sorted(itertools.chain(*subclusters)), sorted(cluster.indices)) # A very large threshold should produce only 1 cluster clusters = quickbundles(data, metric, threshold=np.inf) assert_equal(len(clusters), 1) assert_equal(len(clusters[0]), len(data)) assert_array_equal(clusters[0].indices, range(len(data))) # A very small threshold should produce only N clusters where N=len(data) clusters = quickbundles(data, metric, threshold=0) assert_equal(len(clusters), len(data)) assert_array_equal(list(map(len, clusters)), np.ones(len(data))) assert_array_equal([idx for cluster in clusters for idx in cluster.indices], range(len(data))) def test_quickbundles_streamlines(): rdata = streamline_utils.set_number_of_points(data, 10) qb = QuickBundles(threshold=2*threshold) clusters = qb.cluster(rdata) # By default `refdata` refers to data being clustered. assert_equal(clusters.refdata, rdata) # Set `refdata` to return indices instead of actual data points. 
clusters.refdata = None assert_array_equal(list(itertools.chain(*clusters)), list(itertools.chain(*clusters_truth))) # Cluster read-only data for datum in rdata: datum.setflags(write=False) clusters = qb.cluster(rdata) # Cluster data with different dtype (should be converted into float32) for datatype in [np.float64, np.int32, np.int64]: newdata = [datum.astype(datatype) for datum in rdata] clusters = qb.cluster(newdata) assert_equal(clusters.centroids[0].dtype, np.float32) def test_quickbundles_with_not_order_invariant_metric(): metric = dipymetric.AveragePointwiseEuclideanMetric() qb = QuickBundles(threshold=np.inf, metric=metric) streamline = np.arange(10*3, dtype=dtype).reshape((-1, 3)) streamlines = [streamline, streamline[::-1]] clusters = qb.cluster(streamlines) assert_equal(len(clusters), 1) assert_array_equal(clusters[0].centroid, streamline) def test_quickbundles_memory_leaks(): qb = QuickBundles(threshold=2*threshold) type_name_pattern = "memoryview" initial_types_refcount = get_type_refcount(type_name_pattern) qb.cluster(data) # At this point, all memoryviews created during clustering should be freed. assert_equal(get_type_refcount(type_name_pattern), initial_types_refcount) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/segment/threshold.py000066400000000000000000000057541317371701200175320ustar00rootroot00000000000000import numpy as np def otsu(image, nbins=256): """ Return threshold value based on Otsu's method. Copied from scikit-image to remove dependency. Parameters ---------- image : array Input image. nbins : int Number of bins used to calculate histogram. This value is ignored for integer arrays. Returns ------- threshold : float Threshold value. """ hist, bin_centers = np.histogram(image, nbins) hist = hist.astype(np.float) # class probabilities for all possible thresholds weight1 = np.cumsum(hist) weight2 = np.cumsum(hist[::-1])[::-1] # class means for all possible thresholds mean1 = np.cumsum(hist * bin_centers[1:]) / weight1 mean2 = (np.cumsum((hist * bin_centers[1:])[::-1]) / weight2[::-1])[::-1] # Clip ends to align class 1 and class 2 variables: # The last value of `weight1`/`mean1` should pair with zero values in # `weight2`/`mean2`, which do not exist. variance12 = weight1[:-1] * weight2[1:] * (mean1[:-1] - mean2[1:])**2 idx = np.argmax(variance12) threshold = bin_centers[:-1][idx] return threshold def upper_bound_by_rate(data, rate=0.05): r""" Adjusts upper intensity boundary using rates It calculates the image intensity histogram, and based on the rate value it decide what is the upperbound value for intensity normalization, usually lower bound is 0. 
The rate is the ratio between the amount of pixels in every bins and the bins with highest pixel amount Parameters ----------- data : float Input intensity value data rate : float representing the threshold whether a spicific histogram bin that should be count in the normalization range Returns ------- high : float the upper_bound value for normalization """ g, h = np.histogram(data) m = np.zeros((10, 3)) low = data.min() high = data.max() for i in np.array(range(10)): m[i, 0] = g[i] m[i, 1] = h[i] m[i, 2] = h[i + 1] g = sorted(g,reverse = True) sz = np.size(g) Index = 0 for i in np.array(range(sz)): if g[i] / g[0] > rate: Index = Index + 1 for i in np.array(range(10)): for j in np.array(range(Index)): if g[j] == m[i, 0]: high = m[i, 2] return high def upper_bound_by_percent(data, percent=1): """ Find the upper bound for visualization of medical images Calculate the histogram of the image and go right to left until you find the bound that contains more than a percentage of the image. Parameters ---------- data : ndarray percent : float Returns ------- upper_bound : float """ percent = percent / 100. values, bounds = np.histogram(data, 20) total_voxels = np.prod(data.shape) agg = 0 for i in range(len(values) - 1, 0, -1): agg += values[i] if agg / float(total_voxels) > percent: return bounds[i] dipy-0.13.0/dipy/segment/tissue.py000066400000000000000000000140771317371701200170500ustar00rootroot00000000000000import numpy as np from dipy.sims.voxel import add_noise from dipy.segment.mrf import (ConstantObservationModel, IteratedConditionalModes) class TissueClassifierHMRF(object): r""" This class contains the methods for tissue classification using the Markov Random Fields modeling approach """ def __init__(self, save_history=False, verbose=True): self.save_history = save_history self.segmentations = [] self.pves = [] self.energies = [] self.energies_sum = [] self.verbose = verbose def classify(self, image, nclasses, beta, tolerance=None, max_iter=None): r""" This method uses the Maximum a posteriori - Markov Random Field approach for segmentation by using the Iterative Conditional Modes and Expectation Maximization to estimate the parameters. Parameters ---------- image : ndarray, 3D structural image. nclasses : int, number of desired classes. beta : float, smoothing parameter, the higher this number the smoother the output will be. tolerance: float, value that defines the percentage of change tolerated to prevent the ICM loop to stop. Default is 1e-05. max_iter : float, fixed number of desired iterations. Default is 100. If the user only specifies this parameter, the tolerance value will not be considered. If none of these two parameters Returns ------- initial_segmentation : ndarray, 3D segmented image with all tissue types specified in nclasses. final_segmentation : ndarray, 3D final refined segmentation containing all tissue types. PVE : ndarray, 3D probability map of each tissue type. 
""" nclasses = nclasses + 1 # One extra class for the background energy_sum = [1e-05] com = ConstantObservationModel() icm = IteratedConditionalModes() if image.max() > 1: image = np.interp(image, [0, image.max()], [0.0, 1.0]) mu, sigma = com.initialize_param_uniform(image, nclasses) p = np.argsort(mu) mu = mu[p] sigma = sigma[p] sigmasq = sigma ** 2 neglogl = com.negloglikelihood(image, mu, sigmasq, nclasses) seg_init = icm.initialize_maximum_likelihood(neglogl) mu, sigma = com.seg_stats(image, seg_init, nclasses) sigmasq = sigma ** 2 zero = np.zeros_like(image) + 0.001 zero_noise = add_noise(zero, 10000, 1, noise_type='gaussian') image_gauss = np.where(image == 0, zero_noise, image) final_segmentation = np.empty_like(image) initial_segmentation = seg_init.copy() if max_iter is not None and tolerance is None: for i in range(max_iter): if self.verbose: print('>> Iteration: ' + str(i)) PLN = icm.prob_neighborhood(seg_init, beta, nclasses) PVE = com.prob_image(image_gauss, nclasses, mu, sigmasq, PLN) mu_upd, sigmasq_upd = com.update_param(image_gauss, PVE, mu, nclasses) ind = np.argsort(mu_upd) mu_upd = mu_upd[ind] sigmasq_upd = sigmasq_upd[ind] negll = com.negloglikelihood(image_gauss, mu_upd, sigmasq_upd, nclasses) final_segmentation, energy = icm.icm_ising(negll, beta, seg_init) if self.save_history: self.segmentations.append(final_segmentation) self.pves.append(PVE) self.energies.append(energy) self.energies_sum.append(energy[energy > -np.inf].sum()) seg_init = final_segmentation.copy() mu = mu_upd.copy() sigmasq = sigmasq_upd.copy() else: max_iter = 100 for i in range(max_iter): if self.verbose: print('>> Iteration: ' + str(i)) PLN = icm.prob_neighborhood(seg_init, beta, nclasses) PVE = com.prob_image(image_gauss, nclasses, mu, sigmasq, PLN) mu_upd, sigmasq_upd = com.update_param(image_gauss, PVE, mu, nclasses) ind = np.argsort(mu_upd) mu_upd = mu_upd[ind] sigmasq_upd = sigmasq_upd[ind] negll = com.negloglikelihood(image_gauss, mu_upd, sigmasq_upd, nclasses) final_segmentation, energy = icm.icm_ising(negll, beta, seg_init) energy_sum.append(energy[energy > -np.inf].sum()) if self.save_history: self.segmentations.append(final_segmentation) self.pves.append(PVE) self.energies.append(energy) self.energies_sum.append(energy[energy > -np.inf].sum()) if tolerance is None: tolerance = 1e-05 if i % 10 == 0 and i != 0: tol = tolerance * (np.amax(energy_sum) - np.amin(energy_sum)) test_dist = np.absolute(np.amax( energy_sum[np.size(energy_sum) - 5: i]) - np.amin(energy_sum[np.size(energy_sum) - 5: i]) ) if test_dist < tol: break seg_init = final_segmentation.copy() mu = mu_upd.copy() sigmasq = sigmasq_upd.copy() PVE = PVE[..., 1:] return initial_segmentation, final_segmentation, PVE dipy-0.13.0/dipy/sims/000077500000000000000000000000001317371701200144625ustar00rootroot00000000000000dipy-0.13.0/dipy/sims/__init__.py000066400000000000000000000000271317371701200165720ustar00rootroot00000000000000# init for simulations dipy-0.13.0/dipy/sims/phantom.py000066400000000000000000000156421317371701200165120ustar00rootroot00000000000000import numpy as np import scipy.stats as stats from dipy.sims.voxel import SingleTensor, diffusion_evals import dipy.sims.voxel as vox from dipy.core.geometry import vec2vec_rotmat from dipy.data import get_data from dipy.core.gradients import gradient_table def add_noise(vol, snr=1.0, S0=None, noise_type='rician'): """ Add noise of specified distribution to a 4D array. 
Parameters ----------- vol : array, shape (X,Y,Z,W) Diffusion measurements in `W` directions at each ``(X, Y, Z)`` voxel position. snr : float, optional The desired signal-to-noise ratio. (See notes below.) S0 : float, optional Reference signal for specifying `snr` (defaults to 1). noise_type : string, optional The distribution of noise added. Can be either 'gaussian' for Gaussian distributed noise, 'rician' for Rice-distributed noise (default) or 'rayleigh' for a Rayleigh distribution. Returns -------- vol : array, same shape as vol Volume with added noise. Notes ----- SNR is defined here, following [1]_, as ``S0 / sigma``, where ``sigma`` is the standard deviation of the two Gaussian distributions forming the real and imaginary components of the Rician noise distribution (see [2]_). References ---------- .. [1] Descoteaux, Angelino, Fitzgibbons and Deriche (2007) Regularized, fast and robust q-ball imaging. MRM, 58: 497-510 .. [2] Gudbjartson and Patz (2008). The Rician distribution of noisy MRI data. MRM 34: 910-914. Examples -------- >>> signal = np.arange(800).reshape(2, 2, 2, 100) >>> signal_w_noise = add_noise(signal, snr=10, noise_type='rician') """ orig_shape = vol.shape vol_flat = np.reshape(vol.copy(), (-1, vol.shape[-1])) if S0 is None: S0 = np.max(vol) for vox_idx, signal in enumerate(vol_flat): vol_flat[vox_idx] = vox.add_noise(signal, snr=snr, S0=S0, noise_type=noise_type) return np.reshape(vol_flat, orig_shape) def diff2eigenvectors(dx, dy, dz): """ numerical derivatives 2 eigenvectors """ basis = np.eye(3) u = np.array([dx, dy, dz]) u = u/np.linalg.norm(u) R = vec2vec_rotmat(basis[:, 0], u) eig0 = u eig1 = np.dot(R, basis[:, 1]) eig2 = np.dot(R, basis[:, 2]) eigs = np.zeros((3, 3)) eigs[:, 0] = eig0 eigs[:, 1] = eig1 eigs[:, 2] = eig2 return eigs, R def orbital_phantom(gtab=None, evals=diffusion_evals, func=None, t=np.linspace(0, 2 * np.pi, 1000), datashape=(64, 64, 64, 65), origin=(32, 32, 32), scale=(25, 25, 25), angles=np.linspace(0, 2 * np.pi, 32), radii=np.linspace(0.2, 2, 6), S0=100., snr=None): """Create a phantom based on a 3-D orbit ``f(t) -> (x,y,z)``. Parameters ----------- gtab : GradientTable Gradient table of measurement directions. evals : array, shape (3,) Tensor eigenvalues. func : user defined function f(t)->(x,y,z) It could be desirable for ``-1=>> def f(t): ... x = np.sin(t) ... y = np.cos(t) ... z = np.linspace(-1, 1, len(x)) ... return x, y, z >>> data = orbital_phantom(func=f) """ if gtab is None: fimg, fbvals, fbvecs = get_data('small_64D') gtab = gradient_table(fbvals, fbvecs) if func is None: x = np.sin(t) y = np.cos(t) z = np.zeros(t.shape) else: x, y, z = func(t) dx = np.diff(x) dy = np.diff(y) dz = np.diff(z) x = scale[0] * x + origin[0] y = scale[1] * y + origin[1] z = scale[2] * z + origin[2] bx = np.zeros(len(angles)) by = np.sin(angles) bz = np.cos(angles) # The entire volume is considered to be inside the brain. # Voxels without a fiber crossing through them are taken # to be isotropic with signal = S0. 
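    # The loop below walks along the sampled curve: at each step the local
    # tangent (dx, dy, dz) defines the principal eigenvector of a single
    # tensor, the corresponding signal is added at the curve voxel, and the
    # same signal is repeated at ring offsets (radii x angles) rotated into
    # the plane perpendicular to the tangent, giving the bundle its
    # thickness. The volume is then rescaled so that each voxel's maximum
    # value across measurements equals S0, and Rician noise is added last
    # when snr is not None.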
vol = np.zeros(datashape) + S0 for i in range(len(dx)): evecs, R = diff2eigenvectors(dx[i], dy[i], dz[i]) S = SingleTensor(gtab, S0, evals, evecs, snr=None) vol[int(x[i]), int(y[i]), int(z[i]), :] += S for r in radii: for j in range(len(angles)): rb = np.dot(R, np.array([bx[j], by[j], bz[j]])) ix = int(x[i] + r * rb[0]) iy = int(y[i] + r * rb[1]) iz = int(z[i] + r * rb[2]) vol[ix, iy, iz] = vol[ix, iy, iz] + S vol = vol / np.max(vol, axis=-1)[..., np.newaxis] vol *= S0 if snr is not None: vol = add_noise(vol, snr, S0=S0, noise_type='rician') return vol if __name__ == "__main__": # TODO: this can become a nice tutorial for generating phantoms def f(t): x = np.sin(t) y = np.cos(t) # z=np.zeros(t.shape) z = np.linspace(-1, 1, len(x)) return x, y, z # helix vol = orbital_phantom(func=f) def f2(t): x = np.linspace(-1, 1, len(t)) y = np.linspace(-1, 1, len(t)) z = np.zeros(x.shape) return x, y, z # first direction vol2 = orbital_phantom(func=f2) def f3(t): x = np.linspace(-1, 1, len(t)) y = -np.linspace(-1, 1, len(t)) z = np.zeros(x.shape) return x, y, z # second direction vol3 = orbital_phantom(func=f3) # double crossing vol23 = vol2 + vol3 # """ def f4(t): x = np.zeros(t.shape) y = np.zeros(t.shape) z = np.linspace(-1, 1, len(t)) return x, y, z # triple crossing vol4 = orbital_phantom(func=f4) vol234 = vol23 + vol4 voln = add_rician_noise(vol234) # """ # r=fvtk.ren() # fvtk.add(r,fvtk.volume(vol234[...,0])) # fvtk.show(r) # vol234n=add_rician_noise(vol234,20) dipy-0.13.0/dipy/sims/tests/000077500000000000000000000000001317371701200156245ustar00rootroot00000000000000dipy-0.13.0/dipy/sims/tests/__init__.py000066400000000000000000000001211317371701200177270ustar00rootroot00000000000000# Test callable from numpy.testing import Tester test = Tester().test del Tester dipy-0.13.0/dipy/sims/tests/test_phantom.py000066400000000000000000000044721317371701200207120ustar00rootroot00000000000000from __future__ import division import numpy as np from numpy.testing import (assert_, assert_equal, assert_array_equal, assert_array_almost_equal, assert_almost_equal, run_module_suite) from dipy.data import get_data from dipy.reconst.dti import TensorModel from dipy.sims.phantom import orbital_phantom from dipy.core.gradients import gradient_table fimg, fbvals, fbvecs = get_data('small_64D') bvals = np.load(fbvals) bvecs = np.load(fbvecs) bvecs[np.isnan(bvecs)] = 0 gtab = gradient_table(bvals, bvecs) def f(t): """ Helper function used to define a mapping time => xyz """ x = np.linspace(-1, 1, len(t)) y = np.linspace(-1, 1, len(t)) z = np.linspace(-1, 1, len(t)) return x, y, z def test_phantom(): N = 50 vol = orbital_phantom(gtab, func=f, t=np.linspace(0, 2 * np.pi, N), datashape=(10, 10, 10, len(bvals)), origin=(5, 5, 5), scale=(3, 3, 3), angles=np.linspace(0, 2 * np.pi, 16), radii=np.linspace(0.2, 2, 6), S0=100) m = TensorModel(gtab) t = m.fit(vol) FA = t.fa # print vol FA[np.isnan(FA)] = 0 # 686 -> expected FA given diffusivities of [1500, 400, 400] l1, l2, l3 = 1500e-6, 400e-6, 400e-6 expected_fa = (np.sqrt(0.5) * np.sqrt((l1 - l2)**2 + (l2-l3)**2 + (l3-l1)**2) / np.sqrt(l1**2 + l2**2 + l3**2)) assert_array_almost_equal(FA.max(), expected_fa, decimal=2) def test_add_noise(): np.random.seed(1980) N = 50 S0 = 100 options = dict(func=f, t=np.linspace(0, 2 * np.pi, N), datashape=(10, 10, 10, len(bvals)), origin=(5, 5, 5), scale=(3, 3, 3), angles=np.linspace(0, 2 * np.pi, 16), radii=np.linspace(0.2, 2, 6), S0=S0) vol = orbital_phantom(gtab, **options) for snr in [10, 20, 30, 50]: vol_noise = orbital_phantom(gtab, 
snr=snr, **options) sigma = S0 / snr assert_(np.abs(np.var(vol_noise - vol) - sigma ** 2) < 1) if __name__ == "__main__": run_module_suite() dipy-0.13.0/dipy/sims/tests/test_voxel.py000066400000000000000000000312171317371701200203760ustar00rootroot00000000000000import numpy as np from nose.tools import (assert_true, assert_false, assert_equal, assert_almost_equal) from numpy.testing import (assert_array_equal, assert_array_almost_equal, assert_) from dipy.sims.voxel import (_check_directions, SingleTensor, MultiTensor, multi_tensor_odf, all_tensor_evecs, add_noise, single_tensor, sticks_and_ball, multi_tensor_dki, kurtosis_element, dki_signal) from dipy.core.geometry import (vec2vec_rotmat, sphere2cart) from dipy.data import get_data, get_sphere from dipy.core.gradients import gradient_table from dipy.io.gradients import read_bvals_bvecs fimg, fbvals, fbvecs = get_data('small_64D') bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs) gtab = gradient_table(bvals, bvecs) # 2 shells for techniques that requires multishell data bvals_2s = np.concatenate((bvals, bvals * 2), axis=0) bvecs_2s = np.concatenate((bvecs, bvecs), axis=0) gtab_2s = gradient_table(bvals_2s, bvecs_2s) def diff2eigenvectors(dx, dy, dz): """ numerical derivatives 2 eigenvectors """ u = np.array([dx, dy, dz]) u = u / np.linalg.norm(u) R = vec2vec_rotmat(basis[:, 0], u) eig0 = u eig1 = np.dot(R, basis[:, 1]) eig2 = np.dot(R, basis[:, 2]) eigs = np.zeros((3, 3)) eigs[:, 0] = eig0 eigs[:, 1] = eig1 eigs[:, 2] = eig2 return eigs, R def test_check_directions(): # Testing spherical angles for two principal coordinate axis angles = [(0, 0)] # axis z sticks = _check_directions(angles) assert_array_almost_equal(sticks, [[0, 0, 1]]) angles = [(0, 90)] # axis z again (phi can be anything it theta is zero) sticks = _check_directions(angles) assert_array_almost_equal(sticks, [[0, 0, 1]]) angles = [(90, 0)] # axis x sticks = _check_directions(angles) assert_array_almost_equal(sticks, [[1, 0, 0]]) # Testing if directions are already given in cartesian coordinates angles = [(0, 0, 1)] sticks = _check_directions(angles) assert_array_almost_equal(sticks, [[0, 0, 1]]) # Testing more than one direction simultaneously angles = np.array([[90, 0], [30, 0]]) sticks = _check_directions(angles) ref_vec = [np.sin(np.pi*30/180), 0, np.cos(np.pi*30/180)] assert_array_almost_equal(sticks, [[1, 0, 0], ref_vec]) # Testing directions not aligned to planes x = 0, y = 0, or z = 0 the1 = 0 phi1 = 90 the2 = 30 phi2 = 45 angles = np.array([(the1, phi1), (the2, phi2)]) sticks = _check_directions(angles) ref_vec1 = (np.sin(np.pi*the1/180) * np.cos(np.pi*phi1/180), np.sin(np.pi*the1/180) * np.sin(np.pi*phi1/180), np.cos(np.pi*the1/180)) ref_vec2 = (np.sin(np.pi*the2/180) * np.cos(np.pi*phi2/180), np.sin(np.pi*the2/180) * np.sin(np.pi*phi2/180), np.cos(np.pi*the2/180)) assert_array_almost_equal(sticks, [ref_vec1, ref_vec2]) def test_sticks_and_ball(): d = 0.0015 S, sticks = sticks_and_ball(gtab, d=d, S0=1, angles=[(0, 0), ], fractions=[100], snr=None) assert_array_equal(sticks, [[0, 0, 1]]) S_st = SingleTensor(gtab, 1, evals=[d, 0, 0], evecs=[[0, 0, 0], [0, 0, 0], [1, 0, 0]]) assert_array_almost_equal(S, S_st) def test_single_tensor(): evals = np.array([1.4, .35, .35]) * 10 ** (-3) evecs = np.eye(3) S = SingleTensor(gtab, 100, evals, evecs, snr=None) assert_array_almost_equal(S[gtab.b0s_mask], 100) assert_(np.mean(S[~gtab.b0s_mask]) < 100) from dipy.reconst.dti import TensorModel m = TensorModel(gtab) t = m.fit(S) assert_array_almost_equal(t.fa, 0.707, decimal=3) def 
test_multi_tensor(): sphere = get_sphere('symmetric724') vertices = sphere.vertices mevals = np.array(([0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003])) e0 = np.array([np.sqrt(2) / 2., np.sqrt(2) / 2., 0]) e1 = np.array([0, np.sqrt(2) / 2., np.sqrt(2) / 2.]) mevecs = [all_tensor_evecs(e0), all_tensor_evecs(e1)] # odf = multi_tensor_odf(vertices, [0.5, 0.5], mevals, mevecs) # assert_(odf.shape == (len(vertices),)) # assert_(np.all(odf <= 1) & np.all(odf >= 0)) fimg, fbvals, fbvecs = get_data('small_101D') bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs) gtab = gradient_table(bvals, bvecs) s1 = single_tensor(gtab, 100, mevals[0], mevecs[0], snr=None) s2 = single_tensor(gtab, 100, mevals[1], mevecs[1], snr=None) Ssingle = 0.5*s1 + 0.5*s2 S, sticks = MultiTensor(gtab, mevals, S0=100, angles=[(90, 45), (45, 90)], fractions=[50, 50], snr=None) assert_array_almost_equal(S, Ssingle) def test_snr(): np.random.seed(1978) s = single_tensor(gtab) # For reasonably large SNR, var(signal) ~= sigma**2, where sigma = 1/SNR for snr in [5, 10, 20]: sigma = 1.0 / snr for j in range(1000): s_noise = add_noise(s, snr, 1, noise_type='rician') assert_array_almost_equal(np.var(s_noise - s), sigma ** 2, decimal=2) def test_all_tensor_evecs(): e0 = np.array([1/np.sqrt(2), 1/np.sqrt(2), 0]) # Vectors are returned column-wise! desired = np.array([[1/np.sqrt(2), 1/np.sqrt(2), 0], [-1/np.sqrt(2), 1/np.sqrt(2), 0], [0, 0, 1]]).T assert_array_almost_equal(all_tensor_evecs(e0), desired) def test_kurtosis_elements(): """ Testing symmetry of the elements of the KT As an 4th order tensor, KT has 81 elements. However, due to diffusion symmetry the KT is fully characterized by 15 independent elements. This test checks for this property. """ # two fiber not aligned to planes x = 0, y = 0, or z = 0 mevals = np.array([[0.00099, 0, 0], [0.00226, 0.00087, 0.00087], [0.00099, 0, 0], [0.00226, 0.00087, 0.00087]]) angles = [(80, 10), (80, 10), (20, 30), (20, 30)] fie = 0.49 # intra axonal water fraction frac = [fie * 50, (1-fie) * 50, fie * 50, (1-fie) * 50] sticks = _check_directions(angles) mD = np.zeros((len(frac), 3, 3)) for i in range(len(frac)): R = all_tensor_evecs(sticks[i]) mD[i] = np.dot(np.dot(R, np.diag(mevals[i])), R.T) # compute global DT D = np.zeros((3, 3)) for i in range(len(frac)): D = D + frac[i]*mD[i] # compute voxel's MD MD = (D[0][0] + D[1][1] + D[2][2]) / 3 # Reference dictionary with the 15 independent elements. # Note: The multiplication of the indexes (i+1) * (j+1) * (k+1) * (l+1) # for of an elements is only equal to this multiplication for another # element if an only if the element corresponds to an symmetry element. 
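    # For example, the index sets (0, 0, 1, 1) and (0, 1, 0, 1) both give the
    # product 1 * 1 * 2 * 2 = 4 and, by the symmetry of the kurtosis tensor,
    # both refer to the same element W_xxyy; for indexes restricted to
    # {0, 1, 2} no two non-equivalent index sets share the same product.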
# Thus indexes multiplication is used as key of the reference dictionary kt_ref = {1: kurtosis_element(mD, frac, 0, 0, 0, 0), 16: kurtosis_element(mD, frac, 1, 1, 1, 1), 81: kurtosis_element(mD, frac, 2, 2, 2, 2), 2: kurtosis_element(mD, frac, 0, 0, 0, 1), 3: kurtosis_element(mD, frac, 0, 0, 0, 2), 8: kurtosis_element(mD, frac, 0, 1, 1, 1), 24: kurtosis_element(mD, frac, 1, 1, 1, 2), 27: kurtosis_element(mD, frac, 0, 2, 2, 2), 54: kurtosis_element(mD, frac, 1, 2, 2, 2), 4: kurtosis_element(mD, frac, 0, 0, 1, 1), 9: kurtosis_element(mD, frac, 0, 0, 2, 2), 36: kurtosis_element(mD, frac, 1, 1, 2, 2), 6: kurtosis_element(mD, frac, 0, 0, 1, 2), 12: kurtosis_element(mD, frac, 0, 1, 1, 2), 18: kurtosis_element(mD, frac, 0, 1, 2, 2)} # Testing all 81 possible elements xyz = [0, 1, 2] for i in xyz: for j in xyz: for k in xyz: for l in xyz: key = (i+1) * (j+1) * (k+1) * (l+1) assert_almost_equal(kurtosis_element(mD, frac, i, k, j, l), kt_ref[key]) # Testing optional funtion inputs assert_almost_equal(kurtosis_element(mD, frac, i, k, j, l), kurtosis_element(mD, frac, i, k, j, l, D, MD)) def test_DKI_simulations_aligned_fibers(): """ Testing DKI simulations when aligning the same fiber to different axis. If biological parameters don't change, kt[0] of a fiber aligned to axis x has to be equal to kt[1] of a fiber aligned to the axis y and equal to kt[2] of a fiber aligned to axis z. The same is applicable for dt """ # Defining parameters based on Neto Henriques et al., 2015. NeuroImage 111 mevals = np.array([[0.00099, 0, 0], # Intra-cellular [0.00226, 0.00087, 0.00087]]) # Extra-cellular frac = [49, 51] # Compartment volume fraction # axis x angles = [(90, 0), (90, 0)] signal_fx, dt_fx, kt_fx = multi_tensor_dki(gtab_2s, mevals, angles=angles, fractions=frac) # axis y angles = [(90, 90), (90, 90)] signal_fy, dt_fy, kt_fy = multi_tensor_dki(gtab_2s, mevals, angles=angles, fractions=frac) # axis z angles = [(0, 0), (0, 0)] signal_fz, dt_fz, kt_fz = multi_tensor_dki(gtab_2s, mevals, angles=angles, fractions=frac) assert_array_equal([kt_fx[0], kt_fx[1], kt_fx[2]], [kt_fy[1], kt_fy[0], kt_fy[2]]) assert_array_equal([kt_fx[0], kt_fx[1], kt_fx[2]], [kt_fz[2], kt_fz[0], kt_fz[1]]) assert_array_equal([dt_fx[0], dt_fx[2], dt_fx[5]], [dt_fy[2], dt_fy[0], dt_fy[5]]) assert_array_equal([dt_fx[0], dt_fx[2], dt_fx[5]], [dt_fz[5], dt_fz[0], dt_fz[2]]) # testing S signal along axis x, y and z bvals = np.array([0, 0, 0, 1000, 1000, 1000, 2000, 2000, 2000]) bvecs = np.asarray([[1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 0, 0], [0, 1, 0], [0, 0, 1]]) gtab_axis = gradient_table(bvals, bvecs) # axis x S_fx = dki_signal(gtab_axis, dt_fx, kt_fx, S0=100) assert_array_almost_equal(S_fx[0:3], [100, 100, 100]) # test S f0r b=0 # axis y S_fy = dki_signal(gtab_axis, dt_fy, kt_fy, S0=100) assert_array_almost_equal(S_fy[0:3], [100, 100, 100]) # test S f0r b=0 # axis z S_fz = dki_signal(gtab_axis, dt_fz, kt_fz, S0=100) assert_array_almost_equal(S_fz[0:3], [100, 100, 100]) # test S f0r b=0 # test S for b = 1000 assert_array_almost_equal([S_fx[3], S_fx[4], S_fx[5]], [S_fy[4], S_fy[3], S_fy[5]]) assert_array_almost_equal([S_fx[3], S_fx[4], S_fx[5]], [S_fz[5], S_fz[3], S_fz[4]]) # test S for b = 2000 assert_array_almost_equal([S_fx[6], S_fx[7], S_fx[8]], [S_fy[7], S_fy[6], S_fy[8]]) assert_array_almost_equal([S_fx[6], S_fx[7], S_fx[8]], [S_fz[8], S_fz[6], S_fz[7]]) def test_DKI_crossing_fibers_simulations(): """ Testing DKI simulations of a crossing fiber """ # two fiber not aligned to planes x = 0, y = 0, or z = 
0 mevals = np.array([[0.00099, 0, 0], [0.00226, 0.00087, 0.00087], [0.00099, 0, 0], [0.00226, 0.00087, 0.00087]]) angles = [(80, 10), (80, 10), (20, 30), (20, 30)] fie = 0.49 frac = [fie*50, (1 - fie)*50, fie*50, (1 - fie)*50] signal, dt, kt = multi_tensor_dki(gtab_2s, mevals, angles=angles, fractions=frac, snr=None) # in this simulations dt and kt cannot have zero elements for i in range(len(dt)): assert dt[i] != 0 for i in range(len(kt)): assert kt[i] != 0 # test S, dt and kt relative to the expected values computed from another # DKI package - UDKI (Neto Henriques et al., 2015) dt_ref = [1.0576161e-3, 0.1292542e-3, 0.4786179e-3, 0.2667081e-3, 0.1136643e-3, 0.9888660e-3] kt_ref = [2.3529944, 0.8226448, 2.3011221, 0.2017312, -0.0437535, 0.0404011, 0.0355281, 0.2449859, 0.2157668, 0.3495910, 0.0413366, 0.3461519, -0.0537046, 0.0133414, -0.017441] assert_array_almost_equal(dt, dt_ref) assert_array_almost_equal(kt, kt_ref) assert_array_almost_equal(signal, dki_signal(gtab_2s, dt_ref, kt_ref, S0=1., snr=None), decimal=5) if __name__ == "__main__": test_multi_tensor() dipy-0.13.0/dipy/sims/voxel.py000066400000000000000000000735541317371701200162070ustar00rootroot00000000000000# -*- coding: utf-8 -*- from __future__ import division import numpy as np from numpy import dot from dipy.core.geometry import sphere2cart from dipy.core.geometry import vec2vec_rotmat from dipy.reconst.utils import dki_design_matrix from scipy.special import jn # Diffusion coefficients for white matter tracts, in mm^2/s # # Based roughly on values from: # # Pierpaoli, Basser, "Towards a Quantitative Assessment of Diffusion # Anisotropy", Magnetic Resonance in Medicine, 1996; 36(6):893-906. # diffusion_evals = np.array([1500e-6, 400e-6, 400e-6]) def _check_directions(angles): """ Helper function to check if direction ground truth have the right format and are in cartesian coordinates Parameters ----------- angles : array (K,2) or (K, 3) List of K polar angles (in degrees) for the sticks or array of K sticks as unit vectors. Returns -------- sticks : (K,3) Sticks in cartesian coordinates. """ angles = np.array(angles) if angles.shape[-1] == 3: sticks = angles else: sticks = [sphere2cart(1, np.deg2rad(pair[0]), np.deg2rad(pair[1])) for pair in angles] sticks = np.array(sticks) return sticks def _add_gaussian(sig, noise1, noise2): """ Helper function to add_noise This one simply adds one of the Gaussians to the sig and ignores the other one. """ return sig + noise1 def _add_rician(sig, noise1, noise2): """ Helper function to add_noise. This does the same as abs(sig + complex(noise1, noise2)) """ return np.sqrt((sig + noise1) ** 2 + noise2 ** 2) def _add_rayleigh(sig, noise1, noise2): """ Helper function to add_noise The Rayleigh distribution is $\sqrt\{Gauss_1^2 + Gauss_2^2}$. """ return sig + np.sqrt(noise1 ** 2 + noise2 ** 2) def add_noise(signal, snr, S0, noise_type='rician'): r""" Add noise of specified distribution to the signal from a single voxel. Parameters ----------- signal : 1-d ndarray The signal in the voxel. snr : float The desired signal-to-noise ratio. (See notes below.) If `snr` is None, return the signal as-is. S0 : float Reference signal for specifying `snr`. noise_type : string, optional The distribution of noise added. Can be either 'gaussian' for Gaussian distributed noise, 'rician' for Rice-distributed noise (default) or 'rayleigh' for a Rayleigh distribution. Returns -------- signal : array, same shape as the input Signal with added noise. 
Notes ----- SNR is defined here, following [1]_, as ``S0 / sigma``, where ``sigma`` is the standard deviation of the two Gaussian distributions forming the real and imaginary components of the Rician noise distribution (see [2]_). References ---------- .. [1] Descoteaux, Angelino, Fitzgibbons and Deriche (2007) Regularized, fast and robust q-ball imaging. MRM, 58: 497-510 .. [2] Gudbjartson and Patz (2008). The Rician distribution of noisy MRI data. MRM 34: 910-914. Examples -------- >>> signal = np.arange(800).reshape(2, 2, 2, 100) >>> signal_w_noise = add_noise(signal, 10., 100., noise_type='rician') """ if snr is None: return signal sigma = S0 / snr noise_adder = {'gaussian': _add_gaussian, 'rician': _add_rician, 'rayleigh': _add_rayleigh} noise1 = np.random.normal(0, sigma, size=signal.shape) if noise_type == 'gaussian': noise2 = None else: noise2 = np.random.normal(0, sigma, size=signal.shape) return noise_adder[noise_type](signal, noise1, noise2) def sticks_and_ball(gtab, d=0.0015, S0=1., angles=[(0, 0), (90, 0)], fractions=[35, 35], snr=20): """ Simulate the signal for a Sticks & Ball model. Parameters ----------- gtab : GradientTable Signal measurement directions. d : float Diffusivity value. S0 : float Unweighted signal value. angles : array (K,2) or (K, 3) List of K polar angles (in degrees) for the sticks or array of K sticks as unit vectors. fractions : float Percentage of each stick. Remainder to 100 specifies isotropic component. snr : float Signal to noise ratio, assuming Rician noise. If set to None, no noise is added. Returns -------- S : (N,) ndarray Simulated signal. sticks : (M,3) Sticks in cartesian coordinates. References ---------- .. [1] Behrens et al., "Probabilistic diffusion tractography with multiple fiber orientations: what can we gain?", Neuroimage, 2007. """ fractions = [f / 100. for f in fractions] f0 = 1 - np.sum(fractions) S = np.zeros(len(gtab.bvals)) sticks = _check_directions(angles) for (i, g) in enumerate(gtab.bvecs[1:]): S[i + 1] = f0*np.exp(-gtab.bvals[i + 1]*d) + \ np.sum([fractions[j]*np.exp(-gtab.bvals[i + 1]*d*np.dot(s, g)**2) for (j, s) in enumerate(sticks)]) S[i + 1] = S0 * S[i + 1] S[gtab.b0s_mask] = S0 S = add_noise(S, snr, S0) return S, sticks def callaghan_perpendicular(q, radius): r""" Calculates the perpendicular diffusion signal E(q) in a cylinder of radius R using the Soderman model [1]_. Assumes that the pulse length is infinitely short and the diffusion time is infinitely long. Parameters ---------- q : array, shape (N,) q-space value in 1/mm radius : float cylinder radius in mm Returns ------- E : array, shape (N,) signal attenuation References ---------- .. [1] Söderman, Olle, and Bengt Jönsson. "Restricted diffusion in cylindrical geometry." Journal of Magnetic Resonance, Series A 117.1 (1995): 94-97. """ # Eq. [6] in the paper E = ((2 * jn(1, 2 * np.pi * q * radius)) ** 2 / (2 * np.pi * q * radius) ** 2 ) return E def gaussian_parallel(q, tau, D=0.7e-3): r""" Calculates the parallel Gaussian diffusion signal. Parameters ---------- q : array, shape (N,) q-space value in 1/mm tau : float diffusion time in s D : float diffusion constant Returns ------- E : array, shape (N,) signal attenuation """ return np.exp(-(2 * np.pi * q) ** 2 * tau * D) def cylinders_and_ball_soderman(gtab, tau, radii=[5e-3, 5e-3], D=0.7e-3, S0=1., angles=[(0, 0), (90, 0)], fractions=[35, 35], snr=20): r""" Calculates the three-dimensional signal attenuation E(q) originating from within a cylinder of radius R using the Soderman approximation [1]_. 
The diffusion signal is assumed to be separable perpendicular and parallel to the cylinder axis [2]_. This function is basically an extension of the ball and stick model. Setting the radius to zero makes them equivalent. Parameters ---------- gtab : GradientTable Signal measurement directions. tau : float diffusion time in s radii : float cylinder radius in mm D : float diffusion constant S0 : float Unweighted signal value. angles : array (K,2) or (K, 3) List of K polar angles (in degrees) for the sticks or array of K sticks as unit vectors. direction : array (3) direction of the axis of the cylinder fractions : float Percentage of each stick. Remainder to 100 specifies isotropic component. snr : float Signal to noise ratio, assuming Rician noise. If set to None, no noise is added. Returns ------- E : array, shape (N,) signal attenuation References ---------- .. [1] Söderman, Olle, and Bengt Jönsson. "Restricted diffusion in cylindrical geometry." Journal of Magnetic Resonance, Series A 117.1 (1995): 94-97. .. [2] Assaf, Yaniv, et al. "New modeling and experimental framework to characterize hindered and restricted water diffusion in brain white matter." Magnetic Resonance in Medicine 52.5 (2004): 965-978. """ qvals = np.sqrt(gtab.bvals / tau) / (2 * np.pi) qvecs = qvals[:, None] * gtab.bvecs q_norm = np.sqrt(np.einsum('ij,ij->i', qvecs, qvecs)) fractions = [f / 100. for f in fractions] f0 = 1 - np.sum(fractions) S = np.zeros(len(gtab.bvals)) sticks = _check_directions(angles) for i, f in enumerate(fractions): q_par = abs(np.dot(qvecs, sticks[i])) q_perp = np.sqrt(q_norm ** 2 - q_par ** 2) S_cylinder = (callaghan_perpendicular(q_perp, radii[i]) * gaussian_parallel(q_par, tau, D=D)) S += f * S_cylinder S += f0 * np.exp(-gtab.bvals * D) S *= S0 S[gtab.b0s_mask] = S0 S = add_noise(S, snr, S0) return S, sticks def single_tensor(gtab, S0=1, evals=None, evecs=None, snr=None): """ Simulated Q-space signal with a single tensor. Parameters ----------- gtab : GradientTable Measurement directions. S0 : double, Strength of signal in the presence of no diffusion gradient (also called the ``b=0`` value). evals : (3,) ndarray Eigenvalues of the diffusion tensor. By default, values typical for prolate white matter are used. evecs : (3, 3) ndarray Eigenvectors of the tensor. You can also think of this as a rotation matrix that transforms the direction of the tensor. The eigenvectors need to be column wise. snr : float Signal to noise ratio, assuming Rician noise. None implies no noise. Returns -------- S : (N,) ndarray Simulated signal: ``S(q, tau) = S_0 e^(-b g^T R D R.T g)``. References ---------- .. [1] M. Descoteaux, "High Angular Resolution Diffusion MRI: from Local Estimation to Segmentation and Tractography", PhD thesis, University of Nice-Sophia Antipolis, p. 42, 2008. .. [2] E. Stejskal and J. Tanner, "Spin diffusion measurements: spin echos in the presence of a time-dependent field gradient", Journal of Chemical Physics, nr. 42, pp. 288--292, 1965. """ if evals is None: evals = diffusion_evals if evecs is None: evecs = np.eye(3) out_shape = gtab.bvecs.shape[:gtab.bvecs.ndim - 1] gradients = gtab.bvecs.reshape(-1, 3) R = np.asarray(evecs) S = np.zeros(len(gradients)) D = dot(dot(R, np.diag(evals)), R.T) for (i, g) in enumerate(gradients): S[i] = S0 * np.exp(-gtab.bvals[i] * dot(dot(g.T, D), g)) S = add_noise(S, snr, S0) return S.reshape(out_shape) def multi_tensor(gtab, mevals, S0=1., angles=[(0, 0), (90, 0)], fractions=[50, 50], snr=20): r""" Simulate a Multi-Tensor signal. 
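    The simulated signal is the fraction-weighted sum of single-tensor
    signals (see ``single_tensor``),

    .. math::

        S(b, \mathbf{g}) = S_0 \sum_{i} f_i e^{-b \mathbf{g}^T D_i \mathbf{g}}

    where :math:`f_i` and :math:`D_i` are the normalized fraction and the
    diffusion tensor of compartment :math:`i`, with Rician noise optionally
    added afterwards.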
Parameters ----------- gtab : GradientTable mevals : array (K, 3) each tensor's eigenvalues in each row S0 : float Unweighted signal value (b0 signal). angles : array (K,2) or (K,3) List of K tensor directions in polar angles (in degrees) or unit vectors fractions : float Percentage of the contribution of each tensor. The sum of fractions should be equal to 100%. snr : float Signal to noise ratio, assuming Rician noise. If set to None, no noise is added. Returns -------- S : (N,) ndarray Simulated signal. sticks : (M,3) Sticks in cartesian coordinates. Examples -------- >>> import numpy as np >>> from dipy.sims.voxel import multi_tensor >>> from dipy.data import get_data >>> from dipy.core.gradients import gradient_table >>> from dipy.io.gradients import read_bvals_bvecs >>> fimg, fbvals, fbvecs = get_data('small_101D') >>> bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs) >>> gtab = gradient_table(bvals, bvecs) >>> mevals=np.array(([0.0015, 0.0003, 0.0003],[0.0015, 0.0003, 0.0003])) >>> e0 = np.array([1, 0, 0.]) >>> e1 = np.array([0., 1, 0]) >>> S = multi_tensor(gtab, mevals) """ if np.round(np.sum(fractions), 2) != 100.0: raise ValueError('Fractions should sum to 100') fractions = [f / 100. for f in fractions] S = np.zeros(len(gtab.bvals)) sticks = _check_directions(angles) for i in range(len(fractions)): S = S + fractions[i] * single_tensor(gtab, S0=S0, evals=mevals[i], evecs=all_tensor_evecs( sticks[i]), snr=None) return add_noise(S, snr, S0), sticks def multi_tensor_dki(gtab, mevals, S0=1., angles=[(90., 0.), (90., 0.)], fractions=[50, 50], snr=20): r""" Simulate the diffusion-weight signal, diffusion and kurtosis tensors based on the DKI model Parameters ----------- gtab : GradientTable mevals : array (K, 3) eigenvalues of the diffusion tensor for each individual compartment S0 : float (optional) Unweighted signal value (b0 signal). angles : array (K,2) or (K,3) (optional) List of K tensor directions of the diffusion tensor of each compartment in polar angles (in degrees) or unit vectors fractions : float (K,) (optional) Percentage of the contribution of each tensor. The sum of fractions should be equal to 100%. snr : float (optional) Signal to noise ratio, assuming Rician noise. If set to None, no noise is added. Returns -------- S : (N,) ndarray Simulated signal based on the DKI model. dt : (6,) elements of the diffusion tensor. kt : (15,) elements of the kurtosis tensor. Notes ----- Simulations are based on multicompartmental models which assumes that tissue is well described by impermeable diffusion compartments characterized by their only diffusion tensor. Since simulations are based on the DKI model, coefficients larger than the fourth order of the signal's taylor expansion approximation are neglected. Examples -------- >>> import numpy as np >>> from dipy.sims.voxel import multi_tensor_dki >>> from dipy.data import get_data >>> from dipy.core.gradients import gradient_table >>> from dipy.io.gradients import read_bvals_bvecs >>> fimg, fbvals, fbvecs = get_data('small_64D') >>> bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs) >>> bvals_2s = np.concatenate((bvals, bvals * 2), axis=0) >>> bvecs_2s = np.concatenate((bvecs, bvecs), axis=0) >>> gtab = gradient_table(bvals_2s, bvecs_2s) >>> mevals = np.array([[0.00099, 0, 0],[0.00226, 0.00087, 0.00087]]) >>> S, dt, kt = multi_tensor_dki(gtab, mevals) References ---------- .. [1] R. 
Neto Henriques et al., "Exploring the 3D geometry of the diffusion kurtosis tensor - Impact on the development of robust tractography procedures and novel biomarkers", NeuroImage (2015) 111, 85-99. """ if np.round(np.sum(fractions), 2) != 100.0: raise ValueError('Fractions should sum to 100') fractions = [f / 100. for f in fractions] S = np.zeros(len(gtab.bvals)) sticks = _check_directions(angles) # computing a 3D matrix containing the individual DT components D_comps = np.zeros((len(fractions), 3, 3)) for i in range(len(fractions)): R = all_tensor_evecs(sticks[i]) D_comps[i] = dot(dot(R, np.diag(mevals[i])), R.T) # compute voxel's DT DT = np.zeros((3, 3)) for i in range(len(fractions)): DT = DT + fractions[i]*D_comps[i] dt = np.array([DT[0][0], DT[0][1], DT[1][1], DT[0][2], DT[1][2], DT[2][2]]) # compute voxel's MD MD = (DT[0][0] + DT[1][1] + DT[2][2]) / 3 # compute voxel's KT kt = np.zeros((15)) kt[0] = kurtosis_element(D_comps, fractions, 0, 0, 0, 0, DT, MD) kt[1] = kurtosis_element(D_comps, fractions, 1, 1, 1, 1, DT, MD) kt[2] = kurtosis_element(D_comps, fractions, 2, 2, 2, 2, DT, MD) kt[3] = kurtosis_element(D_comps, fractions, 0, 0, 0, 1, DT, MD) kt[4] = kurtosis_element(D_comps, fractions, 0, 0, 0, 2, DT, MD) kt[5] = kurtosis_element(D_comps, fractions, 0, 1, 1, 1, DT, MD) kt[6] = kurtosis_element(D_comps, fractions, 1, 1, 1, 2, DT, MD) kt[7] = kurtosis_element(D_comps, fractions, 0, 2, 2, 2, DT, MD) kt[8] = kurtosis_element(D_comps, fractions, 1, 2, 2, 2, DT, MD) kt[9] = kurtosis_element(D_comps, fractions, 0, 0, 1, 1, DT, MD) kt[10] = kurtosis_element(D_comps, fractions, 0, 0, 2, 2, DT, MD) kt[11] = kurtosis_element(D_comps, fractions, 1, 1, 2, 2, DT, MD) kt[12] = kurtosis_element(D_comps, fractions, 0, 0, 1, 2, DT, MD) kt[13] = kurtosis_element(D_comps, fractions, 0, 1, 1, 2, DT, MD) kt[14] = kurtosis_element(D_comps, fractions, 0, 1, 2, 2, DT, MD) # compute S based on the DT and KT S = dki_signal(gtab, dt, kt, S0, snr) return S, dt, kt def kurtosis_element(D_comps, frac, ind_i, ind_j, ind_k, ind_l, DT=None, MD=None): r""" Computes the diffusion kurtosis tensor element (with indexes i, j, k and l) based on the individual diffusion tensor components of a multicompartmental model. Parameters ----------- D_comps : (K,3,3) ndarray Diffusion tensors for all K individual compartment of the multicompartmental model. frac : float Percentage of the contribution of each tensor. The sum of fractions should be equal to 100%. ind_i : int Element's index i (0 for x, 1 for y, 2 for z) ind_j : int Element's index j (0 for x, 1 for y, 2 for z) ind_k : int Element's index k (0 for x, 1 for y, 2 for z) ind_l: int Elements index l (0 for x, 1 for y, 2 for z) DT : (3,3) ndarray (optional) Voxel's global diffusion tensor. MD : float (optional) Voxel's global mean diffusivity. Returns -------- wijkl : float kurtosis tensor element of index i, j, k, l Notes -------- wijkl is calculated using equation 8 given in [1]_ References ---------- .. [1] R. Neto Henriques et al., "Exploring the 3D geometry of the diffusion kurtosis tensor - Impact on the development of robust tractography procedures and novel biomarkers", NeuroImage (2015) 111, 85-99. 
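    In terms of the quantities used here (the compartment tensors
    :math:`D^{(f)}` with fractions :math:`f`, the voxel tensor :math:`D` and
    its mean diffusivity :math:`MD`), the returned element is

    .. math::

        W_{ijkl} = \frac{1}{MD^{2}}
            \left[\sum_{f} f \left(D^{(f)}_{ij} D^{(f)}_{kl}
            + D^{(f)}_{ik} D^{(f)}_{jl} + D^{(f)}_{il} D^{(f)}_{jk}\right)
            - D_{ij} D_{kl} - D_{ik} D_{jl} - D_{il} D_{jk}\right]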
""" if DT is None: DT = np.zeros((3, 3)) for i in range(len(frac)): DT = DT + frac[i]*D_comps[i] if MD is None: MD = (DT[0][0] + DT[1][1] + DT[2][2]) / 3 wijkl = 0 for f in range(len(frac)): wijkl = wijkl + frac[f] * ( D_comps[f][ind_i][ind_j]*D_comps[f][ind_k][ind_l] + D_comps[f][ind_i][ind_k]*D_comps[f][ind_j][ind_l] + D_comps[f][ind_i][ind_l]*D_comps[f][ind_j][ind_k]) wijkl = (wijkl - DT[ind_i][ind_j]*DT[ind_k][ind_l] - DT[ind_i][ind_k]*DT[ind_j][ind_l] - DT[ind_i][ind_l]*DT[ind_j][ind_k]) / (MD**2) return wijkl def dki_signal(gtab, dt, kt, S0=150, snr=None): r""" Simulated signal based on the diffusion and diffusion kurtosis tensors of a single voxel. Simulations are preformed assuming the DKI model. Parameters ----------- gtab : GradientTable Measurement directions. dt : (6,) ndarray Elements of the diffusion tensor. kt : (15, ) ndarray Elements of the diffusion kurtosis tensor. S0 : float (optional) Strength of signal in the presence of no diffusion gradient. snr : float (optional) Signal to noise ratio, assuming Rician noise. None implies no noise. Returns -------- S : (N,) ndarray Simulated signal based on the DKI model: .. math:: S=S_{0}e^{-bD+\frac{1}{6}b^{2}D^{2}K} References ---------- .. [1] R. Neto Henriques et al., "Exploring the 3D geometry of the diffusion kurtosis tensor - Impact on the development of robust tractography procedures and novel biomarkers", NeuroImage (2015) 111, 85-99. """ dt = np.array(dt) kt = np.array(kt) A = dki_design_matrix(gtab) # define vector of DKI parameters MD = (dt[0] + dt[2] + dt[5]) / 3 X = np.concatenate((dt, kt*MD*MD, np.array([np.log(S0)])), axis=0) # Compute signals based on the DKI model S = np.exp(dot(A, X)) S = add_noise(S, snr, S0) return S def single_tensor_odf(r, evals=None, evecs=None): """ Simulated ODF with a single tensor. Parameters ---------- r : (N,3) or (M,N,3) ndarray Measurement positions in (x, y, z), either as a list or on a grid. evals : (3,) Eigenvalues of diffusion tensor. By default, use values typical for prolate white matter. evecs : (3, 3) ndarray Eigenvectors of the tensor, written column-wise. You can also think of these as the rotation matrix that determines the orientation of the diffusion tensor. Returns ------- ODF : (N,) ndarray The diffusion probability at ``r`` after time ``tau``. References ---------- .. [1] Aganj et al., "Reconstruction of the Orientation Distribution Function in Single- and Multiple-Shell q-Ball Imaging Within Constant Solid Angle", Magnetic Resonance in Medicine, nr. 64, pp. 554--566, 2010. """ if evals is None: evals = diffusion_evals if evecs is None: evecs = np.eye(3) out_shape = r.shape[:r.ndim - 1] R = np.asarray(evecs) D = dot(dot(R, np.diag(evals)), R.T) Di = np.linalg.inv(D) r = r.reshape(-1, 3) P = np.zeros(len(r)) for (i, u) in enumerate(r): P[i] = (dot(dot(u.T, Di), u)) ** (3 / 2) return (1 / (4 * np.pi * np.prod(evals) ** (1 / 2) * P)).reshape(out_shape) def all_tensor_evecs(e0): """Given the principle tensor axis, return the array of all eigenvectors column-wise (or, the rotation matrix that orientates the tensor). Parameters ---------- e0 : (3,) ndarray Principle tensor axis. Returns ------- evecs : (3,3) ndarray Tensor eigenvectors, arranged column-wise. """ axes = np.eye(3) mat = vec2vec_rotmat(axes[0], e0) e1 = np.dot(mat, axes[1]) e2 = np.dot(mat, axes[2]) # Return the eigenvectors column-wise: return np.array([e0, e1, e2]).T def multi_tensor_odf(odf_verts, mevals, angles, fractions): r'''Simulate a Multi-Tensor ODF. 
Parameters ---------- odf_verts : (N,3) ndarray Vertices of the reconstruction sphere. mevals : sequence of 1D arrays, Eigen-values for each tensor. angles : sequence of 2d tuples, Sequence of principal directions for each tensor in polar angles or cartesian unit coordinates. fractions : sequence of floats, Percentages of the fractions for each tensor. Returns ------- ODF : (N,) ndarray Orientation distribution function. Examples -------- Simulate a MultiTensor ODF with two peaks and calculate its exact ODF. >>> import numpy as np >>> from dipy.sims.voxel import multi_tensor_odf, all_tensor_evecs >>> from dipy.data import get_sphere >>> sphere = get_sphere('symmetric724') >>> vertices, faces = sphere.vertices, sphere.faces >>> mevals = np.array(([0.0015, 0.0003, 0.0003],[0.0015, 0.0003, 0.0003])) >>> angles = [(0, 0), (90, 0)] >>> odf = multi_tensor_odf(vertices, mevals, angles, [50, 50]) ''' mf = [f / 100. for f in fractions] sticks = _check_directions(angles) odf = np.zeros(len(odf_verts)) mevecs = [] for s in sticks: mevecs += [all_tensor_evecs(s)] for (j, f) in enumerate(mf): odf += f * single_tensor_odf(odf_verts, evals=mevals[j], evecs=mevecs[j]) return odf def single_tensor_rtop(evals=None, tau=1.0 / (4 * np.pi ** 2)): r'''Simulate a Multi-Tensor rtop. Parameters ---------- evals : 1D arrays, Eigen-values for the tensor. By default, values typical for prolate white matter are used. tau : float, diffusion time. By default the value that makes q=sqrt(b). Returns ------- rtop : float, Return to origin probability. References ---------- .. [1] Cheng J., "Estimation and Processing of Ensemble Average Propagator and Its Features in Diffusion MRI", PhD Thesis, 2012. ''' if evals is None: evals = diffusion_evals rtop = 1.0 / np.sqrt((4 * np.pi * tau) ** 3 * np.prod(evals)) return rtop def multi_tensor_rtop(mf, mevals=None, tau=1 / (4 * np.pi ** 2)): r'''Simulate a Multi-Tensor rtop. Parameters ---------- mf : sequence of floats, bounded [0,1] Percentages of the fractions for each tensor. mevals : sequence of 1D arrays, Eigen-values for each tensor. By default, values typical for prolate white matter are used. tau : float, diffusion time. By default the value that makes q=sqrt(b). Returns ------- rtop : float, Return to origin probability. References ---------- .. [1] Cheng J., "Estimation and Processing of Ensemble Average Propagator and Its Features in Diffusion MRI", PhD Thesis, 2012. ''' rtop = 0 if mevals is None: mevals = [None, ] * len(mf) for j, f in enumerate(mf): rtop += f * single_tensor_rtop(mevals[j], tau=tau) return rtop def single_tensor_pdf(r, evals=None, evecs=None, tau=1 / (4 * np.pi ** 2)): """Simulated ODF with a single tensor. Parameters ---------- r : (N,3) or (M,N,3) ndarray Measurement positions in (x, y, z), either as a list or on a grid. evals : (3,) Eigenvalues of diffusion tensor. By default, use values typical for prolate white matter. evecs : (3, 3) ndarray Eigenvectors of the tensor. You can also think of these as the rotation matrix that determines the orientation of the diffusion tensor. tau : float, diffusion time. By default the value that makes q=sqrt(b). Returns ------- pdf : (N,) ndarray The diffusion probability at ``r`` after time ``tau``. References ---------- .. [1] Cheng J., "Estimation and Processing of Ensemble Average Propagator and Its Features in Diffusion MRI", PhD Thesis, 2012. 
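    The value computed below is the Gaussian propagator
    ``exp(-r.T inv(D) r / (4 * tau)) / sqrt((4 * pi * tau)**3 * det(D))``,
    where ``D`` is the tensor assembled from `evals` and `evecs` (``det(D)``
    equals the product of `evals`).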
""" if evals is None: evals = diffusion_evals if evecs is None: evecs = np.eye(3) out_shape = r.shape[:r.ndim - 1] R = np.asarray(evecs) D = dot(dot(R, np.diag(evals)), R.T) Di = np.linalg.inv(D) r = r.reshape(-1, 3) P = np.zeros(len(r)) for (i, u) in enumerate(r): P[i] = (-dot(dot(u.T, Di), u)) / (4 * tau) pdf = (1 / np.sqrt((4 * np.pi * tau) ** 3 * np.prod(evals))) * np.exp(P) return pdf.reshape(out_shape) def multi_tensor_pdf(pdf_points, mevals, angles, fractions, tau=1 / (4 * np.pi ** 2)): r'''Simulate a Multi-Tensor ODF. Parameters ---------- pdf_points : (N, 3) ndarray Points to evaluate the PDF. mevals : sequence of 1D arrays, Eigen-values for each tensor. By default, values typical for prolate white matter are used. angles : sequence, Sequence of principal directions for each tensor in polar angles or cartesian unit coordinates. fractions : sequence of floats, Percentages of the fractions for each tensor. tau : float, diffusion time. By default the value that makes q=sqrt(b). Returns ------- pdf : (N,) ndarray, Probability density function of the water displacement. References ---------- .. [1] Cheng J., "Estimation and Processing of Ensemble Average Propagator and its Features in Diffusion MRI", PhD Thesis, 2012. ''' mf = [f / 100. for f in fractions] sticks = _check_directions(angles) pdf = np.zeros(len(pdf_points)) mevecs = [] for s in sticks: mevecs += [all_tensor_evecs(s)] for j, f in enumerate(mf): pdf += f * single_tensor_pdf(pdf_points, evals=mevals[j], evecs=mevecs[j], tau=tau) return pdf def single_tensor_msd(evals=None, tau=1 / (4 * np.pi ** 2)): r'''Simulate a Multi-Tensor rtop. Parameters ---------- evals : 1D arrays, Eigen-values for the tensor. By default, values typical for prolate white matter are used. tau : float, diffusion time. By default the value that makes q=sqrt(b). Returns ------- msd : float, Mean square displacement. References ---------- .. [1] Cheng J., "Estimation and Processing of Ensemble Average Propagator and Its Features in Diffusion MRI", PhD Thesis, 2012. ''' if evals is None: evals = diffusion_evals msd = 2 * tau * np.sum(evals) return msd def multi_tensor_msd(mf, mevals=None, tau=1 / (4 * np.pi ** 2)): r'''Simulate a Multi-Tensor rtop. Parameters ---------- mf : sequence of floats, bounded [0,1] Percentages of the fractions for each tensor. mevals : sequence of 1D arrays, Eigen-values for each tensor. By default, values typical for prolate white matter are used. tau : float, diffusion time. By default the value that makes q=sqrt(b). Returns ------- msd : float, Mean square displacement. References ---------- .. [1] Cheng J., "Estimation and Processing of Ensemble Average Propagator and Its Features in Diffusion MRI", PhD Thesis, 2012. 
''' msd = 0 if mevals is None: mevals = [None, ] * len(mf) for j, f in enumerate(mf): msd += f * single_tensor_msd(mevals[j], tau=tau) return msd # Use standard naming convention, but keep old names # for backward compatibility SticksAndBall = sticks_and_ball SingleTensor = single_tensor MultiTensor = multi_tensor dipy-0.13.0/dipy/testing/000077500000000000000000000000001317371701200151645ustar00rootroot00000000000000dipy-0.13.0/dipy/testing/__init__.py000066400000000000000000000023421317371701200172760ustar00rootroot00000000000000''' Utilities for testing ''' from os.path import dirname, abspath, join as pjoin from dipy.testing.spherepoints import sphere_points from dipy.testing.decorators import doctest_skip_parser from numpy.testing import assert_array_equal import numpy as np from distutils.version import LooseVersion # set path to example data IO_DATA_PATH = abspath(pjoin(dirname(__file__), '..', 'io', 'tests', 'data')) # Allow failed import of nose if not now running tests try: import nose.tools as nt except ImportError: pass else: from nose.tools import (assert_equal, assert_not_equal, assert_true, assert_false, assert_raises) def assert_arrays_equal(arrays1, arrays2): for arr1, arr2 in zip(arrays1, arrays2): assert_array_equal(arr1, arr2) def setup_test(): """ Set numpy print options to "legacy" for new versions of numpy If imported into a file, nosetest will run this before any doctests. References ----------- https://github.com/numpy/numpy/commit/710e0327687b9f7653e5ac02d222ba62c657a718 https://github.com/nipy/nibabel/pull/556 """ if LooseVersion(np.__version__) >= LooseVersion('1.14'): np.set_printoptions(sign='legacy') dipy-0.13.0/dipy/testing/decorators.py000066400000000000000000000043451317371701200177110ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Decorators for dipy tests """ import re import os SKIP_RE = re.compile("(\s*>>>.*?)(\s*)#\s*skip\s+if\s+(.*)$") def doctest_skip_parser(func): """Decorator replaces custom skip test markup in doctests. Say a function has a docstring:: >>> something # skip if not HAVE_AMODULE >>> something + else >>> something # skip if HAVE_BMODULE This decorator will evaluate the expresssion after ``skip if``. If this evaluates to True, then the comment is replaced by ``# doctest: +SKIP``. If False, then the comment is just removed. The expression is evaluated in the ``globals`` scope of `func`. For example, if the module global ``HAVE_AMODULE`` is False, and module global ``HAVE_BMODULE`` is False, the returned function will have docstring:: >>> something # doctest: +SKIP >>> something + else >>> something """ lines = func.__doc__.split('\n') new_lines = [] for line in lines: match = SKIP_RE.match(line) if match is None: new_lines.append(line) continue code, space, expr = match.groups() if eval(expr, func.__globals__): code = code + space + "# doctest: +SKIP" new_lines.append(code) func.__doc__ = "\n".join(new_lines) return func ### # In some cases (e.g., on Travis), we want to use a virtual frame-buffer for # testing. 
The following decorator runs the tests under xvfb (mediated by # xvfbwrapper) conditioned on an environment variable (that we set in # .travis.yml for these cases): use_xvfb = os.environ.get('TEST_WITH_XVFB', False) def xvfb_it(my_test): """Run a test with xvfbwrapper.""" # When we use verbose testing we want the name: fname = my_test.__name__ def test_with_xvfb(*args, **kwargs): if use_xvfb: from xvfbwrapper import Xvfb display = Xvfb(width=1920, height=1080) display.start() my_test(*args, **kwargs) if use_xvfb: display.stop() # Plant it back in and return the new function: test_with_xvfb.__name__ = fname return test_with_xvfb dipy-0.13.0/dipy/testing/memory.py000066400000000000000000000013171317371701200170500ustar00rootroot00000000000000import gc from collections import defaultdict def get_type_refcount(pattern=None): """ Retrieves refcount of types for which their name matches `pattern`. Parameters ---------- pattern : str Consider only types that have `pattern` in their name. Returns ------- dict The key is the type name and the value is the refcount. """ gc.collect() refcounts_per_type = defaultdict(int) for obj in gc.get_objects(): obj_type_name = type(obj).__name__ # If `pattern` is not None, keep only matching types. if pattern is None or pattern in obj_type_name: refcounts_per_type[obj_type_name] += 1 return refcounts_per_type dipy-0.13.0/dipy/testing/spherepoints.py000066400000000000000000000010511317371701200202560ustar00rootroot00000000000000''' Create example sphere points ''' import numpy as np def _make_pts(): ''' Make points around sphere quadrants ''' thetas = np.arange(1, 4) * np.pi/4 phis = np.arange(8) * np.pi/4 north_pole = (0, 0, 1) south_pole = (0, 0, -1) points = [north_pole, south_pole] for theta in thetas: for phi in phis: x = np.sin(theta) * np.cos(phi) y = np.sin(theta) * np.sin(phi) z = np.cos(theta) points.append((x, y, z)) return np.array(points) sphere_points = _make_pts() dipy-0.13.0/dipy/testing/tests/000077500000000000000000000000001317371701200163265ustar00rootroot00000000000000dipy-0.13.0/dipy/testing/tests/__init__.py000066400000000000000000000000311317371701200204310ustar00rootroot00000000000000# Init for testing/tests dipy-0.13.0/dipy/testing/tests/test_decorators.py000066400000000000000000000025231317371701200221060ustar00rootroot00000000000000""" Testing decorators module """ import numpy as np from numpy.testing import (assert_almost_equal, assert_array_equal) from nose.tools import (assert_true, assert_false, assert_raises, assert_equal, assert_not_equal) from dipy.testing.decorators import doctest_skip_parser def test_skipper(): def f(): pass docstring = \ """ Header >>> something # skip if not HAVE_AMODULE >>> something + else >>> a = 1 # skip if not HAVE_BMODULE >>> something2 # skip if HAVE_AMODULE """ f.__doc__ = docstring global HAVE_AMODULE, HAVE_BMODULE HAVE_AMODULE = False HAVE_BMODULE = True f2 = doctest_skip_parser(f) assert_true(f is f2) assert_equal(f2.__doc__, """ Header >>> something # doctest: +SKIP >>> something + else >>> a = 1 >>> something2 """) HAVE_AMODULE = True HAVE_BMODULE = False f.__doc__ = docstring f2 = doctest_skip_parser(f) assert_true(f is f2) assert_equal(f2.__doc__, """ Header >>> something >>> something + else >>> a = 1 # doctest: +SKIP >>> something2 # doctest: +SKIP """) del HAVE_AMODULE f.__doc__ = docstring assert_raises(NameError, doctest_skip_parser, f) dipy-0.13.0/dipy/testing/tests/test_memory.py000066400000000000000000000005311317371701200212460ustar00rootroot00000000000000from nose.tools import 
assert_equal from dipy.testing.memory import get_type_refcount def test_get_type_refcount(): list_ref_count = get_type_refcount("list") A = list() assert_equal(get_type_refcount("list")["list"], list_ref_count["list"]+1) del A assert_equal(get_type_refcount("list")["list"], list_ref_count["list"]) dipy-0.13.0/dipy/tests/000077500000000000000000000000001317371701200146515ustar00rootroot00000000000000dipy-0.13.0/dipy/tests/__init__.py000066400000000000000000000000341317371701200167570ustar00rootroot00000000000000# Make dipy.tests a package dipy-0.13.0/dipy/tests/scriptrunner.py000066400000000000000000000143431317371701200177660ustar00rootroot00000000000000""" Module to help tests check script output Provides class to be instantiated in tests that check scripts. Usually works something like this in a test module:: from dipy.tests.scriptrunner import ScriptRunner runner = ScriptRunner() Then, in the tests, something like:: code, stdout, stderr = runner.run_command(['my-script', my_arg]) assert_equal(code, 0) assert_equal(stdout, b'This script ran OK') """ import sys import os from os.path import (dirname, join as pjoin, isfile, isdir, realpath, pathsep) from subprocess import Popen, PIPE try: # Python 2 string_types = basestring, except NameError: # Python 3 string_types = str, def _get_package(): """ Workaround for missing ``__package__`` in Python 3.2 """ if(('__package__' in globals()) and (__package__ is not None)): return __package__ return __name__.split('.', 1)[0] # Same as __package__ for Python 2.6, 2.7 and >= 3.3 MY_PACKAGE = _get_package() def local_script_dir(script_sdir): """ Get local script directory if running in development dir, else None """ # Check for presence of scripts in development directory. ``realpath`` # allows for the situation where the development directory has been linked # into the path. package_path = dirname(__import__(MY_PACKAGE).__file__) above_us = realpath(pjoin(package_path, '..')) devel_script_dir = pjoin(above_us, script_sdir) if isfile(pjoin(above_us, 'setup.py')) and isdir(devel_script_dir): return devel_script_dir return None def local_module_dir(module_name): """ Get local module directory if running in development dir, else None """ mod = __import__(module_name) containing_path = dirname(dirname(realpath(mod.__file__))) if containing_path == realpath(os.getcwd()): return containing_path return None class ScriptRunner(object): """ Class to run scripts and return output Finds local scripts and local modules if running in the development directory, otherwise finds system scripts and modules. """ def __init__(self, script_sdir='scripts', module_sdir=MY_PACKAGE, debug_print_var=None, output_processor=lambda x: x): """ Init ScriptRunner instance Parameters ---------- script_sdir : str, optional Name of subdirectory in top-level directory (directory containing setup.py), to find scripts in development tree. Typically 'scripts', but might be 'bin'. module_sdir : str, optional Name of subdirectory in top-level directory (directory containing setup.py), to find main package directory. debug_print_vsr : str, optional Name of environment variable that indicates whether to do debug printing or no. output_processor : callable Callable to run on the stdout, stderr outputs before returning them. Use this to convert bytes to unicode, strip whitespace, etc. 
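        Examples
        --------
        Construction mirroring the use in ``dipy/tests/test_scripts.py``;
        ``my-script`` and ``my_arg`` are placeholders::

            runner = ScriptRunner(script_sdir='bin',
                                  debug_print_var='NIPY_DEBUG_PRINT')
            code, stdout, stderr = runner.run_command(['my-script', my_arg])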
""" self.local_script_dir = local_script_dir(script_sdir) self.local_module_dir = local_module_dir(module_sdir) if debug_print_var is None: debug_print_var = '{0}_DEBUG_PRINT'.format(module_sdir.upper()) self.debug_print = os.environ.get(debug_print_var, False) self.output_processor = output_processor def run_command(self, cmd, check_code=True): """ Run command sequence `cmd` returning exit code, stdout, stderr Parameters ---------- cmd : str or sequence string with command name or sequence of strings defining command check_code : {True, False}, optional If True, raise error for non-zero return code Returns ------- returncode : int return code from execution of `cmd` stdout : bytes (python 3) or str (python 2) stdout from `cmd` stderr : bytes (python 3) or str (python 2) stderr from `cmd` """ if isinstance(cmd, string_types): cmd = [cmd] else: cmd = list(cmd) if self.local_script_dir is not None: # Windows can't run script files without extensions # natively so we need to run local scripts (no extensions) # via the Python interpreter. On Unix, we might have the # wrong incantation for the Python interpreter # in the hash bang first line in the source file. So, either way, # run the script through the Python interpreter cmd = [sys.executable, pjoin(self.local_script_dir, cmd[0])] + cmd[1:] elif os.name == 'nt': # Need .bat file extension for windows cmd[0] += '.bat' if os.name == 'nt': # Quote any arguments with spaces. The quotes delimit the arguments # on Windows, and the arguments might be files paths with spaces. # On Unix the list elements are each separate arguments. cmd = ['"{0}"'.format(c) if ' ' in c else c for c in cmd] if self.debug_print: print("Running command '%s'" % cmd) env = os.environ if self.local_module_dir is not None: # module likely comes from the current working directory. 
# We might need that directory on the path if we're running # the scripts from a temporary directory env = env.copy() pypath = env.get('PYTHONPATH', None) if pypath is None: env['PYTHONPATH'] = self.local_module_dir else: env['PYTHONPATH'] = self.local_module_dir + pathsep + pypath proc = Popen(cmd, stdout=PIPE, stderr=PIPE, env=env) stdout, stderr = proc.communicate() if proc.poll() is None: proc.terminate() if check_code and proc.returncode != 0: raise RuntimeError( """Command "{0}" failed with stdout ------ {1} stderr ------ {2} """.format(cmd, stdout, stderr)) opp = self.output_processor return proc.returncode, opp(stdout), opp(stderr) dipy-0.13.0/dipy/tests/test_scripts.py000066400000000000000000000117771317371701200177660ustar00rootroot00000000000000# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Test scripts Run scripts and check outputs """ ''' from __future__ import division, print_function, absolute_import import glob import os import shutil from os.path import (dirname, join as pjoin, abspath) from nose.tools import assert_true, assert_false, assert_equal import numpy.testing as nt import nibabel as nib from nibabel.tmpdirs import InTemporaryDirectory from dipy.data import get_data # Quickbundles command-line requires matplotlib: try: import matplotlib no_mpl = False except ImportError: no_mpl = True from dipy.tests.scriptrunner import ScriptRunner runner = ScriptRunner( script_sdir='bin', debug_print_var='NIPY_DEBUG_PRINT') run_command = runner.run_command DATA_PATH = abspath(pjoin(dirname(__file__), 'data')) def test_dipy_peak_extraction(): # test dipy_peak_extraction script cmd = 'dipy_peak_extraction' code, stdout, stderr = run_command(cmd, check_code=False) assert_equal(code, 2) def test_dipy_fit_tensor(): # test dipy_fit_tensor script cmd = 'dipy_fit_tensor' code, stdout, stderr = run_command(cmd, check_code=False) assert_equal(code, 2) def test_dipy_sh_estimate(): # test dipy_sh_estimate script cmd = 'dipy_sh_estimate' code, stdout, stderr = run_command(cmd, check_code=False) assert_equal(code, 2) def assert_image_shape_affine(filename, shape, affine): assert_true(os.path.isfile(filename)) image = nib.load(filename) assert_equal(image.shape, shape) nt.assert_array_almost_equal(image.affine, affine) def test_dipy_fit_tensor_again(): with InTemporaryDirectory(): dwi, bval, bvec = get_data("small_25") # Copy data to tmp directory shutil.copyfile(dwi, "small_25.nii.gz") shutil.copyfile(bval, "small_25.bval") shutil.copyfile(bvec, "small_25.bvec") # Call script cmd = ["dipy_fit_tensor", "--mask=none", "small_25.nii.gz"] out = run_command(cmd) assert_equal(out[0], 0) # Get expected values img = nib.load("small_25.nii.gz") affine = img.affine shape = img.shape[:-1] # Check expected outputs assert_image_shape_affine("small_25_fa.nii.gz", shape, affine) assert_image_shape_affine("small_25_t2di.nii.gz", shape, affine) assert_image_shape_affine("small_25_dirFA.nii.gz", shape, affine) assert_image_shape_affine("small_25_ad.nii.gz", shape, affine) assert_image_shape_affine("small_25_md.nii.gz", shape, affine) assert_image_shape_affine("small_25_rd.nii.gz", shape, affine) with InTemporaryDirectory(): dwi, bval, bvec = get_data("small_25") # Copy data to tmp directory shutil.copyfile(dwi, "small_25.nii.gz") shutil.copyfile(bval, "small_25.bval") shutil.copyfile(bvec, "small_25.bvec") # Call script cmd = ["dipy_fit_tensor", "--save-tensor", "--mask=none", "small_25.nii.gz"] out = run_command(cmd) assert_equal(out[0], 
0) # Get expected values img = nib.load("small_25.nii.gz") affine = img.affine shape = img.shape[:-1] # Check expected outputs assert_image_shape_affine("small_25_fa.nii.gz", shape, affine) assert_image_shape_affine("small_25_t2di.nii.gz", shape, affine) assert_image_shape_affine("small_25_dirFA.nii.gz", shape, affine) assert_image_shape_affine("small_25_ad.nii.gz", shape, affine) assert_image_shape_affine("small_25_md.nii.gz", shape, affine) assert_image_shape_affine("small_25_rd.nii.gz", shape, affine) # small_25_tensor saves the tensor as a symmetric matrix following # the nifti standard. ten_shape = shape + (1, 6) assert_image_shape_affine("small_25_tensor.nii.gz", ten_shape, affine) @nt.dec.skipif(no_mpl) def test_qb_commandline(): with InTemporaryDirectory(): tracks_file = get_data('fornix') cmd = ["dipy_quickbundles", tracks_file, '--pkl_file', 'mypickle.pkl', '--out_file', 'tracks300.trk'] out = run_command(cmd) assert_equal(out[0], 0) @nt.dec.skipif(no_mpl) def test_qb_commandline_output_path_handling(): with InTemporaryDirectory(): # Create temporary subdirectory for input and for output os.mkdir('work') os.mkdir('output') os.chdir('work') tracks_file = get_data('fornix') # Need to specify an output directory with a "../" style path # to trigger old bug. cmd = ["dipy_quickbundles", tracks_file, '--pkl_file', 'mypickle.pkl', '--out_file', os.path.join('..', 'output', 'tracks300.trk')] out = run_command(cmd) assert_equal(out[0], 0) # Make sure the files were created in the output directory os.chdir('../') output_files_list = glob.glob('output/tracks300_*.trk') assert_true(output_files_list) ''' dipy-0.13.0/dipy/tracking/000077500000000000000000000000001317371701200153115ustar00rootroot00000000000000dipy-0.13.0/dipy/tracking/__init__.py000066400000000000000000000003331317371701200174210ustar00rootroot00000000000000# Init for tracking module """ Tracking objects """ from nibabel.streamlines import ArraySequence as Streamlines # Test callable from numpy.testing import Tester test = Tester().test bench = Tester().bench del Tester dipy-0.13.0/dipy/tracking/_utils.py000066400000000000000000000043031317371701200171620ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import """This is a helper module for dipy.tracking.utils""" from warnings import warn import numpy as np def _voxel_size_deprecated(): m = DeprecationWarning('the voxel_size argument to this function is ' 'deprecated, use the affine argument instead') warn(m) def _mapping_to_voxel(affine, voxel_size): """Inverts affine and returns a mapping so voxel coordinates. This function is an implementation detail and only meant to be used with ``_to_voxel_coordinates``. Parameters ---------- affine : array_like (4, 4) The mapping from voxel indices, [i, j, k], to real world coordinates. The inverse of this mapping is used unless `affine` is None. voxel_size : array_like (3,) Used to support deprecated trackvis space. Return ------ lin_T : array (3, 3) Transpose of the linear part of the mapping to voxel space, (ie ``inv(affine)[:3, :3].T``) offset : array or scalar Offset part of the mapping (ie, ``inv(affine)[:3, 3]``) + ``.5``. The half voxel shift is so that truncating the result of this mapping will give the correct integer voxel coordinate. Raises ------ ValueError If both affine and voxel_size are None. 
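    Examples
    --------
    A minimal sketch of the round trip through ``_to_voxel_coordinates``
    (the affine below is illustrative: 2 mm isotropic voxels with a 10 mm
    offset)::

        affine = np.array([[2., 0., 0., 10.],
                           [0., 2., 0., 10.],
                           [0., 0., 2., 10.],
                           [0., 0., 0., 1.]])
        lin_T, offset = _mapping_to_voxel(affine, None)
        # [10, 10, 10] is the center of voxel [0, 0, 0] for this affine
        _to_voxel_coordinates(np.array([[10., 10., 10.]]), lin_T, offset)
        # -> array([[0, 0, 0]])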
""" if affine is not None: affine = np.array(affine, dtype=float) inv_affine = np.linalg.inv(affine) lin_T = inv_affine[:3, :3].T.copy() offset = inv_affine[:3, 3] + .5 elif voxel_size is not None: _voxel_size_deprecated() voxel_size = np.asarray(voxel_size, dtype=float) lin_T = np.diag(1. / voxel_size) offset = 0. else: raise ValueError("no affine specified") return lin_T, offset def _to_voxel_coordinates(streamline, lin_T, offset): """Applies a mapping from streamline coordinates to voxel_coordinates, raises an error for negative voxel values.""" inds = np.dot(streamline, lin_T) inds += offset if inds.min().round(decimals=6) < 0: raise IndexError('streamline has points that map to negative voxel' ' indices') return inds.astype(int) dipy-0.13.0/dipy/tracking/benchmarks/000077500000000000000000000000001317371701200174265ustar00rootroot00000000000000dipy-0.13.0/dipy/tracking/benchmarks/__init__.py000066400000000000000000000000001317371701200215250ustar00rootroot00000000000000dipy-0.13.0/dipy/tracking/benchmarks/bench_streamline.py000066400000000000000000000120001317371701200232730ustar00rootroot00000000000000""" Benchmarks for functions related to streamline Run all benchmarks with:: import dipy.tracking as dipytracking dipytracking.bench() If you have doctests enabled by default in nose (with a noserc file or environment variable), and you have a numpy version <= 1.6.1, this will also run the doctests, let's hope they pass. Run this benchmark with: nosetests -s --match '(?:^|[\\b_\\.//-])[Bb]ench' bench_streamline.py """ import numpy as np from numpy.testing import measure from numpy.testing import assert_array_equal, assert_array_almost_equal from dipy.data import get_data from nibabel import trackvis as tv from dipy.tracking.streamline import (set_number_of_points, length, compress_streamlines) from dipy.tracking.tests.test_streamline import (set_number_of_points_python, length_python, compress_streamlines_python) from dipy.tracking import Streamlines DATA = {} def setup(): global DATA rng = np.random.RandomState(42) nb_streamlines = 20000 min_nb_points = 2 max_nb_points = 100 DATA['rng'] = rng DATA['nb_streamlines'] = nb_streamlines DATA['streamlines'] = generate_streamlines(nb_streamlines, min_nb_points, max_nb_points, rng=rng) DATA['streamlines_arrseq'] = Streamlines(DATA['streamlines']) def generate_streamlines(nb_streamlines, min_nb_points, max_nb_points, rng): streamlines = [rng.rand(*(rng.randint(min_nb_points, max_nb_points), 3)) for _ in range(nb_streamlines)] return streamlines def bench_set_number_of_points(): repeat = 5 nb_points = 42 nb_streamlines = DATA['nb_streamlines'] streamlines = DATA["streamlines"] # Streamlines as a list of ndarrays. msg = "Timing set_number_of_points() with {0:,} streamlines." print(msg.format(nb_streamlines * repeat)) cython_time = measure("set_number_of_points(streamlines, nb_points)", repeat) print("Cython time: {0:.3f} sec".format(cython_time)) python_time = measure("[set_number_of_points_python(s, nb_points)" " for s in streamlines]", repeat) print("Python time: {0:.2f} sec".format(python_time)) print("Speed up of {0:.2f}x".format(python_time/cython_time)) # Make sure it produces the same results. 
assert_array_almost_equal([set_number_of_points_python(s) for s in DATA["streamlines"]], set_number_of_points(DATA["streamlines"])) streamlines = DATA['streamlines_arrseq'] cython_time_arrseq = measure("set_number_of_points(streamlines, nb_points)", repeat) print("Cython time (ArrSeq): {0:.3f} sec".format(cython_time_arrseq)) print("Speed up of {0:.2f}x".format(python_time/cython_time_arrseq)) # Make sure it produces the same results. assert_array_equal(set_number_of_points(DATA["streamlines"]), set_number_of_points(DATA["streamlines_arrseq"])) def bench_length(): repeat = 10 nb_streamlines = DATA['nb_streamlines'] streamlines = DATA["streamlines"] # Streamlines as a list of ndarrays. msg = "Timing length() with {0:,} streamlines." print(msg.format(nb_streamlines * repeat)) python_time = measure("[length_python(s) for s in streamlines]", repeat) print("Python time: {0:.2f} sec".format(python_time)) cython_time = measure("length(streamlines)", repeat) print("Cython time: {0:.3f} sec".format(cython_time)) print("Speed up of {0:.2f}x".format(python_time/cython_time)) # Make sure it produces the same results. assert_array_almost_equal([length_python(s) for s in DATA["streamlines"]], length(DATA["streamlines"])) streamlines = DATA['streamlines_arrseq'] cython_time_arrseq = measure("length(streamlines)", repeat) print("Cython time (ArrSeq): {0:.3f} sec".format(cython_time_arrseq)) print("Speed up of {0:.2f}x".format(python_time/cython_time_arrseq)) # Make sure it produces the same results. assert_array_equal(length(DATA["streamlines"]), length(DATA["streamlines_arrseq"])) def bench_compress_streamlines(): repeat = 10 fname = get_data('fornix') streams, hdr = tv.read(fname) streamlines = [i[0] for i in streams] print("Timing compress_streamlines() in Cython" " ({0} streamlines)".format(len(streamlines))) cython_time = measure("compress_streamlines(streamlines)", repeat) print("Cython time: {0:.3}sec".format(cython_time)) del streamlines fname = get_data('fornix') streams, hdr = tv.read(fname) streamlines = [i[0] for i in streams] python_time = measure("map(compress_streamlines_python, streamlines)", repeat) print("Python time: {0:.2}sec".format(python_time)) print("Speed up of {0}x".format(python_time/cython_time)) del streamlines dipy-0.13.0/dipy/tracking/distances.pyx000066400000000000000000001744161317371701200200450ustar00rootroot00000000000000 # A type of -*- python -*- file """ Optimized track distances, similarities and distanch clustering algorithms """ # cython: profile=True # cython: embedsignature=True cimport cython from libc.stdlib cimport calloc, realloc, free from libc.string cimport memcpy import time import numpy as np cimport numpy as cnp cdef extern from "dpy_math.h" nogil: double floor(double x) float sqrt(float x) float fabs(float x) float acos(float x ) bint dpy_isnan(double x) double dpy_log2(double x) #@cython.boundscheck(False) #@cython.wraparound(False) DEF biggest_double = 1.79769e+308 #np.finfo('f8').max DEF biggest_float = 3.4028235e+38 #np.finfo('f4').max cdef inline cnp.ndarray[cnp.float32_t, ndim=1] as_float_3vec(object vec): ''' Utility function to convert object to 3D float vector ''' return np.squeeze(np.asarray(vec, dtype=np.float32)) cdef inline float* asfp(cnp.ndarray pt): return pt.data def normalized_3vec(vec): ''' Return normalized 3D vector Vector divided by Euclidean (L2) norm Parameters ---------- vec : array-like shape (3,) Returns ------- vec_out : array shape (3,) ''' cdef cnp.ndarray[cnp.float32_t, ndim=1] vec_in = as_float_3vec(vec) cdef 
cnp.ndarray[cnp.float32_t, ndim=1] vec_out = np.zeros((3,), np.float32) cnormalized_3vec(vec_in.data, vec_out.data) return vec_out def norm_3vec(vec): ''' Euclidean (L2) norm of length 3 vector Parameters ---------- vec : array-like shape (3,) Returns ------- norm : float Euclidean norm ''' cdef cnp.ndarray[cnp.float32_t, ndim=1] vec_in = as_float_3vec(vec) return cnorm_3vec(vec_in.data) cdef inline float cnorm_3vec(float *vec): ''' Calculate Euclidean norm of input vector Parameters ---------- vec : float * length 3 float vector Returns ------- norm : float Euclidean norm ''' cdef float v0, v1, v2 v0 = vec[0] v1 = vec[1] v2 = vec[2] return sqrt(v0 * v0 + v1*v1 + v2*v2) cdef inline void cnormalized_3vec(float *vec_in, float *vec_out): ''' Calculate and fill normalized 3D vector Parameters ---------- vec_in : float * Length 3 vector to normalize vec_out : float * Memory into which to write normalized length 3 vector ''' cdef float norm = cnorm_3vec(vec_in) cdef int i for i in range(3): vec_out[i] = vec_in[i] / norm def inner_3vecs(vec1, vec2): cdef cnp.ndarray[cnp.float32_t, ndim=1] fvec1 = as_float_3vec(vec1) cdef cnp.ndarray[cnp.float32_t, ndim=1] fvec2 = as_float_3vec(vec2) return cinner_3vecs(fvec1.data, fvec2.data) cdef inline float cinner_3vecs(float *vec1, float *vec2) nogil: cdef int i cdef float ip = 0 for i from 0<=i<3: ip += vec1[i]*vec2[i] return ip def sub_3vecs(vec1, vec2): cdef cnp.ndarray[cnp.float32_t, ndim=1] fvec1 = as_float_3vec(vec1) cdef cnp.ndarray[cnp.float32_t, ndim=1] fvec2 = as_float_3vec(vec2) cdef cnp.ndarray[cnp.float32_t, ndim=1] vec_out = np.zeros((3,), np.float32) csub_3vecs(fvec1.data, fvec2.data, vec_out.data) return vec_out cdef inline void csub_3vecs(float *vec1, float *vec2, float *vec_out) nogil: cdef int i for i from 0<=i<3: vec_out[i] = vec1[i]-vec2[i] def add_3vecs(vec1, vec2): cdef cnp.ndarray[cnp.float32_t, ndim=1] fvec1 = as_float_3vec(vec1) cdef cnp.ndarray[cnp.float32_t, ndim=1] fvec2 = as_float_3vec(vec2) cdef cnp.ndarray[cnp.float32_t, ndim=1] vec_out = np.zeros((3,), np.float32) cadd_3vecs(fvec1.data, fvec2.data, vec_out.data) return vec_out cdef inline void cadd_3vecs(float *vec1, float *vec2, float *vec_out) nogil: cdef int i for i from 0<=i<3: vec_out[i] = vec1[i]+vec2[i] def mul_3vecs(vec1, vec2): cdef cnp.ndarray[cnp.float32_t, ndim=1] fvec1 = as_float_3vec(vec1) cdef cnp.ndarray[cnp.float32_t, ndim=1] fvec2 = as_float_3vec(vec2) cdef cnp.ndarray[cnp.float32_t, ndim=1] vec_out = np.zeros((3,), np.float32) cmul_3vecs(fvec1.data, fvec2.data, vec_out.data) return vec_out cdef inline void cmul_3vecs(float *vec1, float *vec2, float *vec_out) nogil: cdef int i for i from 0<=i<3: vec_out[i] = vec1[i]*vec2[i] def mul_3vec(a, vec): cdef cnp.ndarray[cnp.float32_t, ndim=1] fvec = as_float_3vec(vec) cdef cnp.ndarray[cnp.float32_t, ndim=1] vec_out = np.zeros((3,), np.float32) cmul_3vec(a,fvec.data, vec_out.data) return vec_out cdef inline void cmul_3vec(float a, float *vec, float *vec_out) nogil: cdef int i for i from 0<=i<3: vec_out[i] = a*vec[i] # float 32 dtype for casting cdef cnp.dtype f32_dt = np.dtype(np.float32) def cut_plane(tracks, ref): ''' Extract divergence vectors and points of intersection between planes normal to the reference fiber and other tracks Parameters ---------- tracks : sequence of tracks as arrays, shape (N1,3) .. 
(Nm,3) ref : array, shape (N,3) reference track Returns ------- hits : sequence list of points and rcds (radial coefficient of divergence) Notes ----- The orthogonality relationship ``np.inner(hits[p][q][0:3]-ref[p+1],ref[p+2]-ref[r][p+1])`` will hold throughout for every point q in the hits plane at point (p+1) on the reference track. Examples -------- >>> refx = np.array([[0,0,0],[1,0,0],[2,0,0],[3,0,0]],dtype='float32') >>> bundlex = [np.array([[0.5,1,0],[1.5,2,0],[2.5,3,0]],dtype='float32')] >>> res = cut_plane(bundlex,refx) >>> len(res) 2 >>> print(res[0]) [[ 1. 1.5 0. 0.70710683 0. ]] >>> print(res[1]) [[ 2. 2.5 0. 0.70710677 0. ]] ''' cdef: size_t n_hits, hit_no, max_hit_len float alpha,beta,lrq,rcd,lhp,ld cnp.ndarray[cnp.float32_t, ndim=2] ref32 cnp.ndarray[cnp.float32_t, ndim=2] track object hits cnp.ndarray[cnp.float32_t, ndim=1] one_hit float *hit_ptr cnp.ndarray[cnp.float32_t, ndim=2] hit_arr object Hit=[] # make reference fiber usable type ref32 = np.ascontiguousarray(ref, f32_dt) # convert all the tracks to something we can work with. Get track # lengths cdef: size_t N_tracks=len(tracks) cnp.ndarray[cnp.uint64_t, ndim=1] track_lengths size_t t_no, N_track cdef object tracks32 = [] track_lengths = np.empty((N_tracks,), dtype=np.uint64) for t_no in range(N_tracks): track = np.ascontiguousarray(tracks[t_no], f32_dt) track_lengths[t_no] = track.shape[0] tracks32.append(track) # set up loop across reference fiber points cdef: size_t N_ref = ref32.shape[0] size_t p_no, q_no float *this_ref_p, *next_ref_p, *this_trk_p, *next_trk_p float along[3], normal[3] float qMp[3], rMp[3], rMq[3], pMq[3] float hit[3], hitMp[3], *delta # List used for storage of hits. We will fill this with lots of # small numpy arrays, and reuse them over the reference track point # loops. 
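    # Geometry used in the loops below (an illustrative note): for the plane
    # through the reference point p with unit normal n, a track segment
    # q -> r crosses the plane when <n, q - p> and <n, r - p> have opposite
    # signs, and the crossing point is h = q + alpha * (r - q) with
    # alpha = <n, p - q> / <n, r - q>.
    # For example (plain NumPy sketch):
    #     n = np.array([1., 0., 0.]); p = np.zeros(3)
    #     q = np.array([-1., 2., 0.]); r = np.array([1., 2., 0.])
    #     alpha = np.inner(p - q, n) / np.inner(r - q, n)  # 0.5
    #     hit = q + alpha * (r - q)                        # [0., 2., 0.]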
max_hit_len = 0 hits = [] # for every point along the reference track next_ref_p = asfp(ref32[0]) for p_no in range(N_ref-1): # extract point to point vector into `along` this_ref_p = next_ref_p next_ref_p = asfp(ref32[p_no+1]) csub_3vecs(next_ref_p, this_ref_p, along) # normalize cnormalized_3vec(along, normal) # initialize index for hits hit_no = 0 # for every track for t_no in range(N_tracks): track=tracks32[t_no] N_track = track_lengths[t_no] # for every point on the track next_trk_p = asfp(track[0]) for q_no in range(N_track-1): # p = ref32[p_no] # q = track[q_no] # r = track[q_no+1] # float* versions of above: p == this_ref_p this_trk_p = next_trk_p # q next_trk_p = asfp(track[q_no+1]) # r #if np.inner(normal,q-p)*np.inner(normal,r-p) <= 0: csub_3vecs(this_trk_p, this_ref_p, qMp) # q-p csub_3vecs(next_trk_p, this_ref_p, rMp) # r-p if (cinner_3vecs(normal, qMp) * cinner_3vecs(normal, rMp)) <=0: #if np.inner((r-q),normal) != 0: csub_3vecs(next_trk_p, this_trk_p, rMq) beta = cinner_3vecs(rMq, normal) if beta !=0: #alpha = np.inner((p-q),normal)/np.inner((r-q),normal) csub_3vecs(this_ref_p, this_trk_p, pMq) alpha = (cinner_3vecs(pMq, normal) / cinner_3vecs(rMq, normal)) if alpha < 1: # hit = q+alpha*(r-q) hit[0] = this_trk_p[0]+alpha*rMq[0] hit[1] = this_trk_p[1]+alpha*rMq[1] hit[2] = this_trk_p[2]+alpha*rMq[2] # h-p csub_3vecs(hit, this_ref_p, hitMp) # |h-p| lhp = cnorm_3vec(hitMp) delta = rMq # just renaming # |r-q| == |delta| ld = cnorm_3vec(delta) ''' # Summary of stuff in comments # divergence =((r-q)-inner(r-q,normal)*normal)/|r-q| div[0] = (rMq[0]-beta*normal[0]) / ld div[1] = (rMq[1]-beta*normal[1]) / ld div[2] = (rMq[2]-beta*normal[2]) / ld # radial coefficient of divergence d.(h-p)/|h-p| ''' # radial divergence # np.inner(delta, (hit-p)) / (ld * lhp) if lhp > 0: rcd = fabs(cinner_3vecs(delta, hitMp) / (ld*lhp)) else: rcd=0 # hit data into array if hit_no >= max_hit_len: one_hit = np.empty((5,), dtype=f32_dt) hits.append(one_hit) else: one_hit = hits[hit_no] hit_ptr = one_hit.data hit_ptr[0] = hit[0] hit_ptr[1] = hit[1] hit_ptr[2] = hit[2] hit_ptr[3] = rcd hit_ptr[4] = t_no hit_no += 1 # convert hits list to hits array n_hits = hit_no if n_hits > max_hit_len: max_hit_len = n_hits hit_arr = np.empty((n_hits,5), dtype=f32_dt) for hit_no in range(n_hits): hit_arr[hit_no] = hits[hit_no] Hit.append(hit_arr) #Div.append(divs[1:]) return Hit[1:] def most_similar_track_mam(tracks,metric='avg'): ''' Find the most similar track in a bundle using distances calculated from Zhang et. al 2008. Parameters ---------- tracks : sequence of tracks as arrays, shape (N1,3) .. (Nm,3) metric : str 'avg', 'min', 'max' Returns ------- si : int index of the most similar track in tracks. This can be used as a reference track for a bundle. 
s : array, shape (len(tracks),) similarities between tracks[si] and the rest of the tracks in the bundle Notes ----- A vague description of this function is given below: for (i,j) in tracks_combinations_of_2: calculate the mean_closest_distance from i to j (mcd_i) calculate the mean_closest_distance from j to i (mcd_j) if 'avg': s holds the average similarities if 'min': s holds the minimum similarities if 'max': s holds the maximum similarities si holds the index of the track with min {avg,min,max} average metric ''' cdef: size_t i, j, lent int metric_type if metric=='avg': metric_type = 0 elif metric == 'min': metric_type = 1 elif metric == 'max': metric_type = 2 else: raise ValueError('Metric should be one of avg, min, max') # preprocess tracks cdef: size_t longest_track_len = 0, track_len cnp.ndarray[object, ndim=1] tracks32 lent = len(tracks) tracks32 = np.zeros((lent,), dtype=object) # process tracks to predictable memory layout, find longest track for i in range(lent): tracks32[i] = np.ascontiguousarray(tracks[i], dtype=f32_dt) track_len = tracks32[i].shape[0] if track_len > longest_track_len: longest_track_len = track_len # buffer for distances of found track to other tracks cdef: cnp.ndarray[cnp.double_t, ndim=1] track2others track2others = np.zeros((lent,), dtype=np.double) # use this buffer also for working space containing summed distances # of candidate track to all other tracks cdef cnp.double_t *sum_track2others = track2others.data # preallocate buffer array for track distance calculations cdef: cnp.ndarray [cnp.float32_t, ndim=1] distances_buffer cnp.float32_t *t1_ptr, *t2_ptr, *min_buffer, distance distances_buffer = np.zeros((longest_track_len*2,), dtype=np.float32) min_buffer = distances_buffer.data # cycle over tracks cdef: cnp.ndarray [cnp.float32_t, ndim=2] t1, t2 size_t t1_len, t2_len for i from 0 <= i < lent-1: t1 = tracks32[i] t1_len = t1.shape[0] t1_ptr = t1.data for j from i+1 <= j < lent: t2 = tracks32[j] t2_len = t2.shape[0] t2_ptr = t2.data distance = czhang(t1_len, t1_ptr, t2_len, t2_ptr, min_buffer, metric_type) # get metric sum_track2others[i]+=distance sum_track2others[j]+=distance # find track with smallest summed metric with other tracks cdef double mn = sum_track2others[0] cdef size_t si = 0 for i in range(lent): if sum_track2others[i] < mn: si = i mn = sum_track2others[i] # recalculate distance of this track from the others t1 = tracks32[si] t1_len = t1.shape[0] t1_ptr = t1.data for j from 0 <= j < lent: t2 = tracks32[j] t2_len = t2.shape[0] t2_ptr = t2.data track2others[j] = czhang(t1_len, t1_ptr, t2_len, t2_ptr, min_buffer, metric_type) return si, track2others @cython.boundscheck(False) @cython.wraparound(False) def bundles_distances_mam(tracksA, tracksB, metric='avg'): ''' Calculate distances between list of tracks A and list of tracks B Parameters ---------- tracksA : sequence of tracks as arrays, shape (N1,3) .. (Nm,3) tracksB : sequence of tracks as arrays, shape (N1,3) .. 
(Nm,3) metric : str 'avg', 'min', 'max' Returns ------- DM : array, shape (len(tracksA), len(tracksB)) distances between tracksA and tracksB according to metric ''' cdef: size_t i, j, lentA, lentB int metric_type if metric=='avg': metric_type = 0 elif metric == 'min': metric_type = 1 elif metric == 'max': metric_type = 2 else: raise ValueError('Metric should be one of avg, min, max') # preprocess tracks cdef: size_t longest_track_len = 0, track_len size_t longest_track_lenA = 0, longest_track_lenB = 0 cnp.ndarray[object, ndim=1] tracksA32 cnp.ndarray[object, ndim=1] tracksB32 cnp.ndarray[cnp.double_t, ndim=2] DM lentA = len(tracksA) lentB = len(tracksB) tracksA32 = np.zeros((lentA,), dtype=object) tracksB32 = np.zeros((lentB,), dtype=object) DM = np.zeros((lentA,lentB), dtype=np.double) # process tracks to predictable memory layout, find longest track for i in range(lentA): tracksA32[i] = np.ascontiguousarray(tracksA[i], dtype=f32_dt) track_len = tracksA32[i].shape[0] if track_len > longest_track_lenA: longest_track_lenA = track_len for i in range(lentB): tracksB32[i] = np.ascontiguousarray(tracksB[i], dtype=f32_dt) track_len = tracksB32[i].shape[0] if track_len > longest_track_lenB: longest_track_lenB = track_len if longest_track_lenB > longest_track_lenA: longest_track_lenA = longest_track_lenB # preallocate buffer array for track distance calculations cdef: cnp.ndarray [cnp.float32_t, ndim=1] distances_buffer cnp.float32_t *t1_ptr, *t2_ptr, *min_buffer distances_buffer = np.zeros((longest_track_lenA*2,), dtype=np.float32) min_buffer = distances_buffer.data # cycle over tracks cdef: cnp.ndarray [cnp.float32_t, ndim=2] t1, t2 size_t t1_len, t2_len for i from 0 <= i < lentA: t1 = tracksA32[i] t1_len = t1.shape[0] t1_ptr = t1.data for j from 0 <= j < lentB: t2 = tracksB32[j] t2_len = t2.shape[0] t2_ptr = t2.data DM[i,j] = czhang(t1_len, t1_ptr, t2_len, t2_ptr, min_buffer, metric_type) return DM @cython.boundscheck(False) @cython.wraparound(False) def bundles_distances_mdf(tracksA, tracksB): ''' Calculate distances between list of tracks A and list of tracks B All tracks need to have the same number of points Parameters ---------- tracksA : sequence of tracks as arrays, [(N,3) .. (N,3)] tracksB : sequence of tracks as arrays, [(N,3) .. 
(N,3)] Returns ------- DM : array, shape (len(tracksA), len(tracksB)) distances between tracksA and tracksB according to metric See Also --------- dipy.metrics.downsample ''' cdef: size_t i, j, lentA, lentB # preprocess tracks cdef: size_t longest_track_len = 0, track_len longest_track_lenA, longest_track_lenB cnp.ndarray[object, ndim=1] tracksA32 cnp.ndarray[object, ndim=1] tracksB32 cnp.ndarray[cnp.double_t, ndim=2] DM lentA = len(tracksA) lentB = len(tracksB) tracksA32 = np.zeros((lentA,), dtype=object) tracksB32 = np.zeros((lentB,), dtype=object) DM = np.zeros((lentA,lentB), dtype=np.double) # process tracks to predictable memory layout for i in range(lentA): tracksA32[i] = np.ascontiguousarray(tracksA[i], dtype=f32_dt) for i in range(lentB): tracksB32[i] = np.ascontiguousarray(tracksB[i], dtype=f32_dt) # preallocate buffer array for track distance calculations cdef: cnp.float32_t *t1_ptr, *t2_ptr, *min_buffer # cycle over tracks cdef: cnp.ndarray [cnp.float32_t, ndim=2] t1, t2 size_t t1_len, t2_len float d[2] t_len = tracksA32[0].shape[0] for i from 0 <= i < lentA: t1 = tracksA32[i] #t1_len = t1.shape[0] t1_ptr = t1.data for j from 0 <= j < lentB: t2 = tracksB32[j] #t2_len = t2.shape[0] t2_ptr = t2.data #DM[i,j] = czhang(t1_len, t1_ptr, t2_len, t2_ptr, min_buffer, metric_type) track_direct_flip_dist(t1_ptr, t2_ptr,t_len,d) if d[0] mean_t1t2: dist_val=mean_t2t1 else: dist_val=mean_t1t2 return dist_val @cython.cdivision(True) cdef inline void min_distances(size_t t1_len, cnp.float32_t *track1_ptr, size_t t2_len, cnp.float32_t *track2_ptr, cnp.float32_t *min_t2t1, cnp.float32_t *min_t1t2) nogil: cdef: cnp.float32_t *t1_pt, *t2_pt, d0, d1, d2 cnp.float32_t delta2 int t1_pi, t2_pi for t2_pi from 0<= t2_pi < t2_len: min_t2t1[t2_pi] = inf for t1_pi from 0<= t1_pi < t1_len: min_t1t2[t1_pi] = inf # pointer to current point in track 1 t1_pt = track1_ptr # calculate min squared distance between each point in the two # lines. Squared distance to delay doing the sqrt until after this # speed-critical loop for t1_pi from 0<= t1_pi < t1_len: # pointer to current point in track 2 t2_pt = track2_ptr for t2_pi from 0<= t2_pi < t2_len: d0 = t1_pt[0] - t2_pt[0] d1 = t1_pt[1] - t2_pt[1] d2 = t1_pt[2] - t2_pt[2] delta2 = d0*d0 + d1*d1 + d2*d2 if delta2 < min_t2t1[t2_pi]: min_t2t1[t2_pi]=delta2 if delta2 < min_t1t2[t1_pi]: min_t1t2[t1_pi]=delta2 t2_pt += 3 # to next point in track 2 t1_pt += 3 # to next point in track 1 # sqrt to get Euclidean distance from squared distance for t1_pi from 0<= t1_pi < t1_len: min_t1t2[t1_pi]=sqrt(min_t1t2[t1_pi]) for t2_pi from 0<= t2_pi < t2_len: min_t2t1[t2_pi]=sqrt(min_t2t1[t2_pi]) def mam_distances(xyz1,xyz2,metric='all'): ''' Min/Max/Mean Average Minimum Distance between tracks xyz1 and xyz2 Based on the metrics in Zhang, Correia, Laidlaw 2008 http://ieeexplore.ieee.org/xpl/freeabs_all.jsp?arnumber=4479455 which in turn are based on those of Corouge et al. 2004 Parameters ---------- xyz1 : array, shape (N1,3), dtype float32 xyz2 : array, shape (N2,3), dtype float32 arrays representing x,y,z of the N1 and N2 points of two tracks metrics : {'avg','min','max','all'} Metric to calculate. {'avg','min','max'} return a scalar. 'all' returns a tuple Returns ------- avg_mcd : float average_mean_closest_distance min_mcd : float minimum_mean_closest_distance max_mcd : float maximum_mean_closest_distance Notes ----- Algorithmic description Lets say we have curves A and B. 
For every point in A calculate the minimum distance from every point in B stored in minAB For every point in B calculate the minimum distance from every point in A stored in minBA find average of minAB stored as avg_minAB find average of minBA stored as avg_minBA if metric is 'avg' then return (avg_minAB + avg_minBA)/2.0 if metric is 'min' then return min(avg_minAB,avg_minBA) if metric is 'max' then return max(avg_minAB,avg_minBA) ''' cdef: cnp.ndarray[cnp.float32_t, ndim=2] track1 cnp.ndarray[cnp.float32_t, ndim=2] track2 size_t t1_len, t2_len track1 = np.ascontiguousarray(xyz1, dtype=f32_dt) t1_len = track1.shape[0] track2 = np.ascontiguousarray(xyz2, dtype=f32_dt) t2_len = track2.shape[0] # preallocate buffer array for track distance calculations cdef: cnp.float32_t *min_t2t1, *min_t1t2 cnp.ndarray [cnp.float32_t, ndim=1] distances_buffer distances_buffer = np.zeros((t1_len + t2_len,), dtype=np.float32) min_t2t1 = distances_buffer.data min_t1t2 = min_t2t1 + t2_len min_distances(t1_len, track1.data, t2_len, track2.data, min_t2t1, min_t1t2) cdef: size_t t1_pi, t2_pi cnp.float32_t mean_t2t1 = 0, mean_t1t2 = 0 for t1_pi from 0<= t1_pi < t1_len: mean_t1t2+=min_t1t2[t1_pi] mean_t1t2=mean_t1t2/t1_len for t2_pi from 0<= t2_pi < t2_len: mean_t2t1+=min_t2t1[t2_pi] mean_t2t1=mean_t2t1/t2_len if metric=='all': return ((mean_t2t1+mean_t1t2)/2.0, np.min((mean_t2t1,mean_t1t2)), np.max((mean_t2t1,mean_t1t2))) elif metric=='avg': return (mean_t2t1+mean_t1t2)/2.0 elif metric=='min': return np.min((mean_t2t1,mean_t1t2)) elif metric =='max': return np.max((mean_t2t1,mean_t1t2)) else : ValueError('Wrong argument for metric') def minimum_closest_distance(xyz1,xyz2): ''' Find the minimum distance between two curves xyz1, xyz2 Parameters ---------- xyz1 : array, shape (N1,3), dtype float32 xyz2 : array, shape (N2,3), dtype float32 arrays representing x,y,z of the N1 and N2 points of two tracks Returns ------- md : minimum distance Notes ----- Algorithmic description Lets say we have curves A and B for every point in A calculate the minimum distance from every point in B stored in minAB for every point in B calculate the minimum distance from every point in A stored in minBA find min of minAB stored in min_minAB find min of minBA stored in min_minBA Then return (min_minAB + min_minBA)/2.0 ''' cdef: cnp.ndarray[cnp.float32_t, ndim=2] track1 cnp.ndarray[cnp.float32_t, ndim=2] track2 size_t t1_len, t2_len track1 = np.ascontiguousarray(xyz1, dtype=f32_dt) t1_len = track1.shape[0] track2 = np.ascontiguousarray(xyz2, dtype=f32_dt) t2_len = track2.shape[0] # preallocate buffer array for track distance calculations cdef: cnp.float32_t *min_t2t1, *min_t1t2 cnp.ndarray [cnp.float32_t, ndim=1] distances_buffer distances_buffer = np.zeros((t1_len + t2_len,), dtype=np.float32) min_t2t1 = distances_buffer.data min_t1t2 = min_t2t1 + t2_len min_distances(t1_len, track1.data, t2_len, track2.data, min_t2t1, min_t1t2) cdef: size_t t1_pi, t2_pi double min_min_t2t1 = inf double min_min_t1t2 = inf for t1_pi in range(t1_len): if min_min_t1t2 > min_t1t2[t1_pi]: min_min_t1t2 = min_t1t2[t1_pi] for t2_pi in range(t2_len): if min_min_t2t1 > min_t2t1[t2_pi]: min_min_t2t1 = min_t2t1[t2_pi] return (min_min_t1t2+min_min_t2t1)/2.0 def lee_perpendicular_distance(start0, end0, start1, end1): ''' Calculates perpendicular distance metric for the distance between two line segments Based on Lee , Han & Whang SIGMOD07. This function assumes that norm(end0-start0)>norm(end1-start1) i.e. that the first segment will be bigger than the second one. 
Parameters ---------- start0 : float array(3,) end0 : float array(3,) start1 : float array(3,) end1 : float array(3,) Returns ------- perpendicular_distance: float Notes ----- l0 = np.inner(end0-start0,end0-start0) l1 = np.inner(end1-start1,end1-start1) k0=end0-start0 u1 = np.inner(start1-start0,k0)/l0 u2 = np.inner(end1-start0,k0)/l0 ps = start0+u1*k0 pe = start0+u2*k0 lperp1 = np.sqrt(np.inner(ps-start1,ps-start1)) lperp2 = np.sqrt(np.inner(pe-end1,pe-end1)) if lperp1+lperp2 > 0.: return (lperp1**2+lperp2**2)/(lperp1+lperp2) else: return 0. Examples -------- >>> d = lee_perpendicular_distance([0,0,0],[1,0,0],[3,4,5],[5,4,3]) >>> print('%.6f' % d) 5.787888 ''' cdef cnp.ndarray[cnp.float32_t, ndim=1] fvec1,fvec2,fvec3,fvec4 fvec1 = as_float_3vec(start0) fvec2 = as_float_3vec(end0) fvec3 = as_float_3vec(start1) fvec4 = as_float_3vec(end1) return clee_perpendicular_distance(fvec1.data,fvec2.data,fvec3.data,fvec4.data) cdef float clee_perpendicular_distance(float *start0, float *end0,float *start1, float *end1): ''' This function assumes that norm(end0-start0)>norm(end1-start1) ''' cdef: float l0,l1,ltmp,u1,u2,lperp1,lperp2 float *s_tmp,*e_tmp,k0[3],ps[3],pe[3],ps1[3],pe1[3],tmp[3] csub_3vecs(end0,start0,k0) l0 = cinner_3vecs(k0,k0) csub_3vecs(end1,start1,tmp) l1 = cinner_3vecs(tmp, tmp) #csub_3vecs(end0,start0,k0) #u1 = np.inner(start1-start0,k0)/l0 #u2 = np.inner(end1-start0,k0)/l0 csub_3vecs(start1,start0,tmp) u1 = cinner_3vecs(tmp,k0)/l0 csub_3vecs(end1,start0,tmp) u2 = cinner_3vecs(tmp,k0)/l0 cmul_3vec(u1,k0,tmp) cadd_3vecs(start0,tmp,ps) cmul_3vec(u2,k0,tmp) cadd_3vecs(start0,tmp,pe) #lperp1 = np.sqrt(np.inner(ps-start1,ps-start1)) #lperp2 = np.sqrt(np.inner(pe-end1,pe-end1)) csub_3vecs(ps,start1,ps1) csub_3vecs(pe,end1,pe1) lperp1 = sqrt(cinner_3vecs(ps1,ps1)) lperp2 = sqrt(cinner_3vecs(pe1,pe1)) if lperp1+lperp2 > 0.: return (lperp1*lperp1+lperp2*lperp2)/(lperp1+lperp2) else: return 0. def lee_angle_distance(start0, end0, start1, end1): ''' Calculates angle distance metric for the distance between two line segments Based on Lee , Han & Whang SIGMOD07. This function assumes that norm(end0-start0)>norm(end1-start1) i.e. that the first segment will be bigger than the second one. 
Parameters ---------- start0 : float array(3,) end0 : float array(3,) start1 : float array(3,) end1 : float array(3,) Returns ------- angle_distance : float Notes ----- l_0 = np.inner(end0-start0,end0-start0) l_1 = np.inner(end1-start1,end1-start1) cos_theta_squared = np.inner(end0-start0,end1-start1)**2/ (l_0*l_1) return np.sqrt((1-cos_theta_squared)*l_1) Examples -------- >>> lee_angle_distance([0,0,0],[1,0,0],[3,4,5],[5,4,3]) 2.0 ''' cdef cnp.ndarray[cnp.float32_t, ndim=1] fvec1,fvec2,fvec3,fvec4 fvec1 = as_float_3vec(start0) fvec2 = as_float_3vec(end0) fvec3 = as_float_3vec(start1) fvec4 = as_float_3vec(end1) return clee_angle_distance(fvec1.data,fvec2.data,fvec3.data,fvec4.data) cdef float clee_angle_distance(float *start0, float *end0,float *start1, float *end1): ''' This function assumes that norm(end0-start0)>norm(end1-start1) ''' cdef: float l0,l1,ltmp,cos_theta_squared float *s_tmp,*e_tmp,k0[3],k1[3],tmp[3] csub_3vecs(end0,start0,k0) l0 = cinner_3vecs(k0,k0) #print l0 csub_3vecs(end1,start1,k1) l1 = cinner_3vecs(k1, k1) #print l1 ltmp=cinner_3vecs(k0,k1) cos_theta_squared = (ltmp*ltmp)/ (l0*l1) #print cos_theta_squared return sqrt((1-cos_theta_squared)*l1) def approx_polygon_track(xyz,alpha=0.392): ''' Fast and simple trajectory approximation algorithm by Eleftherios and Ian It will reduce the number of points of the track by keeping intact the start and endpoints of the track and trying to remove as many points as possible without distorting much the shape of the track Parameters ---------- xyz : array(N,3) initial trajectory alpha : float smoothing parameter (<0.392 smoother, >0.392 rougher) if the trajectory was a smooth circle then with alpha =0.393 ~=pi/8. the circle would be approximated with an decahexagon if alpha = 0.7853 ~=pi/4. with an octagon. Returns ------- characteristic_points: list of M array(3,) points Examples -------- Approximating a helix: >>> t=np.linspace(0,1.75*2*np.pi,100) >>> x = np.sin(t) >>> y = np.cos(t) >>> z = t >>> xyz=np.vstack((x,y,z)).T >>> xyza = approx_polygon_track(xyz) >>> len(xyza) < len(xyz) True Notes ----- Assuming that a good approximation for a circle is an octagon then that means that the points of the octagon will have angle alpha = 2*pi/8 = pi/4 . We calculate the angle between every two neighbour segments of a trajectory and if the angle is higher than pi/4 we choose that point as a characteristic point otherwise we move at the next point. ''' cdef : int mid_index cnp.ndarray[cnp.float32_t, ndim=2] track float *fvec0,*fvec1,*fvec2 object characteristic_points size_t t_len double angle,tmp float vec0[3],vec1[3] angle=alpha track = np.ascontiguousarray(xyz, dtype=f32_dt) t_len=len(track) characteristic_points=[track[0]] mid_index = 1 angle=0 while mid_index < t_len-1: #fvec0 = as_float_3vec(track[mid_index-1]) #track[0].data fvec0 = asfp(track[mid_index-1]) fvec1 = asfp(track[mid_index]) fvec2 = asfp(track[mid_index+1]) #csub_3vecs(fvec1.data,fvec0.data,vec0) csub_3vecs(fvec1,fvec0,vec0) csub_3vecs(fvec2,fvec1,vec1) tmp=fabs(acos(cinner_3vecs(vec0,vec1)/(cnorm_3vec(vec0)*cnorm_3vec(vec1)))) if dpy_isnan(tmp) : angle+=0. 
else: angle+=tmp if angle > alpha: characteristic_points.append(track[mid_index]) angle=0 mid_index+=1 characteristic_points.append(track[-1]) return np.array(characteristic_points) def approximate_mdl_trajectory(xyz, alpha=1.): ''' Implementation of Lee et al Approximate Trajectory Partitioning Algorithm This is base on the minimum description length principle Parameters ---------- xyz : array(N,3) initial trajectory alpha : float smoothing parameter (>1 smoother, <1 rougher) Returns ------- characteristic_points : list of M array(3,) points ''' cdef : int start_index,length,current_index, i double cost_par,cost_nopar,alphac object characteristic_points size_t t_len cnp.ndarray[cnp.float32_t, ndim=2] track float tmp[3] cnp.ndarray[cnp.float32_t, ndim=1] fvec1,fvec2,fvec3,fvec4 track = np.ascontiguousarray(xyz, dtype=f32_dt) t_len=len(track) alphac=alpha characteristic_points=[xyz[0]] start_index = 0 length = 2 #print t_len while start_index+length < t_len-1: current_index = start_index+length fvec1 = as_float_3vec(track[start_index]) fvec2 = as_float_3vec(track[current_index]) # L(H) csub_3vecs(fvec2.data,fvec1.data,tmp) cost_par=dpy_log2(sqrt(cinner_3vecs(tmp,tmp))) cost_nopar=0 #print start_index,current_index # L(D|H) #for i in range(start_index+1,current_index):#+1): for i in range(start_index,current_index+1): #print i fvec3 = as_float_3vec(track[i]) fvec4 = as_float_3vec(track[i+1]) cost_par += dpy_log2(clee_perpendicular_distance(fvec3.data,fvec4.data,fvec1.data,fvec2.data)) cost_par += dpy_log2(clee_angle_distance(fvec3.data,fvec4.data,fvec1.data,fvec2.data)) csub_3vecs(fvec4.data,fvec3.data,tmp) cost_nopar += dpy_log2(cinner_3vecs(tmp,tmp)) cost_nopar /= 2 #print cost_par, cost_nopar, start_index,length if alphac*cost_par>cost_nopar: characteristic_points.append(track[current_index-1]) start_index = current_index-1 length = 2 else: length+=1 characteristic_points.append(track[-1]) return np.array(characteristic_points) def intersect_segment_cylinder(sa,sb,p,q,r): ''' Intersect Segment S(t) = sa +t(sb-sa), 0 <=t<= 1 against cylinder specified by p,q and r See p.197 from Real Time Collision Detection by C. Ericson Examples -------- Define cylinder using a segment defined by >>> p=np.array([0,0,0],dtype=np.float32) >>> q=np.array([1,0,0],dtype=np.float32) >>> r=0.5 Define segment >>> sa=np.array([0.5,1 ,0],dtype=np.float32) >>> sb=np.array([0.5,-1,0],dtype=np.float32) Intersection >>> intersect_segment_cylinder(sa, sb, p, q, r) (1.0, 0.25, 0.75) ''' cdef: float *csa,*csb,*cp,*cq float cr float ct[2] csa = asfp(sa) csb = asfp(sb) cp = asfp(p) cq = asfp(q) cr=r ct[0]=-100 ct[1]=-100 tmp = cintersect_segment_cylinder(csa,csb,cp, cq, cr, ct) return tmp, ct[0], ct[1] cdef float cintersect_segment_cylinder(float *sa,float *sb,float *p, float *q, float r, float *t): ''' Intersect Segment S(t) = sa +t(sb-sa), 0 <=t<= 1 against cylinder specified by p,q and r Look p.197 from Real Time Collision Detection C. Ericson Returns ------- inter : bool 0 no intersection 1 intersection ''' cdef: float d[3],m[3],n[3] float md,nd,dd, nn, mn, a, k, c,b, discr float epsilon_float=5.96e-08 csub_3vecs(q,p,d) csub_3vecs(sa,p,m) csub_3vecs(sb,sa,n) md=cinner_3vecs(m,d) nd=cinner_3vecs(n,d) dd=cinner_3vecs(d,d) #test if segment fully outside either endcap of cylinder if md < 0. 
and md + nd < 0.: return 0 #segment outside p side if md > dd and md + nd > dd: return 0 #segment outside q side nn=cinner_3vecs(n,n) mn=cinner_3vecs(m,n) a=dd*nn-nd*nd k=cinner_3vecs(m,m) -r*r c=dd*k-md*md if fabs(a) < epsilon_float: #segment runs parallel to cylinder axis if c>0.: return 0. # segment lies outside cylinder if md < 0.: t[0]=-mn/nn # intersect against p endcap elif md > dd : t[0]=(nd-mn)/nn # intersect against q endcap else: t[0]=0. # lies inside cylinder return 1 b=dd*mn -nd*md discr=b*b-a*c if discr < 0.: return 0. # no real roots ; no intersection t[0]=(-b-sqrt(discr))/a t[1]=(-b+sqrt(discr))/a if t[0]<0. or t[0] > 1. : return 0. # intersection lies outside segment if md + t[0]* nd < 0.: #intersection outside cylinder on 'p' side if nd <= 0. : return 0. # segment pointing away from endcap t[0]=-md/nd #keep intersection if Dot(S(t)-p,S(t)-p) <= r^2 if k+2*t[0]*(mn+t[0]*nn) <=0.: return 1. elif md+t[0]*nd > dd : #intersection outside cylinder on 'q' side if nd >= 0.: return 0. # segment pointing away from endcap t[0]= (dd-md)/nd #keep intersection if Dot(S(t)-q,S(t)-q) <= r^2 if k+dd-2*md+t[0]*(2*(mn-nd)+t[0]*nn) <= 0.: return 1. # segment intersects cylinder between the endcaps; t is correct return 1. def point_segment_sq_distance(a, b, c): ''' Calculate the squared distance from a point c to a finite line segment ab. Examples -------- >>> a=np.array([0,0,0], dtype=np.float32) >>> b=np.array([1,0,0], dtype=np.float32) >>> c=np.array([0,1,0], dtype=np.float32) >>> point_segment_sq_distance(a, b, c) 1.0 >>> c = np.array([0,3,0], dtype=np.float32) >>> point_segment_sq_distance(a,b,c) 9.0 >>> c = np.array([-1,1,0], dtype=np.float32) >>> point_segment_sq_distance(a, b, c) 2.0 ''' cdef: float *ca,*cb,*cc float cr float ct[2] ca = asfp(a) cb = asfp(b) cc = asfp(c) return cpoint_segment_sq_dist(ca, cb, cc) @cython.cdivision(True) cdef inline float cpoint_segment_sq_dist(float * a, float * b, float * c) nogil: ''' Calculate the squared distance from a point c to a line segment ab. 
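    Writing e = <c - a, b - a> and f = <b - a, b - a>, the nearest point on
    the segment is a when e <= 0 (squared distance |c - a|^2) and b when
    e >= f (squared distance |c - b|^2); otherwise c projects onto the
    interior of the segment and the squared distance is
    |c - a|^2 - e^2 / f.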
''' cdef: float ab[3],ac[3],bc[3] float e,f csub_3vecs(b,a,ab) csub_3vecs(c,a,ac) csub_3vecs(c,b,bc) e = cinner_3vecs(ac, ab) #Handle cases where c projects outside ab if e <= 0.: return cinner_3vecs(ac, ac) f = cinner_3vecs(ab, ab) if e >= f : return cinner_3vecs(bc, bc) #Handle case where c projects onto ab return cinner_3vecs(ac, ac) - e * e / f def track_dist_3pts(tracka,trackb): ''' Calculate the euclidean distance between two 3pt tracks Both direct and flip distances are calculated but only the smallest is returned Parameters ---------- a : array, shape (3,3) a three point track b : array, shape (3,3) a three point track Returns ------- dist :float Examples -------- >>> a = np.array([[0,0,0],[1,0,0,],[2,0,0]]) >>> b = np.array([[3,0,0],[3.5,1,0],[4,2,0]]) >>> c = track_dist_3pts(a, b) >>> print('%.6f' % c) 2.721573 ''' cdef cnp.ndarray[cnp.float32_t, ndim=2] a,b cdef float d[2] a=np.ascontiguousarray(tracka,dtype=f32_dt) b=np.ascontiguousarray(trackb,dtype=f32_dt) track_direct_flip_3dist(asfp(a[0]),asfp(a[1]),asfp(a[2]), asfp(b[0]),asfp(b[1]),asfp(b[2]),d) if d[0]rows out[1]=distf/rows @cython.cdivision(True) cdef inline void track_direct_flip_3dist(float *a1, float *b1,float *c1,float *a2, float *b2, float *c2, float *out) nogil: ''' Calculate the euclidean distance between two 3pt tracks both direct and flip are given as output Parameters ---------- a1,b1,c1 : 3 float[3] arrays representing the first track a2,b2,c2 : 3 float[3] arrays representing the second track Returns ------- out : a float[2] array having the euclidean distance and the fliped euclidean distance ''' cdef: int i float tmp1=0,tmp2=0,tmp3=0,tmp1f=0,tmp3f=0 #for i in range(3): for i from 0<=i<3: tmp1=tmp1+(a1[i]-a2[i])*(a1[i]-a2[i]) tmp2=tmp2+(b1[i]-b2[i])*(b1[i]-b2[i]) tmp3=tmp3+(c1[i]-c2[i])*(c1[i]-c2[i]) tmp1f=tmp1f+(a1[i]-c2[i])*(a1[i]-c2[i]) tmp3f=tmp3f+(c1[i]-a2[i])*(c1[i]-a2[i]) out[0]=(sqrt(tmp1)+sqrt(tmp2)+sqrt(tmp3))/3.0 out[1]=(sqrt(tmp1f)+sqrt(tmp2)+sqrt(tmp3f))/3.0 #out[0]=(tmp1+tmp2+tmp3)/3.0 #out[1]=(tmp1f+tmp2+tmp3f)/3.0 ctypedef struct LSC_Cluster: long *indices float *hidden long N @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) def local_skeleton_clustering(tracks, d_thr=10): r"""Efficient tractography clustering Every track can needs to have the same number of points. Use `dipy.tracking.metrics.downsample` to restrict the number of points Parameters ---------- tracks : sequence of tracks as arrays, shape (N,3) .. (N,3) where N=points d_thr : float average euclidean distance threshold Returns ------- C : dict Clusters. Examples -------- >>> tracks=[np.array([[0,0,0],[1,0,0,],[2,0,0]]), ... np.array([[3,0,0],[3.5,1,0],[4,2,0]]), ... np.array([[3.2,0,0],[3.7,1,0],[4.4,2,0]]), ... np.array([[3.4,0,0],[3.9,1,0],[4.6,2,0]]), ... np.array([[0,0.2,0],[1,0.2,0],[2,0.2,0]]), ... np.array([[2,0.2,0],[1,0.2,0],[0,0.2,0]]), ... np.array([[0,0,0],[0,1,0],[0,2,0]])] >>> C = local_skeleton_clustering(tracks, d_thr=0.5) Notes ----- The distance calculated between two tracks:: t_1 t_2 0* a *0 \ | \ | 1* | | b *1 | \ 2* \ c *2 is equal to $(a+b+c)/3$ where $a$ the euclidean distance between ``t_1[0]`` and ``t_2[0]``, $b$ between ``t_1[1]`` and ``t_2[1]`` and $c$ between ``t_1[2]`` and ``t_2[2]``. Also the same with t2 flipped (so ``t_1[0]`` compared to ``t_2[2]`` etc). 
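    The returned ``C`` is a dict keyed by cluster id; each value stores the
    member track indices (``'indices'``), the number of members (``'N'``)
    and the running sum of the member tracks (``'hidden'``). A
    representative (virtual) track per cluster can therefore be obtained,
    for instance, as::

        virtuals = [C[c]['hidden'] / C[c]['N'] for c in C]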
Visualization: It is possible to visualize the clustering C from the example above using the fvtk module:: from dipy.viz import fvtk r=fvtk.ren() for c in C: color=np.random.rand(3) for i in C[c]['indices']: fvtk.add(r,fvtk.line(tracks[i],color)) fvtk.show(r) See Also -------- dipy.tracking.metrics.downsample """ cdef: cnp.ndarray[cnp.float32_t, ndim=2] track LSC_Cluster *cluster long lent = 0,lenC = 0, dim = 0, points=0 long i=0, j=0, c=0, i_k=0, rows=0 ,cit=0 float *ptr, *hid, *alld float d[2],m_d,cd_thr long *flip points=len(tracks[0]) dim = points*3 rows = points cd_thr = d_thr #Allocate and copy memory for first cluster cluster=realloc(NULL,sizeof(LSC_Cluster)) cluster[0].indices=realloc(NULL,sizeof(long)) cluster[0].hidden=realloc(NULL,dim*sizeof(float)) cluster[0].indices[0]=0 track=np.ascontiguousarray(tracks[0],dtype=f32_dt) ptr=track.data for i from 0<=irealloc(NULL,dim*sizeof(float)) #Work with the rest of the tracks lent=len(tracks) for it in range(1,lent): track=np.ascontiguousarray(tracks[it],dtype=f32_dt) ptr=track.data cit=it with nogil: alld=calloc(lenC,sizeof(float)) flip=calloc(lenC,sizeof(long)) for k from 0<=kcluster[k].N #track_direct_flip_3dist(&ptr[0],&ptr[3],&ptr[6],&hid[0],&hid[3],&hid[6],d) #track_direct_flip_3dist(ptr,ptr+3,ptr+6,hid,hid+3,hid+6,d) track_direct_flip_dist(ptr, hid,rows,d) if d[1]realloc(cluster[i_k].indices,cluster[i_k].N*sizeof(long)) cluster[i_k].indices[cluster[i_k].N-1]=cit else:#New cluster added lenC+=1 cluster=realloc(cluster,lenC*sizeof(LSC_Cluster)) cluster[lenC-1].indices=realloc(NULL,sizeof(long)) cluster[lenC-1].hidden=realloc(NULL,dim*sizeof(float)) cluster[lenC-1].indices[0]=cit for i from 0<=i>> tracks=[np.array([[0,0,0],[1,0,0,],[2,0,0]]), ... np.array([[3,0,0],[3.5,1,0],[4,2,0]]), ... np.array([[3.2,0,0],[3.7,1,0],[4.4,2,0]]), ... np.array([[3.4,0,0],[3.9,1,0],[4.6,2,0]]), ... np.array([[0,0.2,0],[1,0.2,0],[2,0.2,0]]), ... np.array([[2,0.2,0],[1,0.2,0],[0,0.2,0]]), ... np.array([[0,0,0],[0,1,0],[0,2,0]])] >>> C=local_skeleton_clustering_3pts(tracks,d_thr=0.5) Notes ----- It is possible to visualize the clustering C from the example above using the fvtk module:: r=fvtk.ren() for c in C: color=np.random.rand(3) for i in C[c]['indices']: fvtk.add(r,fos.line(tracks[i],color)) fvtk.show(r) ''' cdef : cnp.ndarray[cnp.float32_t, ndim=2] track cnp.ndarray[cnp.float32_t, ndim=2] h int lent,k,it float d[2] #float d_sq=d_thr**2 lent=len(tracks) #Network C C={0:{'indices':[0],'hidden':tracks[0].copy(),'N':1}} ts=np.zeros((3,3),dtype=np.float32) #for (it,t) in enumerate(tracks[1:]): for it in range(1,lent): track=np.ascontiguousarray(tracks[it],dtype=f32_dt) lenC=len(C.keys()) #if it%1000==0: # print it,lenC alld=np.zeros(lenC) flip=np.zeros(lenC) for k in range(lenC): h=np.ascontiguousarray(C[k]['hidden']/C[k]['N'],dtype=f32_dt) #print track #print h track_direct_flip_3dist( asfp(track[0]),asfp(track[1]),asfp(track[2]), asfp(h[0]), asfp(h[1]),asfp(h[2]),d) #d=np.sum(np.sqrt(np.sum((t-h)**2,axis=1)))/3.0 #ts[0]=t[-1];ts[1]=t[1];ts[-1]=t[0] #ds=np.sum(np.sqrt(np.sum((ts-h)**2,axis=1)))/3.0 #print d[0],d[1] if d[1]>> tracks=[np.array([[0,0,0],[1,0,0,],[2,0,0]],dtype=np.float32), ... np.array([[3,0,0],[3.5,1,0],[4,2,0]],dtype=np.float32), ... np.array([[3.2,0,0],[3.7,1,0],[4.4,2,0]],dtype=np.float32), ... np.array([[3.4,0,0],[3.9,1,0],[4.6,2,0]],dtype=np.float32), ... np.array([[0,0.2,0],[1,0.2,0],[2,0.2,0]],dtype=np.float32), ... np.array([[2,0.2,0],[1,0.2,0],[0,0.2,0]],dtype=np.float32), ... 
np.array([[0,0,0],[0,1,0],[0,2,0]],dtype=np.float32), ... np.array([[0.2,0,0],[0.2,1,0],[0.2,2,0]],dtype=np.float32), ... np.array([[-0.2,0,0],[-0.2,1,0],[-0.2,2,0]],dtype=np.float32)] >>> C = larch_3split(tracks, None, 0.5) Here is an example of how to visualize the clustering above:: from dipy.viz import fvtk r=fvtk.ren() fvtk.add(r,fvtk.line(tracks,fvtk.red)) fvtk.show(r) for c in C: color=np.random.rand(3) for i in C[c]['indices']: fos.add(r,fvtk.line(tracks[i],color)) fvtk.show(r) for c in C: fvtk.add(r,fos.line(C[c]['rep3']/C[c]['N'],fos.white)) fvtk.show(r) ''' cdef: cnp.ndarray[cnp.float32_t, ndim=2] track cnp.ndarray[cnp.float32_t, ndim=2] h int lent,k,it float d[2] lent=len(tracks) if indices==None: C={0:{'indices':[0],'rep3':tracks[0].copy(),'N':1}} itrange=range(1,lent) else: C={0:{'indices':[indices[0]],'rep3':tracks[indices[0]].copy(),'N':1}} itrange=indices[1:] ts=np.zeros((3,3),dtype=np.float32) for it in itrange: track=np.ascontiguousarray(tracks[it],dtype=f32_dt) lenC=len(C.keys()) alld=np.zeros(lenC) flip=np.zeros(lenC) for k in range(lenC): h=np.ascontiguousarray(C[k]['rep3']/C[k]['N'],dtype=f32_dt) track_direct_flip_3dist(asfp(track[0]),asfp(track[1]),asfp(track[2]), asfp(h[0]), asfp(h[1]), asfp(h[2]),d) if d[1]>> t=np.random.rand(10,3).astype(np.float32) >>> p=np.array([0.5,0.5,0.5],dtype=np.float32) >>> point_track_sq_distance_check(t,p,2**2) True >>> t=np.array([[0,0,0],[1,1,1],[2,2,2]],dtype='f4') >>> p=np.array([-1,-1.,-1],dtype='f4') >>> point_track_sq_distance_check(t,p,.2**2) False >>> point_track_sq_distance_check(t,p,2**2) True ''' cdef: float *t=track.data float *p=point.data float a[3],b[3] int tlen = len(track) int curr = 0 float dist = 0 int i int intersects = 0 with nogil: for i from 0<=ia,b,p) if dist<=sq_dist_thr: intersects=1 break if intersects==1: return True else: return False def track_roi_intersection_check(cnp.ndarray[float,ndim=2] track, cnp.ndarray[float,ndim=2] roi, double sq_dist_thr): ''' Check if a track is intersecting a region of interest Parameters ---------- track: array,float32, shape (N,3) roi: array,float32, shape (M,3) sq_dist_thr: double, threshold, check squared euclidean distance from every roi point Returns ------- bool: True, if sq_distance <= sq_dist_thr, otherwise False. Examples -------- >>> roi=np.array([[0,0,0],[1,0,0],[2,0,0]],dtype='f4') >>> t=np.array([[0,0,0],[1,1,1],[2,2,2]],dtype='f4') >>> track_roi_intersection_check(t,roi,1) True >>> track_roi_intersection_check(t,np.array([[10,0,0]],dtype='f4'),1) False ''' cdef: float *t=track.data float *r=roi.data float a[3],b[3],p[3] int tlen = len(track) int rlen = len(roi) int curr = 0 int currp = 0 float dist = 0 int i,j int intersects=0 with nogil: for i from 0<=ia,b,p) if dist<=sq_dist_thr: intersects=1 break if intersects==1: break if intersects==1: return True else: return False dipy-0.13.0/dipy/tracking/eudx.py000066400000000000000000000210541317371701200166320ustar00rootroot00000000000000import numpy as np from dipy.tracking import utils from dipy.tracking.propspeed import eudx_both_directions from dipy.data import get_sphere class EuDX(object): '''Euler Delta Crossings Generates tracks with termination criteria defined by a delta function [1]_ and it has similarities with FACT algorithm [2]_ and Basser's method but uses trilinear interpolation. Can be used with any reconstruction method as DTI, DSI, QBI, GQI which can calculate an orientation distribution function and find the local peaks of that function. 
For example a single tensor model can give you only one peak a dual tensor model 2 peaks and quantitative anisotropy method as used in GQI can give you 3,4,5 or even more peaks. The parameters of the delta function are checking thresholds for the direction propagation magnitude and the angle of propagation. A specific number of seeds is defined randomly and then the tracks are generated for that seed if the delta function returns true. Trilinear interpolation is being used for defining the weights of the propagation. References ------------ .. [1] Garyfallidis, Towards an accurate brain tractography, PhD thesis, University of Cambridge, 2012. .. [2] Mori et al. Three-dimensional tracking of axonal projections in the brain by magnetic resonance imaging. Ann. Neurol. 1999. Notes ----- The coordinate system of the tractography is that of native space of image coordinates not native space world coordinates therefore voxel size is always considered as having size (1,1,1). Therefore, the origin is at the center of the center of the first voxel of the volume and all i,j,k coordinates start from the center of the voxel they represent. ''' def __init__(self, a, ind, seeds, odf_vertices, a_low=0.0239, step_sz=0.5, ang_thr=60., length_thr=0., total_weight=.5, max_points=1000, affine=None): ''' Euler integration with multiple stopping criteria and supporting multiple multiple fibres in crossings [1]_. Parameters ------------ a : array, Shape (I, J, K, Np), magnitude of the peak of a scalar anisotropic function e.g. QA (quantitative anisotropy) where Np is the number of peaks or a different function of shape (I, J, K) e.g FA or GFA. ind : array, shape(x, y, z, Np) indices of orientations of the scalar anisotropic peaks found on the resampling sphere seeds : int or ndarray If an int is specified then that number of random seeds is generated in the volume. If an (N, 3) array of points is given, each of the N points is used as a seed. Seed points should be given in the point space of the track (see ``affine``). The latter is useful when you need to track from specific regions e.g. the white/gray matter interface or a specific ROI e.g. in the corpus callosum. odf_vertices : ndarray, shape (N, 3) sphere points which define a discrete representation of orientations for the peaks, the same for all voxels. Usually the same sphere is used as an input for a reconstruction algorithm e.g. DSI. a_low : float, optional low threshold for QA(typical 0.023) or FA(typical 0.2) or any other anisotropic function step_sz : float, optional euler propagation step size ang_thr : float, optional if turning angle is bigger than this threshold then tracking stops. total_weight : float, optional total weighting threshold max_points : int, optional maximum number of points in a track. Used to stop tracks from looping forever. affine : array (4, 4) optional An affine mapping from the voxel indices of the input data to the point space of the streamlines. That is if ``[x, y, z, 1] == point_space * [i, j, k, 1]``, then the streamline with point ``[x, y, z]`` passes though the center of voxel ``[i, j, k]``. If no point_space is given, the point space will be in voxel coordinates. Returns ------- generator : obj By iterating this generator you can obtain all the streamlines. 
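        Seeds are interpreted in the point space defined by ``affine``, so
        they can, for instance, be generated from a binary mask with
        ``dipy.tracking.utils.seeds_from_mask``. An illustrative sketch
        (``fa``, ``ind``, ``sphere`` and ``affine`` are assumed to come from
        a prior reconstruction step, as in the example below)::

            from dipy.tracking import utils
            seed_mask = fa > 0.3
            seeds = utils.seeds_from_mask(seed_mask, density=1, affine=affine)
            eu = EuDX(a=fa, ind=ind, seeds=seeds,
                      odf_vertices=sphere.vertices, a_low=0.2, affine=affine)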
Examples -------- >>> import nibabel as nib >>> from dipy.reconst.dti import TensorModel, quantize_evecs >>> from dipy.data import get_data, get_sphere >>> from dipy.core.gradients import gradient_table >>> fimg,fbvals,fbvecs = get_data('small_101D') >>> img = nib.load(fimg) >>> affine = img.affine >>> data = img.get_data() >>> gtab = gradient_table(fbvals, fbvecs) >>> model = TensorModel(gtab) >>> ten = model.fit(data) >>> sphere = get_sphere('symmetric724') >>> ind = quantize_evecs(ten.evecs, sphere.vertices) >>> eu = EuDX(a=ten.fa, ind=ind, seeds=100, odf_vertices=sphere.vertices, a_low=.2) >>> tracks = [e for e in eu] Notes ------- This works as an iterator class because otherwise it could fill your entire memory if you generate many tracks. Something very common as you can easily generate millions of tracks if you have many seeds. References ---------- .. [1] E. Garyfallidis (2012), "Towards an accurate brain tractography", PhD thesis, University of Cambridge, UK. ''' self.a = np.array(a, dtype=np.float64, copy=True, order="C") self.ind = np.array(ind, dtype=np.float64, copy=True, order="C") self.a_low = a_low self.ang_thr = ang_thr self.step_sz = step_sz self.length_thr = length_thr self.total_weight = total_weight self.max_points = max_points self.affine = affine if affine is not None else np.eye(4) if len(self.a.shape) == 3: self.a.shape = self.a.shape + (1,) self.ind.shape = self.ind.shape + (1,) # store number of maximum peaks x, y, z, g = self.a.shape self.Np = g self.odf_vertices = np.ascontiguousarray(odf_vertices, dtype='f8') try: self.seed_no = len(seeds) self.seed_list = seeds except TypeError: self.seed_no = seeds self.seed_list = None def __iter__(self): if self.seed_list is not None: inv = np.linalg.inv(self.affine) seed_voxels = np.dot(self.seed_list, inv[:3, :3].T) seed_voxels += inv[:3, 3] else: seed_voxels = None voxel_tracks = self._voxel_tracks(seed_voxels) return utils.move_streamlines(voxel_tracks, self.affine) def _voxel_tracks(self, seed_voxels): ''' This is were all the fun starts ''' if seed_voxels is not None and seed_voxels.dtype != np.float64: # This is a private method so users should never see this error. If # you've reached this error, there is a bug somewhere. raise ValueError("wrong dtype seeds have to be float64") x, y, z, g = self.a.shape edge = np.array([x, y, z], dtype=np.float64) - 1. # for all seeds for i in range(self.seed_no): if seed_voxels is None: seed = np.random.rand(3) * edge else: seed = seed_voxels[i] if np.any(seed < 0.) 
or np.any(seed > edge): raise ValueError('Seed outside boundaries', seed) seed = np.ascontiguousarray(seed) # for all peaks for ref in range(g): track = eudx_both_directions(seed.copy(), ref, self.a, self.ind, self.odf_vertices, self.a_low, self.ang_thr, self.step_sz, self.total_weight, self.max_points) if track is not None and track.shape[0] > 1: yield track dipy-0.13.0/dipy/tracking/fbcmeasures.pyx000066400000000000000000000345021317371701200203560ustar00rootroot00000000000000import numpy as np cimport numpy as cnp cimport cython cimport safe_openmp as openmp from safe_openmp cimport have_openmp from cython.parallel import parallel, prange, threadid from scipy.spatial import KDTree from scipy.interpolate import interp1d from math import sqrt, log from dipy.data import get_sphere from dipy.denoise.enhancement_kernel import EnhancementKernel from dipy.core.ndindex import ndindex cdef class FBCMeasures: cdef int [:] streamline_length cdef double [:, :, :] streamline_points cdef double [:, :] streamlines_lfbc cdef double [:] streamlines_rfbc def __init__(self, streamlines, kernel, min_fiberlength=10, max_windowsize=7, num_threads=None, verbose=False): """ Compute the fiber to bundle coherence measures for a set of streamlines. Parameters ---------- streamlines : list A collection of streamlines, each n by 3, with n being the number of nodes in the fiber. kernel : Kernel object A diffusion kernel object created from EnhancementKernel. min_fiberlength : int Fibers with fewer points than minimum_length are excluded from FBC computation. max_windowsize : int The maximal window size used to calculate the average LFBC region num_threads : int Number of threads to use for OpenMP. verbose : boolean Enable verbose mode. References ---------- [Meesters2016_HBM] S. Meesters, G. Sanguinetti, E. Garyfallidis, J. Portegies, P. Ossenblok, R. Duits. (2016) Cleaning output of tractography via fiber to bundle coherence, a new open source implementation. Human Brain Mapping conference 2016. [Portegies2015b] J. Portegies, R. Fick, G. Sanguinetti, S. Meesters, G.Girard, and R. Duits. (2015) Improving Fiber Alignment in HARDI by Combining Contextual PDE flow with Constrained Spherical Deconvolution. PLoS One. """ self.compute(streamlines, kernel, min_fiberlength, max_windowsize, num_threads, verbose) def get_points_rfbc_thresholded(self, threshold, emphasis=.5, verbose=False): """ Set a threshold on the RFBC to remove spurious fibers. Parameters ---------- threshold : float The threshold to set on the RFBC, should be within 0 and 1. emphasis : float Enhances the coloring of the fibers by LFBC. Increasing emphasis will stress spurious fibers by logarithmic weighting. verbose : boolean Prints info about the found RFBC for the set of fibers such as median, mean, min and max values. 
Returns ------- output : tuple with 3 elements The output contains: 1) a collection of streamlines, each n by 3, with n being the number of nodes in the fiber that remain after filtering 2) the r,g,b values of the local fiber to bundle coherence (LFBC) 3) the relative fiber to bundle coherence (RFBC) """ if verbose: print "median RFBC: " + str(np.median(self.streamlines_rfbc)) print "mean RFBC: " + str(np.mean(self.streamlines_rfbc)) print "min RFBC: " + str(np.min(self.streamlines_rfbc)) print "max RFBC: " + str(np.max(self.streamlines_rfbc)) # logarithmic transform of color values to emphasize spurious fibers minval = np.nanmin(self.streamlines_lfbc) maxval = np.nanmax(self.streamlines_lfbc) lfbc_log = np.log((self.streamlines_lfbc - minval) / (maxval - minval + 10e-10) + emphasis) minval = np.nanmin(lfbc_log) maxval = np.nanmax(lfbc_log) lfbc_log = (lfbc_log - minval) / (maxval - minval) # define color interpolation functions x = np.linspace(0, 1, num=4, endpoint=True) r = np.array([1, 1, 0, 0]) g = np.array([1, 0, 0, 1]) b = np.array([0, 0, 1, 1]) fr = interp1d(x, r, bounds_error=False, fill_value=0) fg = interp1d(x, g, bounds_error=False, fill_value=0) fb = interp1d(x, b, bounds_error=False, fill_value=0) # select fibers above the RFBC threshold streamline_out = [] color_out = [] rfbc_out = [] for i in range((self.streamlines_rfbc).shape[0]): rfbc = self.streamlines_rfbc[i] lfbc = lfbc_log[i] if rfbc > threshold: fiber = np.array(self.streamline_points[i]) fiber = fiber[0:self.streamline_length[i] - 1] streamline_out.append(fiber) rfbc_out.append(rfbc) lfbc = lfbc[0:self.streamline_length[i] - 1] lfbc_colors = np.transpose([fr(lfbc), fg(lfbc), fb(lfbc)]) color_out.append(lfbc_colors.tolist()) return streamline_out, color_out, rfbc_out @cython.wraparound(False) @cython.boundscheck(False) @cython.nonecheck(False) @cython.cdivision(True) cdef void compute(self, py_streamlines, kernel, min_fiberlength, max_windowsize, num_threads=None, verbose=False): """ Compute the fiber to bundle coherence measures for a set of streamlines. Parameters ---------- py_streamlines : list A collection of streamlines, each n by 3, with n being the number of nodes in the fiber. kernel : Kernel object A diffusion kernel object created from EnhancementKernel. min_fiberlength : int Fibers with fewer points than minimum_length are excluded from FBC computation. max_windowsize : int The maximal window size used to calculate the average LFBC region num_threads : int Number of threads to use for OpenMP. verbose : boolean Enable verbose mode. """ cdef: int num_fibers, max_length, dim double [:, :, :] streamlines int [:] streamlines_length double [:, :, :] streamlines_tangent int [:, :] streamlines_nearestp double [:, :] streamline_scores double [:] tangent int line_id, point_id int line_id2, point_id2 double score double [:] score_mp int [:] xd_mp, yd_mp, zd_mp int xd, yd, zd, N, hn double [:, :, :, :, ::1] lut int threads_to_use = -1 int all_cores = openmp.omp_get_num_procs() if num_threads is not None: threads_to_use = num_threads else: threads_to_use = all_cores if have_openmp: openmp.omp_set_dynamic(0) openmp.omp_set_num_threads(threads_to_use) # if the fibers are too short FBC measures cannot be applied, # remove these. streamlines_length = np.array([x.shape[0] for x in py_streamlines], dtype=np.int32) min_length = min(streamlines_length) if min_length < min_fiberlength: print("The minimum fiber length is 10 points. 
\ Shorter fibers were found and removed.") py_streamlines = [x for x in py_streamlines if x.shape[0] >= min_fiberlength] streamlines_length = np.array([x.shape[0] for x in py_streamlines], dtype=np.int32) num_fibers = len(py_streamlines) self.streamline_length = streamlines_length max_length = max(streamlines_length) dim = 3 # get lookup table info lut = kernel.get_lookup_table() N = lut.shape[2] hn = (N-1) / 2 # prepare numpy arrays for speed streamlines = np.zeros((num_fibers, max_length, dim), dtype=np.float64) * np.nan streamlines_tangents = np.zeros((num_fibers, max_length, dim), dtype=np.float64) streamlines_nearestp = np.zeros((num_fibers, max_length), dtype=np.int32) streamline_scores = np.zeros((num_fibers, max_length), dtype=np.float64) * np.nan # copy python streamlines into numpy array for line_id in range(num_fibers): for point_id in range(streamlines_length[line_id]): for dim in range(3): streamlines[line_id, point_id, dim] = \ py_streamlines[line_id][point_id][dim] self.streamline_points = streamlines # compute tangents for line_id in range(num_fibers): for point_id in range(streamlines_length[line_id] - 1): tangent = np.subtract(streamlines[line_id, point_id + 1], streamlines[line_id, point_id]) streamlines_tangents[line_id, point_id] = \ np.divide(tangent, np.sqrt(np.dot(tangent, tangent))) # estimate which kernel LUT index corresponds to angles tree = KDTree(kernel.get_orientations()) for line_id in range(num_fibers): for point_id in range(streamlines_length[line_id] - 1): streamlines_nearestp[line_id, point_id] = \ tree.query(streamlines[line_id, point_id])[1] # arrays for parallel computing score_mp = np.zeros(num_fibers) xd_mp = np.zeros(num_fibers, dtype=np.int32) yd_mp = np.zeros(num_fibers, dtype=np.int32) zd_mp = np.zeros(num_fibers, dtype=np.int32) if verbose: if have_openmp: print("Running in parallel!") else: print("No OpenMP...") # compute fiber LFBC measures with nogil: for line_id in prange(num_fibers, schedule='guided'): for point_id in range(streamlines_length[line_id] - 1): score_mp[line_id] = 0.0 for line_id2 in range(num_fibers): # skip lfbc computation with itself if line_id == line_id2: continue for point_id2 in range(streamlines_length[line_id2] - 1): # compute displacement xd_mp[line_id] = \ int(streamlines[line_id, point_id, 0] - streamlines[line_id2, point_id2, 0] + 0.5) yd_mp[line_id] = \ int(streamlines[line_id, point_id, 1] - streamlines[line_id2, point_id2, 1] + 0.5) zd_mp[line_id] = \ int(streamlines[line_id, point_id, 2] - streamlines[line_id2, point_id2, 2] + 0.5) # if position is outside the kernel bounds, skip if xd_mp[line_id] > hn or -xd_mp[line_id] > hn or \ yd_mp[line_id] > hn or -yd_mp[line_id] > hn or \ zd_mp[line_id] > hn or -zd_mp[line_id] > hn: continue # grab kernel value from LUT score_mp[line_id] += \ lut[streamlines_nearestp[line_id, point_id], streamlines_nearestp[line_id2, point_id2], hn+xd_mp[line_id], hn+yd_mp[line_id], hn+zd_mp[line_id]] # ang_v, ang_r, x, y, z streamline_scores[line_id, point_id] = score_mp[line_id] # Reset number of OpenMP cores to default if have_openmp and num_threads is not None: openmp.omp_set_num_threads(all_cores) # Save LFBC as class member self.streamlines_lfbc = streamline_scores # compute RFBC for each fiber self.streamlines_rfbc = compute_rfbc(streamlines_length, streamline_scores, max_windowsize) def compute_rfbc(streamlines_length, streamline_scores, max_windowsize=7): """ Compute the relative fiber to bundle coherence (RFBC) Parameters ---------- streamlines_length : 1D int array Contains 
the length of each streamline streamlines_scores : 2D double array Contains the local fiber to bundle coherence (LFBC) for each streamline element. max_windowsize : int The maximal window size used to calculate the average LFBC region Returns ---------- output: normalized lowest average LFBC region along the fiber """ # finds the region of the fiber with maximal length of max_windowsize in # which the LFBC is the lowest int_length = min(np.amin(streamlines_length), max_windowsize) int_value = np.apply_along_axis(lambda x: min_moving_average(x[~np.isnan(x)], int_length), 1, streamline_scores) avg_total = np.mean( np.apply_along_axis( lambda x: np.mean(np.extract(x[~np.isnan(x)] >= 0, x[~np.isnan(x)])), 1, streamline_scores)) if not avg_total == 0: return int_value / avg_total else: return int_value def min_moving_average(a, n): """ Return the lowest cumulative sum for the score of a streamline segment Parameters ---------- a : array Input array n : int Length of the segment Returns ---------- output: normalized lowest average LFBC region along the fiber """ ret = np.cumsum(np.extract(a >= 0, a)) ret[n:] = ret[n:] - ret[:-n] return np.amin(ret[n - 1:] / n) dipy-0.13.0/dipy/tracking/learning.py000066400000000000000000000071361317371701200174710ustar00rootroot00000000000000''' Learning algorithms for tractography''' import numpy as np import dipy.tracking.distances as pf def detect_corresponding_tracks(indices, tracks1, tracks2): ''' Detect corresponding tracks from list tracks1 to list tracks2 where tracks1 & tracks2 are lists of tracks Parameters ------------ indices : sequence of indices of tracks1 that are to be detected in tracks2 tracks1 : sequence of tracks as arrays, shape (N1,3) .. (Nm,3) tracks2 : sequence of tracks as arrays, shape (M1,3) .. (Mm,3) Returns --------- track2track : array (N,2) where N is len(indices) of int it shows the correspondance in the following way: the first column is the current index in tracks1 the second column is the corresponding index in tracks2 Examples ---------- >>> import numpy as np >>> import dipy.tracking.learning as tl >>> A = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]]) >>> B = np.array([[1, 0, 0], [2, 0, 0], [3, 0, 0]]) >>> C = np.array([[0, 0, -1], [0, 0, -2], [0, 0, -3]]) >>> bundle1 = [A, B, C] >>> bundle2 = [B, A] >>> indices = [0, 1] >>> arr = tl.detect_corresponding_tracks(indices, bundle1, bundle2) Notes ------- To find the corresponding tracks we use mam_distances with 'avg' option. Then we calculate the argmin of all the calculated distances and return it for every index. (See 3rd column of arr in the example given below.) ''' li = len(indices) track2track = np.zeros((li, 2)) cnt = 0 for i in indices: rt = [pf.mam_distances(tracks1[i], t, 'avg') for t in tracks2] rt = np.array(rt) track2track[cnt] = np.array([i, rt.argmin()]) cnt += 1 return track2track.astype(int) def detect_corresponding_tracks_plus(indices, tracks1, indices2, tracks2): ''' Detect corresponding tracks from 1 to 2 where tracks1 & tracks2 are sequences of tracks Parameters ------------ indices : sequence of indices of tracks1 that are to be detected in tracks2 tracks1 : sequence of tracks as arrays, shape (N1,3) .. (Nm,3) indices2 : sequence of indices of tracks2 in the initial brain tracks2 : sequence of tracks as arrays, shape (M1,3) .. 
(Mm,3) Returns --------- track2track : array (N,2) where N is len(indices) of int showing the correspondance in th following way the first colum is the current index of tracks1 the second column is the corresponding index in tracks2 Examples ---------- >>> import numpy as np >>> import dipy.tracking.learning as tl >>> A = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]]) >>> B = np.array([[1, 0, 0], [2, 0, 0], [3, 0, 0]]) >>> C = np.array([[0, 0, -1], [0, 0, -2], [0, 0, -3]]) >>> bundle1 = [A, B, C] >>> bundle2 = [B, A] >>> indices = [0, 1] >>> indices2 = indices >>> arr = tl.detect_corresponding_tracks_plus(indices, bundle1, indices2, bundle2) Notes ------- To find the corresponding tracks we use mam_distances with 'avg' option. Then we calculate the argmin of all the calculated distances and return it for every index. (See 3rd column of arr in the example given below.) See also ---------- distances.mam_distances ''' li = len(indices) track2track = np.zeros((li, 2)) cnt = 0 for i in indices: rt = [pf.mam_distances(tracks1[i], t, 'avg') for t in tracks2] rt = np.array(rt) track2track[cnt] = np.array([i, indices2[rt.argmin()]]) cnt += 1 return track2track.astype(int) dipy-0.13.0/dipy/tracking/life.py000066400000000000000000000471161317371701200166130ustar00rootroot00000000000000""" This is an implementation of the Linear Fascicle Evaluation (LiFE) algorithm described in: Pestilli, F., Yeatman, J, Rokem, A. Kay, K. and Wandell B.A. (2014). Validation and statistical inference in living connectomes. Nature Methods 11: 1058-1063. doi:10.1038/nmeth.3098 """ import numpy as np import scipy.sparse as sps import scipy.linalg as la from dipy.reconst.base import ReconstModel, ReconstFit from dipy.utils.six.moves import range from dipy.tracking.utils import unique_rows from dipy.tracking.streamline import transform_streamlines from dipy.tracking.vox2track import _voxel2streamline import dipy.data as dpd import dipy.core.optimize as opt from dipy.testing import setup_test def gradient(f): """ Return the gradient of an N-dimensional array. The gradient is computed using central differences in the interior and first differences at the boundaries. The returned gradient hence has the same shape as the input array. Parameters ---------- f : array_like An N-dimensional array containing samples of a scalar function. Returns ------- gradient : ndarray N arrays of the same shape as `f` giving the derivative of `f` with respect to each dimension. Examples -------- >>> x = np.array([1, 2, 4, 7, 11, 16], dtype=np.float) >>> gradient(x) array([ 1. , 1.5, 2.5, 3.5, 4.5, 5. ]) >>> gradient(np.array([[1, 2, 6], [3, 4, 5]], dtype=np.float)) [array([[ 2., 2., -1.], [ 2., 2., -1.]]), array([[ 1. , 2.5, 4. ], [ 1. , 1. , 1. ]])] Note ---- This is a simplified implementation of gradient that is part of numpy 1.8. In order to mitigate the effects of changes added to this implementation in version 1.9 of numpy, we include this implementation here. 
""" f = np.asanyarray(f) N = len(f.shape) # number of dimensions dx = [1.0]*N # use central differences on interior and first differences on endpoints outvals = [] # create slice objects --- initially all are [:, :, ..., :] slice1 = [slice(None)]*N slice2 = [slice(None)]*N slice3 = [slice(None)]*N for axis in range(N): # select out appropriate parts for this dimension out = np.empty_like(f) slice1[axis] = slice(1, -1) slice2[axis] = slice(2, None) slice3[axis] = slice(None, -2) # 1D equivalent -- out[1:-1] = (f[2:] - f[:-2])/2.0 out[slice1] = (f[slice2] - f[slice3])/2.0 slice1[axis] = 0 slice2[axis] = 1 slice3[axis] = 0 # 1D equivalent -- out[0] = (f[1] - f[0]) out[slice1] = (f[slice2] - f[slice3]) slice1[axis] = -1 slice2[axis] = -1 slice3[axis] = -2 # 1D equivalent -- out[-1] = (f[-1] - f[-2]) out[slice1] = (f[slice2] - f[slice3]) # divide by step size outvals.append(out / dx[axis]) # reset the slice object in this dimension to ":" slice1[axis] = slice(None) slice2[axis] = slice(None) slice3[axis] = slice(None) if N == 1: return outvals[0] else: return outvals def streamline_gradients(streamline): """ Calculate the gradients of the streamline along the spatial dimension Parameters ---------- streamline : array-like of shape (n, 3) The 3d coordinates of a single streamline Returns ------- Array of shape (3, n): Spatial gradients along the length of the streamline. """ return np.array(gradient(np.asarray(streamline))[0]) def grad_tensor(grad, evals): """ Calculate the 3 by 3 tensor for a given spatial gradient, given a canonical tensor shape (also as a 3 by 3), pointing at [1,0,0] Parameters ---------- grad : 1d array of shape (3,) The spatial gradient (e.g between two nodes of a streamline). evals: 1d array of shape (3,) The eigenvalues of a canonical tensor to be used as a response function. """ # This is the rotation matrix from [1, 0, 0] to this gradient of the sl: R = la.svd(np.matrix(grad), overwrite_a=True)[2] # This is the 3 by 3 tensor after rotation: T = np.dot(np.dot(R, np.diag(evals)), R.T) return T def streamline_tensors(streamline, evals=[0.001, 0, 0]): """ The tensors generated by this fiber. Parameters ---------- streamline : array-like of shape (n, 3) The 3d coordinates of a single streamline evals : iterable with three entries The estimated eigenvalues of a single fiber tensor. (default: [0.001, 0, 0]). Returns ------- An n_nodes by 3 by 3 array with the tensor for each node in the fiber. Note ---- Estimates of the radial/axial diffusivities may rely on empirical measurements (for example, the AD in the Corpus Callosum), or may be based on a biophysical model of some kind. """ grad = streamline_gradients(streamline) # Preallocate: tensors = np.empty((grad.shape[0], 3, 3)) for grad_idx, this_grad in enumerate(grad): tensors[grad_idx] = grad_tensor(this_grad, evals) return tensors def streamline_signal(streamline, gtab, evals=[0.001, 0, 0]): """ The signal from a single streamline estimate along each of its nodes. Parameters ---------- streamline : a single streamline gtab : GradientTable class instance evals : list of length 3 (optional. Default: [0.001, 0, 0]) The eigenvalues of the canonical tensor used as an estimate of the signal generated by each node of the streamline. 
""" # Gotta have those tensors: tensors = streamline_tensors(streamline, evals) sig = np.empty((len(streamline), np.sum(~gtab.b0s_mask))) # Extract them once: bvecs = gtab.bvecs[~gtab.b0s_mask] bvals = gtab.bvals[~gtab.b0s_mask] for ii, tensor in enumerate(tensors): ADC = np.diag(np.dot(np.dot(bvecs, tensor), bvecs.T)) # Use the Stejskal-Tanner equation with the ADC as input, and S0 = 1: sig[ii] = np.exp(-bvals * ADC) return sig - np.mean(sig) class LifeSignalMaker(object): """ A class for generating signals from streamlines in an efficient and speedy manner. """ def __init__(self, gtab, evals=[0.001, 0, 0], sphere=None): """ Initialize a signal maker Parameters ---------- gtab : GradientTable class instance The gradient table on which the signal is calculated. evals : list of 3 items The eigenvalues of the canonical tensor to use in calculating the signal. n_points : `dipy.core.Sphere` class instance The discrete sphere to use as an approximation for the continuous sphere on which the signal is represented. If integer - we will use an instance of one of the symmetric spheres cached in `dps.get_sphere`. If a 'dipy.core.Sphere' class instance is provided, we will use this object. Default: the :mod:`dipy.data` symmetric sphere with 724 vertices """ if sphere is None: self.sphere = dpd.get_sphere('symmetric724') else: self.sphere = sphere self.gtab = gtab self.evals = evals # Initialize an empty dict to fill with signals for each of the sphere # vertices: self.signal = np.empty((self.sphere.vertices.shape[0], np.sum(~gtab.b0s_mask))) # We'll need to keep track of what we've already calculated: self._calculated = [] def calc_signal(self, xyz): idx = self.sphere.find_closest(xyz) if idx not in self._calculated: bvecs = self.gtab.bvecs[~self.gtab.b0s_mask] bvals = self.gtab.bvals[~self.gtab.b0s_mask] tensor = grad_tensor(self.sphere.vertices[idx], self.evals) ADC = np.diag(np.dot(np.dot(bvecs, tensor), bvecs.T)) sig = np.exp(-bvals * ADC) sig = sig - np.mean(sig) self.signal[idx] = sig self._calculated.append(idx) return self.signal[idx] def streamline_signal(self, streamline): """ Approximate the signal for a given streamline """ grad = streamline_gradients(streamline) sig_out = np.zeros((grad.shape[0], self.signal.shape[-1])) for ii, g in enumerate(grad): sig_out[ii] = self.calc_signal(g) return sig_out def voxel2streamline(streamline, transformed=False, affine=None, unique_idx=None): """ Maps voxels to streamlines and streamlines to voxels, for setting up the LiFE equations matrix Parameters ---------- streamline : list A collection of streamlines, each n by 3, with n being the number of nodes in the fiber. affine : 4 by 4 array (optional) Defines the spatial transformation from streamline to data. Default: np.eye(4) transformed : bool (optional) Whether the streamlines have been already transformed (in which case they don't need to be transformed in here). unique_idx : array (optional). The unique indices in the streamlines Returns ------- v2f, v2fn : tuple of dicts The first dict in the tuple answers the question: Given a voxel (from the unique indices in this model), which fibers pass through it? The second answers the question: Given a streamline, for each voxel that this streamline passes through, which nodes of that streamline are in that voxel? 
""" if transformed: transformed_streamline = streamline else: if affine is None: affine = np.eye(4) transformed_streamline = transform_streamlines(streamline, affine) if unique_idx is None: all_coords = np.concatenate(transformed_streamline) unique_idx = unique_rows(np.round(all_coords)) return _voxel2streamline(transformed_streamline, unique_idx.astype(np.intp)) class FiberModel(ReconstModel): """ A class for representing and solving predictive models based on tractography solutions. Notes ----- This is an implementation of the LiFE model described in [1]_ [1] Pestilli, F., Yeatman, J, Rokem, A. Kay, K. and Wandell B.A. (2014). Validation and statistical inference in living connectomes. Nature Methods. """ def __init__(self, gtab): """ Parameters ---------- gtab : a GradientTable class instance """ # Initialize the super-class: ReconstModel.__init__(self, gtab) def setup(self, streamline, affine, evals=[0.001, 0, 0], sphere=None): """ Set up the necessary components for the LiFE model: the matrix of fiber-contributions to the DWI signal, and the coordinates of voxels for which the equations will be solved Parameters ---------- streamline : list Streamlines, each is an array of shape (n, 3) affine : 4 by 4 array Mapping from the streamline coordinates to the data evals : list (3 items, optional) The eigenvalues of the canonical tensor used as a response function. Default:[0.001, 0, 0]. sphere: `dipy.core.Sphere` instance. Whether to approximate (and cache) the signal on a discrete sphere. This may confer a significant speed-up in setting up the problem, but is not as accurate. If `False`, we use the exact gradients along the streamlines to calculate the matrix, instead of an approximation. Defaults to use the 724-vertex symmetric sphere from :mod:`dipy.data` """ if sphere is not False: SignalMaker = LifeSignalMaker(self.gtab, evals=evals, sphere=sphere) if affine is None: affine = np.eye(4) streamline = transform_streamlines(streamline, affine) # Assign some local variables, for shorthand: all_coords = np.concatenate(streamline) vox_coords = unique_rows(np.round(all_coords).astype(np.intp)) del all_coords # We only consider the diffusion-weighted signals: n_bvecs = self.gtab.bvals[~self.gtab.b0s_mask].shape[0] v2f, v2fn = voxel2streamline(streamline, transformed=True, affine=affine, unique_idx=vox_coords) # How many fibers in each voxel (this will determine how many # components are in the matrix): n_unique_f = len(np.hstack(v2f.values())) # Preallocate these, which will be used to generate the sparse # matrix: f_matrix_sig = np.zeros(n_unique_f * n_bvecs, dtype=np.float) f_matrix_row = np.zeros(n_unique_f * n_bvecs, dtype=np.intp) f_matrix_col = np.zeros(n_unique_f * n_bvecs, dtype=np.intp) fiber_signal = [] for s_idx, s in enumerate(streamline): if sphere is not False: fiber_signal.append(SignalMaker.streamline_signal(s)) else: fiber_signal.append(streamline_signal(s, self.gtab, evals)) del streamline if sphere is not False: del SignalMaker keep_ct = 0 range_bvecs = np.arange(n_bvecs).astype(int) # In each voxel: for v_idx in range(vox_coords.shape[0]): mat_row_idx = (range_bvecs + v_idx * n_bvecs).astype(np.intp) # For each fiber in that voxel: for f_idx in v2f[v_idx]: # For each fiber-voxel combination, store the row/column # indices in the pre-allocated linear arrays f_matrix_row[keep_ct:keep_ct+n_bvecs] = mat_row_idx f_matrix_col[keep_ct:keep_ct+n_bvecs] = f_idx vox_fiber_sig = np.zeros(n_bvecs) for node_idx in v2fn[f_idx][v_idx]: # Sum the signal from each node of the fiber in that 
voxel: vox_fiber_sig += fiber_signal[f_idx][node_idx] # And add the summed thing into the corresponding rows: f_matrix_sig[keep_ct:keep_ct+n_bvecs] += vox_fiber_sig keep_ct = keep_ct + n_bvecs del v2f, v2fn # Allocate the sparse matrix, using the more memory-efficient 'csr' # format: life_matrix = sps.csr_matrix((f_matrix_sig, [f_matrix_row, f_matrix_col])) return life_matrix, vox_coords def _signals(self, data, vox_coords): """ Helper function to extract and separate all the signals we need to fit and evaluate a fit of this model Parameters ---------- data : 4D array vox_coords: n by 3 array The coordinates into the data array of the fiber nodes. """ # Fitting is done on the S0-normalized-and-demeaned diffusion-weighted # signal: idx_tuple = (vox_coords[:, 0], vox_coords[:, 1], vox_coords[:, 2]) # We'll look at a 2D array, extracting the data from the voxels: vox_data = data[idx_tuple] weighted_signal = vox_data[:, ~self.gtab.b0s_mask] b0_signal = np.mean(vox_data[:, self.gtab.b0s_mask], -1) relative_signal = (weighted_signal/b0_signal[:, None]) # The mean of the relative signal across directions in each voxel: mean_sig = np.mean(relative_signal, -1) to_fit = (relative_signal - mean_sig[:, None]).ravel() return (to_fit, weighted_signal, b0_signal, relative_signal, mean_sig, vox_data) def fit(self, data, streamline, affine=None, evals=[0.001, 0, 0], sphere=None): """ Fit the LiFE FiberModel for data and a set of streamlines associated with this data Parameters ---------- data : 4D array Diffusion-weighted data streamline : list A bunch of streamlines affine: 4 by 4 array (optional) The affine to go from the streamline coordinates to the data coordinates. Defaults to use `np.eye(4)` evals : list (optional) The eigenvalues of the tensor response function used in constructing the model signal. Default: [0.001, 0, 0] sphere: `dipy.core.Sphere` instance, or False Whether to approximate (and cache) the signal on a discrete sphere. This may confer a significant speed-up in setting up the problem, but is not as accurate. If `False`, we use the exact gradients along the streamlines to calculate the matrix, instead of an approximation. Returns ------- FiberFit class instance """ if affine is None: affine = np.eye(4) life_matrix, vox_coords = \ self.setup(streamline, affine, evals=evals, sphere=sphere) (to_fit, weighted_signal, b0_signal, relative_signal, mean_sig, vox_data) = self._signals(data, vox_coords) beta = opt.sparse_nnls(to_fit, life_matrix) return FiberFit(self, life_matrix, vox_coords, to_fit, beta, weighted_signal, b0_signal, relative_signal, mean_sig, vox_data, streamline, affine, evals) class FiberFit(ReconstFit): """ A fit of the LiFE model to diffusion data """ def __init__(self, fiber_model, life_matrix, vox_coords, to_fit, beta, weighted_signal, b0_signal, relative_signal, mean_sig, vox_data, streamline, affine, evals): """ Parameters ---------- fiber_model : A FiberModel class instance params : the parameters derived from a fit of the model to the data. 
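Notes ----- Instances of this class are normally obtained from :meth:`FiberModel.fit` rather than constructed directly. A minimal sketch (``gtab``, ``data`` and ``streamlines`` are assumed to already exist in the calling scope):: model = FiberModel(gtab) fit = model.fit(data, streamlines, affine=np.eye(4)) weights = fit.beta  # one weight per input streamline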
""" ReconstFit.__init__(self, fiber_model, vox_data) self.life_matrix = life_matrix self.vox_coords = vox_coords self.fit_data = to_fit self.beta = beta self.weighted_signal = weighted_signal self.b0_signal = b0_signal self.relative_signal = relative_signal self.mean_signal = mean_sig self.streamline = streamline self.affine = affine self.evals = evals def predict(self, gtab=None, S0=None): """ Predict the signal Parameters ---------- gtab : GradientTable Default: use self.gtab S0 : float or array The non-diffusion-weighted signal in the voxels for which a prediction is made. Default: use self.b0_signal Returns ------- prediction : ndarray of shape (voxels, bvecs) An array with a prediction of the signal in each voxel/direction """ # We generate the prediction and in each voxel, we add the # offset, according to the isotropic part of the signal, which was # removed prior to fitting: if gtab is None: _matrix = self.life_matrix gtab = self.model.gtab else: _model = FiberModel(gtab) _matrix, _ = _model.setup(self.streamline, self.affine, self.evals) pred_weighted = np.reshape(opt.spdot(_matrix, self.beta), (self.vox_coords.shape[0], np.sum(~gtab.b0s_mask))) pred = np.empty((self.vox_coords.shape[0], gtab.bvals.shape[0])) if S0 is None: S0 = self.b0_signal pred[..., gtab.b0s_mask] = S0[:, None] pred[..., ~gtab.b0s_mask] =\ (pred_weighted + self.mean_signal[:, None]) * S0[:, None] return pred dipy-0.13.0/dipy/tracking/local/000077500000000000000000000000001317371701200164035ustar00rootroot00000000000000dipy-0.13.0/dipy/tracking/local/__init__.py000066400000000000000000000006071317371701200205170ustar00rootroot00000000000000from .localtracking import LocalTracking from .tissue_classifier import (ActTissueClassifier, BinaryTissueClassifier, ThresholdTissueClassifier, TissueClassifier) from .direction_getter import DirectionGetter from dipy.tracking import utils __all__ = ["ActTissueClassifier", "BinaryTissueClassifier", "LocalTracking", "ThresholdTissueClassifier"] dipy-0.13.0/dipy/tracking/local/direction_getter.pxd000066400000000000000000000003331317371701200224510ustar00rootroot00000000000000cimport numpy as np cdef class DirectionGetter: cpdef int get_direction(self, double[::1] point, double[::1] direction) except -1 cpdef np.ndarray[np.float_t, ndim=2] initial_direction(self, double[::1] point) dipy-0.13.0/dipy/tracking/local/direction_getter.pyx000066400000000000000000000011141317371701200224740ustar00rootroot00000000000000cimport numpy as np """ # DirectionGetter declaration: cdef class DirectionGetter: cdef int get_direction(self, double *point, double *direction) except -1 cdef np.ndarray[np.float_t, ndim=2] initial_direction(self, double *point) """ cdef class DirectionGetter: cpdef int get_direction(self, double[::1] point, double[::1] direction) except -1: pass cpdef np.ndarray[np.float_t, ndim=2] initial_direction(self, double[::1] point): pass dipy-0.13.0/dipy/tracking/local/interpolation.pxd000066400000000000000000000004341317371701200220100ustar00rootroot00000000000000cimport numpy as np cdef int _trilinear_interpolate_c_4d(double[:, :, :, :] data, double[:] point, double[::1] result) nogil cpdef trilinear_interpolate4d(double[:, :, :, :] data, double[:] point, np.ndarray out=*) dipy-0.13.0/dipy/tracking/local/interpolation.pyx000066400000000000000000000064131317371701200220400ustar00rootroot00000000000000cimport cython cimport numpy as np import numpy as np import time from libc.math cimport floor @cython.boundscheck(False) @cython.wraparound(False) cdef int 
_trilinear_interpolate_c_4d(double[:, :, :, :] data, double[:] point, double[::1] result) nogil: """Tri-linear interpolation along the last dimension of a 4d array Parameters ---------- point : 1d array (3,) 3 doubles representing a 3d point in space. If point has integer values ``[i, j, k]``, the result will be the same as ``data[i, j, k]``. data : 4d array Data to be interpolated. result : 1d array The result of interpolation. Should have length equal to the ``data.shape[3]``. Returns ------- err : int 0 : successful interpolation. -1 : point is outside the data area, meaning round(point) is not a valid index to data. -2 : point has the wrong shape -3 : shape of data and result do not match """ cdef: np.npy_intp flr, N double w, rem np.npy_intp index[3][2] double weight[3][2] if point.shape[0] != 3: return -2 if data.shape[3] != result.shape[0]: return -3 for i in range(3): if point[i] < -.5 or point[i] >= (data.shape[i] - .5): return -1 flr = floor(point[i]) rem = point[i] - flr index[i][0] = flr + (flr == -1) index[i][1] = flr + (flr != (data.shape[i] - 1)) weight[i][0] = 1 - rem weight[i][1] = rem N = result.shape[0] for i in range(N): result[i] = 0 for i in range(2): for j in range(2): for k in range(2): w = weight[0][i] * weight[1][j] * weight[2][k] for L in range(N): result[L] += w * data[index[0][i], index[1][j], index[2][k], L] return 0 cpdef trilinear_interpolate4d(double[:, :, :, :] data, double[:] point, np.ndarray out=None): """Tri-linear interpolation along the last dimension of a 4d array Parameters ---------- point : 1d array (3,) 3 doubles representing a 3d point in space. If point has integer values ``[i, j, k]``, the result will be the same as ``data[i, j, k]``. data : 4d array Data to be interpolated. out : 1d array, optional The output array for the result of the interpolation. Returns ------- out : 1d array The result of interpolation. """ cdef: int err double[::1] outview if out is None: out = np.empty(data.shape[3]) outview = out err = _trilinear_interpolate_c_4d(data, point, out) if err == 0: return out elif err == -1: raise IndexError("The point point is outside data") elif err == -2: raise ValueError("Point must be a 1d array with shape (3,).") elif err == -3: # This should only happen if the user passes an bad out array msg = "out array must have same size as the last dimension of data." raise ValueError(msg) def nearestneighbor_interpolate(data, point): index = tuple(np.round(point).astype(np.int)) return data[index] dipy-0.13.0/dipy/tracking/local/localtrack.pyx000066400000000000000000000143121317371701200212650ustar00rootroot00000000000000cimport cython cimport numpy as np from .direction_getter cimport DirectionGetter from .tissue_classifier cimport (TissueClassifier, TissueClass, TRACKPOINT, ENDPOINT, OUTSIDEIMAGE, INVALIDPOINT) cdef extern from "dpy_math.h" nogil: int dpy_signbit(double x) double dpy_rint(double x) double abs(double) @cython.cdivision(True) cdef inline double stepsize(double point, double increment) nogil: """Compute the step size to the closest boundary in units of increment.""" cdef: double dist dist = dpy_rint(point) + .5 - dpy_signbit(increment) - point if dist == 0: # Point is on an edge, return step size to next edge. This is most # likely to come up if overstep is set to 0. return 1. / abs(increment) else: return dist / increment cdef void step_to_boundary(double *point, double *direction, double overstep) nogil: """Takes a step from point in along direction just past a voxel boundary. 
Parameters ---------- direction : c-pointer to double[3] The direction along which the step should be taken. point : c-pointer to double[3] The tracking point which will be updated by this function. overstep : double It's often useful to have the points of a streamline lie inside of a voxel instead of having them lie on the boundary. For this reason, each step will overshoot the boundary by ``overstep * direction``. This should not be negative. """ cdef: double step_sizes[3] double smallest_step for i in range(3): step_sizes[i] = stepsize(point[i], direction[i]) smallest_step = step_sizes[0] for i in range(1, 3): if step_sizes[i] < smallest_step: smallest_step = step_sizes[i] smallest_step += overstep for i in range(3): point[i] += smallest_step * direction[i] cdef void fixed_step(double *point, double *direction, double stepsize) nogil: """Updates point by stepping in direction. Parameters ---------- direction : c-pointer to double[3] The direction along which the step should be taken. point : c-pointer to double[3] The tracking point which will be updated by this function. stepsize : double The size of the step in units of direction. """ for i in range(3): point[i] += direction[i] * stepsize cdef inline void copypoint(double *a, double *b) nogil: for i in range(3): b[i] = a[i] @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) def local_tracker(DirectionGetter dg, TissueClassifier tc, np.ndarray[np.float_t, ndim=1] seed, np.ndarray[np.float_t, ndim=1] first_step, np.ndarray[np.float_t, ndim=1] voxel_size, np.ndarray[np.float_t, ndim=2, mode='c'] streamline, double stepsize, int fixedstep): """Tracks one direction from a seed. This function is the main workhorse of the ``LocalTracking`` class defined in ``dipy.tracking.local.localtracking``. Parameters ---------- dg : DirectionGetter Used to choose tracking directions. tc : TissueClassifier Used to check tissue type along path. seed : array, float, 1d, (3,) First point of the (partial) streamline. first_step : array, float, 1d, (3,) Used as ``prev_dir`` for selecting the step direction from the seed point. voxel_size : array, float, 1d, (3,) Size of voxels in the data set. streamline : array, float, 2d, (N, 3) Output of tracking will be put into this array. The length of this array, ``N``, will set the maximum allowable length of the streamline. stepsize : float Size of tracking steps in mm if ``fixedstep``. fixedstep : bool If true, a fixed stepsize is used, otherwise a variable step size is used. Returns ------- end : int This function updates the ``streamline`` array with points as it tracks. Points in ``streamline[:abs(end)]`` were updated by the function. The sign of ``end`` and whether the last point was included depend on the reason that the streamline was terminated. End reasons: 1) maximum length of the streamline was reached. ``end == N`` 2) ``direction_getter`` could not return a direction. ``end > 0`` Last point is the point at which no direction could be found. 3) Streamline encountered an ENDPOINT. ``end > 0`` Last point is the ENDPOINT. 4) Streamline encountered an OUTSIDEIMAGE. ``end > 0`` Last point is the point before OUTSIDEIMAGE. 5) Streamline encountered an INVALIDPOINT. ``end < 0`` Last point is INVALIDPOINT.
""" if (seed.shape[0] != 3 or first_step.shape[0] != 3 or voxel_size.shape[0] != 3 or streamline.shape[1] != 3): raise ValueError() cdef: int i TissueClass tissue_class double point[3], dir[3], vs[3], voxdir[3] double[::1] pview = point, dview = dir void (*step)(double*, double*, double) nogil if fixedstep: step = fixed_step else: step = step_to_boundary for i in range(3): streamline[0, i] = point[i] = seed[i] dir[i] = first_step[i] vs[i] = voxel_size[i] tissue_class = TRACKPOINT for i in range(1, streamline.shape[0]): if dg.get_direction(pview, dview): break for j in range(3): voxdir[j] = dir[j] / vs[j] step(point, voxdir, stepsize) copypoint(point, &streamline[i, 0]) tissue_class = tc.check_point(pview) if tissue_class == TRACKPOINT: continue elif (tissue_class == ENDPOINT or tissue_class == INVALIDPOINT): i += 1 break elif tissue_class == OUTSIDEIMAGE: break else: # maximum length of streamline has been reached, return everything i = streamline.shape[0] return i, tissue_class dipy-0.13.0/dipy/tracking/local/localtracking.py000066400000000000000000000127461317371701200216040ustar00rootroot00000000000000import numpy as np from dipy.tracking.local.localtrack import local_tracker from dipy.align import Bunch from dipy.tracking import utils # enum TissueClass (tissue_classifier.pxd) is not accessible # from here. To be changed when minimal cython version > 0.21. # cython 0.21 - cpdef enum to export values into Python-level namespace # https://github.com/cython/cython/commit/50133b5a91eea348eddaaad22a606a7fa1c7c457 TissueTypes = Bunch(OUTSIDEIMAGE=-1, INVALIDPOINT=0, TRACKPOINT=1, ENDPOINT=2) class LocalTracking(object): """A streamline generator for local tracking methods""" @staticmethod def _get_voxel_size(affine): """Computes the voxel sizes of an image from the affine. Checks that the affine does not have any shear because local_tracker assumes that the data is sampled on a regular grid. """ lin = affine[:3, :3] dotlin = np.dot(lin.T, lin) # Check that the affine is well behaved if not np.allclose(np.triu(dotlin, 1), 0., atol=1e-5): msg = ("The affine provided seems to contain shearing, data must " "be acquired or interpolated on a regular grid to be used " "with `LocalTracking`.") raise ValueError(msg) return np.sqrt(dotlin.diagonal()) def __init__(self, direction_getter, tissue_classifier, seeds, affine, step_size, max_cross=None, maxlen=500, fixedstep=True, return_all=True): """Creates streamlines by using local fiber-tracking. Parameters ---------- direction_getter : instance of DirectionGetter Used to get directions for fiber tracking. tissue_classifier : instance of TissueClassifier Identifies endpoints and invalid points to inform tracking. seeds : array (N, 3) Points to seed the tracking. Seed points should be given in point space of the track (see ``affine``). affine : array (4, 4) Coordinate space for the streamline point with respect to voxel indices of input data. This affine can contain scaling, rotational, and translational components but should not contain any shearing. An identity matrix can be used to generate streamlines in "voxel coordinates" as long as isotropic voxels were used to acquire the data. step_size : float Step size used for tracking. max_cross : int or None The maximum number of direction to track from each seed in crossing voxels. By default all initial directions are tracked. maxlen : int Maximum number of steps to track from seed. Used to prevent infinite loops. fixedstep : bool If true, a fixed stepsize is used, otherwise a variable step size is used. 
return_all : bool If true, return all generated streamlines, otherwise only streamlines reaching end points or exiting the image. """ self.direction_getter = direction_getter self.tissue_classifier = tissue_classifier self.seeds = seeds if affine.shape != (4, 4): raise ValueError("affine should be a (4, 4) array.") self.affine = affine self._voxel_size = self._get_voxel_size(affine) self.step_size = step_size self.fixed = fixedstep self.max_cross = max_cross self.maxlen = maxlen self.return_all = return_all def __iter__(self): # Make tracks, move them to point space and return track = self._generate_streamlines() return utils.move_streamlines(track, self.affine) def _generate_streamlines(self): """A streamline generator""" N = self.maxlen dg = self.direction_getter tc = self.tissue_classifier ss = self.step_size fixed = self.fixed max_cross = self.max_cross vs = self._voxel_size # Get inverse transform (lin/offset) for seeds inv_A = np.linalg.inv(self.affine) lin = inv_A[:3, :3] offset = inv_A[:3, 3] F = np.empty((N + 1, 3), dtype=float) B = F.copy() for s in self.seeds: s = np.dot(lin, s) + offset directions = dg.initial_direction(s) if directions.size == 0 and self.return_all: # only the seed position yield [s] directions = directions[:max_cross] for first_step in directions: stepsF, tissue_class = local_tracker(dg, tc, s, first_step, vs, F, ss, fixed) if not (self.return_all or tissue_class == TissueTypes.ENDPOINT or tissue_class == TissueTypes.OUTSIDEIMAGE): continue first_step = -first_step stepsB, tissue_class = local_tracker(dg, tc, s, first_step, vs, B, ss, fixed) if not (self.return_all or tissue_class == TissueTypes.ENDPOINT or tissue_class == TissueTypes.OUTSIDEIMAGE): continue if stepsB == 1: streamline = F[:stepsF].copy() else: parts = (B[stepsB-1:0:-1], F[:stepsF]) streamline = np.concatenate(parts, axis=0) yield streamline dipy-0.13.0/dipy/tracking/local/tests/000077500000000000000000000000001317371701200175455ustar00rootroot00000000000000dipy-0.13.0/dipy/tracking/local/tests/__init__.py000066400000000000000000000000461317371701200216560ustar00rootroot00000000000000# Make tracking/local/tests a package dipy-0.13.0/dipy/tracking/local/tests/test_local_tracking.py000066400000000000000000000403011317371701200241300ustar00rootroot00000000000000 import nibabel as nib import numpy as np import numpy.testing as npt from dipy.core.sphere import HemiSphere, unit_octahedron from dipy.core.gradients import gradient_table from dipy.data import get_data from dipy.tracking.local import (LocalTracking, ThresholdTissueClassifier, DirectionGetter, TissueClassifier, BinaryTissueClassifier) from dipy.direction import (ProbabilisticDirectionGetter, DeterministicMaximumDirectionGetter) from dipy.tracking.local.interpolation import trilinear_interpolate4d from dipy.tracking.local.localtracking import TissueTypes def test_stop_conditions(): """This tests that the Local Tracker behaves as expected for the following tissue types. 
""" # TissueTypes.TRACKPOINT = 1 # TissueTypes.ENDPOINT = 2 # TissueTypes.INVALIDPOINT = 0 tissue = np.array([[2, 1, 1, 2, 1], [2, 2, 1, 1, 2], [1, 1, 1, 1, 1], [1, 1, 1, 2, 2], [0, 1, 1, 1, 2], [0, 1, 1, 0, 2], [1, 0, 1, 1, 1]]) tissue = tissue[None] class SimpleTissueClassifier(TissueClassifier): def check_point(self, point): p = np.round(point).astype(int) if any(p < 0) or any(p >= tissue.shape): return TissueTypes.OUTSIDEIMAGE return tissue[p[0], p[1], p[2]] class SimpleDirectionGetter(DirectionGetter): def initial_direction(self, point): # Test tracking along the rows (z direction) # of the tissue array above p = np.round(point).astype(int) if (any(p < 0) or any(p >= tissue.shape) or tissue[p[0], p[1], p[2]] == TissueTypes.INVALIDPOINT): return np.array([]) return np.array([[0., 0., 1.]]) def get_direction(self, p, d): # Always keep previous direction return 0 # Create a seeds along x = np.array([0., 0, 0, 0, 0, 0, 0]) y = np.array([0., 1, 2, 3, 4, 5, 6]) z = np.array([1., 1, 1, 0, 1, 1, 1]) seeds = np.column_stack([x, y, z]) # Set up tracking dg = SimpleDirectionGetter() tc = SimpleTissueClassifier() streamlines_not_all = LocalTracking(direction_getter=dg, tissue_classifier=tc, seeds=seeds, affine=np.eye(4), step_size=1., return_all=False) streamlines_all = LocalTracking(direction_getter=dg, tissue_classifier=tc, seeds=seeds, affine=np.eye(4), step_size=1., return_all=True) streamlines_not_all = iter(streamlines_not_all) # valid streamlines only streamlines_all = iter(streamlines_all) # all streamlines # Check that the first streamline stops at 0 and 3 (ENDPOINT) y = 0 sl = next(streamlines_not_all) npt.assert_equal(sl[0], [0, y, 0]) npt.assert_equal(sl[-1], [0, y, 3]) npt.assert_equal(len(sl), 4) sl = next(streamlines_all) npt.assert_equal(sl[0], [0, y, 0]) npt.assert_equal(sl[-1], [0, y, 3]) npt.assert_equal(len(sl), 4) # Check that the first streamline stops at 0 and 4 (ENDPOINT) y = 1 sl = next(streamlines_not_all) npt.assert_equal(sl[0], [0, y, 0]) npt.assert_equal(sl[-1], [0, y, 4]) npt.assert_equal(len(sl), 5) sl = next(streamlines_all) npt.assert_equal(sl[0], [0, y, 0]) npt.assert_equal(sl[-1], [0, y, 4]) npt.assert_equal(len(sl), 5) # This streamline should be the same as above. This row does not have # ENDPOINTs, but the streamline should stop at the edge and not include # OUTSIDEIMAGE points. y = 2 sl = next(streamlines_not_all) npt.assert_equal(sl[0], [0, y, 0]) npt.assert_equal(sl[-1], [0, y, 4]) npt.assert_equal(len(sl), 5) sl = next(streamlines_all) npt.assert_equal(sl[0], [0, y, 0]) npt.assert_equal(sl[-1], [0, y, 4]) npt.assert_equal(len(sl), 5) # If we seed on the edge, the first (or last) point in the streamline # should be the seed. y = 3 sl = next(streamlines_not_all) npt.assert_equal(sl[0], seeds[y]) sl = next(streamlines_all) npt.assert_equal(sl[0], seeds[y]) # The last 3 seeds should not produce streamlines, # INVALIDPOINT streamlines are rejected (return_all=False). npt.assert_equal(len(list(streamlines_not_all)), 0) # The last 3 seeds should produce invalid streamlines, # INVALIDPOINT streamlines are kept (return_all=True). 
# The streamline stops at 0 (INVALIDPOINT) and 4 (ENDPOINT) y = 4 sl = next(streamlines_all) npt.assert_equal(sl[0], [0, y, 0]) npt.assert_equal(sl[-1], [0, y, 4]) npt.assert_equal(len(sl), 5) # The streamline stops at 0 (INVALIDPOINT) and 4 (INVALIDPOINT) y = 5 sl = next(streamlines_all) npt.assert_equal(sl[0], [0, y, 0]) npt.assert_equal(sl[-1], [0, y, 3]) npt.assert_equal(len(sl), 4) # The last streamline should contain only one point, the seed point, # because no valid inital direction was returned. y = 6 sl = next(streamlines_all) npt.assert_equal(sl[0], seeds[y]) npt.assert_equal(sl[-1], seeds[y]) npt.assert_equal(len(sl), 1) def test_trilinear_interpolate(): a, b, c = np.random.random(3) def linear_function(x, y, z): return a * x + b * y + c * z N = 6 x, y, z = np.mgrid[:N, :N, :N] data = np.empty((N, N, N, 2)) data[..., 0] = linear_function(x, y, z) data[..., 1] = 99. # Use a point not near the edges point = np.array([2.1, 4.8, 3.3]) out = trilinear_interpolate4d(data, point) expected = [linear_function(*point), 99.] npt.assert_array_almost_equal(out, expected) # Pass in out ourselves out[:] = -1 trilinear_interpolate4d(data, point, out) npt.assert_array_almost_equal(out, expected) # use a point close to an edge point = np.array([-.1, -.1, -.1]) expected = [0., 99.] out = trilinear_interpolate4d(data, point) npt.assert_array_almost_equal(out, expected) # different edge point = np.array([2.4, 5.4, 3.3]) # On the edge 5.4 get treated as the max y value, 5. expected = [linear_function(point[0], 5., point[2]), 99.] out = trilinear_interpolate4d(data, point) npt.assert_array_almost_equal(out, expected) # Test index errors point = np.array([2.4, 5.5, 3.3]) npt.assert_raises(IndexError, trilinear_interpolate4d, data, point) point = np.array([2.4, -1., 3.3]) npt.assert_raises(IndexError, trilinear_interpolate4d, data, point) def test_ProbabilisticOdfWeightedTracker(): """This tests that the Probabalistic Direction Getter plays nice LocalTracking and produces reasonable streamlines in a simple example. """ sphere = HemiSphere.from_sphere(unit_octahedron) # A simple image with three possible configurations, a vertical tract, # a horizontal tract and a crossing pmf_lookup = np.array([[0., 0., 1.], [1., 0., 0.], [0., 1., 0.], [.6, .4, 0.]]) simple_image = np.array([[0, 1, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0], [0, 3, 2, 2, 2, 0], [0, 1, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0], ]) simple_image = simple_image[..., None] pmf = pmf_lookup[simple_image] seeds = [np.array([1., 1., 0.])] * 30 mask = (simple_image > 0).astype(float) tc = ThresholdTissueClassifier(mask, .5) dg = ProbabilisticDirectionGetter.from_pmf(pmf, 90, sphere, pmf_threshold=0.1) streamlines = LocalTracking(dg, tc, seeds, np.eye(4), 1.) expected = [np.array([[0., 1., 0.], [1., 1., 0.], [2., 1., 0.], [2., 2., 0.], [2., 3., 0.], [2., 4., 0.], [2., 5., 0.]]), np.array([[0., 1., 0.], [1., 1., 0.], [2., 1., 0.], [3., 1., 0.], [4., 1., 0.]])] def allclose(x, y): return x.shape == y.shape and np.allclose(x, y) path = [False, False] for sl in streamlines: if allclose(sl, expected[0]): path[0] = True elif allclose(sl, expected[1]): path[1] = True else: raise AssertionError() npt.assert_(all(path)) # The first path is not possible if 90 degree turns are excluded dg = ProbabilisticDirectionGetter.from_pmf(pmf, 80, sphere, pmf_threshold=0.1) streamlines = LocalTracking(dg, tc, seeds, np.eye(4), 1.) 
for sl in streamlines: npt.assert_(np.allclose(sl, expected[1])) # The first path is not possible if pmf_threshold > 0.4 dg = ProbabilisticDirectionGetter.from_pmf(pmf, 90, sphere, pmf_threshold=0.5) streamlines = LocalTracking(dg, tc, seeds, np.eye(4), 1.) for sl in streamlines: npt.assert_(np.allclose(sl, expected[1])) def test_MaximumDeterministicTracker(): """This tests that the Maximum Deterministic Direction Getter plays nice LocalTracking and produces reasonable streamlines in a simple example. """ sphere = HemiSphere.from_sphere(unit_octahedron) # A simple image with three possible configurations, a vertical tract, # a horizontal tract and a crossing pmf_lookup = np.array([[0., 0., 1.], [1., 0., 0.], [0., 1., 0.], [.4, .6, 0.]]) simple_image = np.array([[0, 1, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0], [0, 3, 2, 2, 2, 0], [0, 1, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0], ]) simple_image = simple_image[..., None] pmf = pmf_lookup[simple_image] seeds = [np.array([1., 1., 0.])] * 30 mask = (simple_image > 0).astype(float) tc = ThresholdTissueClassifier(mask, .5) dg = DeterministicMaximumDirectionGetter.from_pmf(pmf, 90, sphere, pmf_threshold=0.1) streamlines = LocalTracking(dg, tc, seeds, np.eye(4), 1.) expected = [np.array([[0., 1., 0.], [1., 1., 0.], [2., 1., 0.], [2., 2., 0.], [2., 3., 0.], [2., 4., 0.], [2., 5., 0.]]), np.array([[0., 1., 0.], [1., 1., 0.], [2., 1., 0.], [3., 1., 0.], [4., 1., 0.]]), np.array([[0., 1., 0.], [1., 1., 0.], [2., 1., 0.]])] def allclose(x, y): return x.shape == y.shape and np.allclose(x, y) for sl in streamlines: if not allclose(sl, expected[0]): raise AssertionError() # The first path is not possible if 90 degree turns are excluded dg = DeterministicMaximumDirectionGetter.from_pmf(pmf, 80, sphere, pmf_threshold=0.1) streamlines = LocalTracking(dg, tc, seeds, np.eye(4), 1.) for sl in streamlines: npt.assert_(np.allclose(sl, expected[1])) # Both path are not possible if 90 degree turns are exclude and # if pmf_threhold is larger than 0.4. Streamlines should stop at # the crossing dg = DeterministicMaximumDirectionGetter.from_pmf(pmf, 80, sphere, pmf_threshold=0.5) streamlines = LocalTracking(dg, tc, seeds, np.eye(4), 1.) for sl in streamlines: npt.assert_(np.allclose(sl, expected[2])) def test_affine_transformations(): """This tests that the input affine is properly handled by LocalTracking and produces reasonable streamlines in a simple example. """ sphere = HemiSphere.from_sphere(unit_octahedron) # A simple image with three possible configurations, a vertical tract, # a horizontal tract and a crossing pmf_lookup = np.array([[0., 0., 1.], [1., 0., 0.], [0., 1., 0.], [.4, .6, 0.]]) simple_image = np.array([[0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0], [0, 3, 2, 2, 2, 0], [0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], ]) simple_image = simple_image[..., None] pmf = pmf_lookup[simple_image] seeds = [np.array([1., 1., 0.]), np.array([2., 4., 0.])] expected = [np.array([[0., 1., 0.], [1., 1., 0.], [2., 1., 0.], [3., 1., 0.], [4., 1., 0.]]), np.array([[2., 0., 0.], [2., 1., 0.], [2., 2., 0.], [2., 3., 0.], [2., 4., 0.], [2., 5., 0.]])] mask = (simple_image > 0).astype(float) tc = BinaryTissueClassifier(mask) dg = DeterministicMaximumDirectionGetter.from_pmf(pmf, 60, sphere, pmf_threshold=0.1) streamlines = LocalTracking(dg, tc, seeds, np.eye(4), 1.) # TST- bad affine wrong shape bad_affine = np.eye(3) npt.assert_raises(ValueError, LocalTracking, dg, tc, seeds, bad_affine, 1.) # TST - bad affine with shearing bad_affine = np.eye(4) bad_affine[0, 1] = 1. 
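# The off-diagonal element introduces shear, so _get_voxel_size should raise ValueError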
npt.assert_raises(ValueError, LocalTracking, dg, tc, seeds, bad_affine, 1.) # TST - identity a0 = np.eye(4) # TST - affines with positive/negative offsets a1 = np.eye(4) a1[:3, 3] = [1, 2, 3] a2 = np.eye(4) a2[:3, 3] = [-2, 0, -1] # TST - affine with scaling a3 = np.eye(4) a3[0, 0] = a3[1, 1] = a3[2, 2] = 8 # TST - affine with axes inverting (negative value) a4 = np.eye(4) a4[1, 1] = a4[2, 2] = -1 # TST - combined affines a5 = a1 + a2 + a3 a5[3, 3] = 1 # TST - in vivo affine exemple # Sometimes data have affines with tiny shear components. # For example, the small_101D data-set has some of that: fdata, _, _ = get_data('small_101D') a6 = nib.load(fdata).affine for affine in [a0, a1, a2, a3, a4, a5, a6]: lin = affine[:3, :3] offset = affine[:3, 3] seeds_trans = [np.dot(lin, s) + offset for s in seeds] # We compute the voxel size to ajust the step size to one voxel voxel_size = np.mean(np.sqrt(np.dot(lin, lin).diagonal())) streamlines = LocalTracking(direction_getter=dg, tissue_classifier=tc, seeds=seeds_trans, affine=affine, step_size=voxel_size, return_all=True) # We apply the inverse affine transformation to the generated # streamlines. It should be equals to the expected streamlines # (generated with the identity affine matrix). affine_inv = np.linalg.inv(affine) lin = affine_inv[:3, :3] offset = affine_inv[:3, 3] streamlines_inv = [] for line in streamlines: streamlines_inv.append([np.dot(pts, lin) + offset for pts in line]) npt.assert_equal(len(streamlines_inv[0]), len(expected[0])) npt.assert_(np.allclose(streamlines_inv[0], expected[0], atol=0.3)) npt.assert_equal(len(streamlines_inv[1]), len(expected[1])) npt.assert_(np.allclose(streamlines_inv[1], expected[1], atol=0.3)) if __name__ == "__main__": npt.run_module_suite() dipy-0.13.0/dipy/tracking/local/tests/test_tissue_classifier.py000066400000000000000000000131611317371701200247000ustar00rootroot00000000000000 import numpy as np import numpy.testing as npt import scipy.ndimage from dipy.core.ndindex import ndindex from dipy.tracking.local import (BinaryTissueClassifier, ThresholdTissueClassifier, ActTissueClassifier) from dipy.tracking.local.localtracking import TissueTypes def test_binary_tissue_classifier(): """This tests that the binary tissue classifier returns expected tissue types. 
""" mask = np.random.random((4, 4, 4)) mask[mask < 0.4] = 0.0 btc_boolean = BinaryTissueClassifier(mask > 0) btc_float64 = BinaryTissueClassifier(mask) # test voxel center for ind in ndindex(mask.shape): pts = np.array(ind, dtype='float64') state_boolean = btc_boolean.check_point(pts) state_float64 = btc_float64.check_point(pts) if mask[ind] > 0: npt.assert_equal(state_boolean, TissueTypes.TRACKPOINT) npt.assert_equal(state_float64, TissueTypes.TRACKPOINT) else: npt.assert_equal(state_boolean, TissueTypes.ENDPOINT) npt.assert_equal(state_float64, TissueTypes.ENDPOINT) # test random points in voxel for ind in ndindex(mask.shape): for _ in range(50): pts = np.array(ind, dtype='float64') + np.random.random(3) - 0.5 state_boolean = btc_boolean.check_point(pts) state_float64 = btc_float64.check_point(pts) if mask[ind] > 0: npt.assert_equal(state_boolean, TissueTypes.TRACKPOINT) npt.assert_equal(state_float64, TissueTypes.TRACKPOINT) else: npt.assert_equal(state_boolean, TissueTypes.ENDPOINT) npt.assert_equal(state_float64, TissueTypes.ENDPOINT) # test outside points outside_pts = [[100, 100, 100], [0, -1, 1], [0, 10, 2], [0, 0.5, -0.51], [0, -0.51, 0.1], [4, 0, 0]] for pts in outside_pts: pts = np.array(pts, dtype='float64') state_boolean = btc_boolean.check_point(pts) state_float64 = btc_float64.check_point(pts) npt.assert_equal(state_boolean, TissueTypes.OUTSIDEIMAGE) npt.assert_equal(state_float64, TissueTypes.OUTSIDEIMAGE) def test_threshold_tissue_classifier(): """This tests that the thresholdy tissue classifier returns expected tissue types. """ tissue_map = np.random.random((4, 4, 4)) ttc = ThresholdTissueClassifier(tissue_map.astype('float32'), 0.5) # test voxel center for ind in ndindex(tissue_map.shape): pts = np.array(ind, dtype='float64') state = ttc.check_point(pts) if tissue_map[ind] > 0.5: npt.assert_equal(state, TissueTypes.TRACKPOINT) else: npt.assert_equal(state, TissueTypes.ENDPOINT) # test random points in voxel inds = [[0, 1.4, 2.2], [0, 2.3, 2.3], [0, 2.2, 1.3], [0, 0.9, 2.2], [0, 2.8, 1.1], [0, 1.1, 3.3], [0, 2.1, 1.9], [0, 3.1, 3.1], [0, 0.1, 0.1], [0, 0.9, 0.5], [0, 0.9, 0.5], [0, 2.9, 0.1]] for pts in inds: pts = np.array(pts, dtype='float64') state = ttc.check_point(pts) res = scipy.ndimage.map_coordinates( tissue_map, np.reshape(pts, (3, 1)), order=1, mode='nearest') if res > 0.5: npt.assert_equal(state, TissueTypes.TRACKPOINT) else: npt.assert_equal(state, TissueTypes.ENDPOINT) # test outside points outside_pts = [[100, 100, 100], [0, -1, 1], [0, 10, 2], [0, 0.5, -0.51], [0, -0.51, 0.1]] for pts in outside_pts: pts = np.array(pts, dtype='float64') state = ttc.check_point(pts) npt.assert_equal(state, TissueTypes.OUTSIDEIMAGE) def test_act_tissue_classifier(): """This tests that the act tissue classifier returns expected tissue types. 
""" gm = np.random.random((4, 4, 4)) wm = np.random.random((4, 4, 4)) csf = np.random.random((4, 4, 4)) tissue_sum = gm + wm + csf gm /= tissue_sum wm /= tissue_sum csf /= tissue_sum act_tc = ActTissueClassifier(include_map=gm, exclude_map=csf) # test voxel center for ind in ndindex(wm.shape): pts = np.array(ind, dtype='float64') state = act_tc.check_point(pts) if csf[ind] > 0.5: npt.assert_equal(state, TissueTypes.INVALIDPOINT) elif gm[ind] > 0.5: npt.assert_equal(state, TissueTypes.ENDPOINT) else: npt.assert_equal(state, TissueTypes.TRACKPOINT) # test random points in voxel inds = [[0, 1.4, 2.2], [0, 2.3, 2.3], [0, 2.2, 1.3], [0, 0.9, 2.2], [0, 2.8, 1.1], [0, 1.1, 3.3], [0, 2.1, 1.9], [0, 3.1, 3.1], [0, 0.1, 0.1], [0, 0.9, 0.5], [0, 0.9, 0.5], [0, 2.9, 0.1]] for pts in inds: pts = np.array(pts, dtype='float64') state = act_tc.check_point(pts) gm_res = scipy.ndimage.map_coordinates( gm, np.reshape(pts, (3, 1)), order=1, mode='nearest') csf_res = scipy.ndimage.map_coordinates( csf, np.reshape(pts, (3, 1)), order=1, mode='nearest') if csf_res > 0.5: npt.assert_equal(state, TissueTypes.INVALIDPOINT) elif gm_res > 0.5: npt.assert_equal(state, TissueTypes.ENDPOINT) else: npt.assert_equal(state, TissueTypes.TRACKPOINT) # test outside points outside_pts = [[100, 100, 100], [0, -1, 1], [0, 10, 2], [0, 0.5, -0.51], [0, -0.51, 0.1]] for pts in outside_pts: pts = np.array(pts, dtype='float64') state = act_tc.check_point(pts) npt.assert_equal(state, TissueTypes.OUTSIDEIMAGE) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/tracking/local/tissue_classifier.pxd000066400000000000000000000014201317371701200226350ustar00rootroot00000000000000cdef enum TissueClass: PYERROR = -2 OUTSIDEIMAGE = -1 INVALIDPOINT = 0 TRACKPOINT = 1 ENDPOINT = 2 cdef class TissueClassifier: # Please update doc-string in tissue_classifer.pyx if you change these # declarations cdef: double interp_out_double[1] double[::1] interp_out_view cpdef TissueClass check_point(self, double[::1] point) except PYERROR cdef class BinaryTissueClassifier(TissueClassifier): cdef: unsigned char [:, :, :] mask pass cdef class ThresholdTissueClassifier(TissueClassifier): cdef: double threshold double[:, :, :] metric_map pass cdef class ActTissueClassifier(TissueClassifier): cdef: double[:, :, :] include_map, exclude_map pass dipy-0.13.0/dipy/tracking/local/tissue_classifier.pyx000066400000000000000000000123721317371701200226720ustar00rootroot00000000000000cimport cython cimport numpy as np cdef extern from "dpy_math.h" nogil: int dpy_rint(double) from .interpolation cimport(trilinear_interpolate4d, _trilinear_interpolate_c_4d) import numpy as np cdef class TissueClassifier: cpdef TissueClass check_point(self, double[::1] point) except PYERROR: pass cdef class BinaryTissueClassifier(TissueClassifier): """ cdef: unsigned char[:, :, :] mask """ def __cinit__(self, mask): self.interp_out_view = self.interp_out_double self.mask = (mask > 0).astype('uint8') @cython.boundscheck(False) @cython.wraparound(False) @cython.initializedcheck(False) cpdef TissueClass check_point(self, double[::1] point) except PYERROR: cdef: unsigned char result int err int voxel[3] if point.shape[0] != 3: raise ValueError("Point has wrong shape") voxel[0] = int(dpy_rint(point[0])) voxel[1] = int(dpy_rint(point[1])) voxel[2] = int(dpy_rint(point[2])) if (voxel[0] < 0 or voxel[0] >= self.mask.shape[0] or voxel[1] < 0 or voxel[1] >= self.mask.shape[1] or voxel[2] < 0 or voxel[2] >= self.mask.shape[2]): return OUTSIDEIMAGE result = self.mask[voxel[0], voxel[1], 
voxel[2]] if result > 0: return TRACKPOINT else: return ENDPOINT cdef class ThresholdTissueClassifier(TissueClassifier): """ # Declarations from tissue_classifier.pxd bellow cdef: double threshold, interp_out_double[1] double[:] interp_out_view = interp_out_view double[:, :, :] metric_map """ def __cinit__(self, metric_map, threshold): self.interp_out_view = self.interp_out_double self.metric_map = np.asarray(metric_map, 'float64') self.threshold = threshold @cython.boundscheck(False) @cython.wraparound(False) @cython.initializedcheck(False) cpdef TissueClass check_point(self, double[::1] point) except PYERROR: cdef: double result int err err = _trilinear_interpolate_c_4d(self.metric_map[..., None], point, self.interp_out_view) if err == -1: return OUTSIDEIMAGE elif err == -2: raise ValueError("Point has wrong shape") elif err != 0: # This should never happen raise RuntimeError( "Unexpected interpolation error (code:%i)" % err) result = self.interp_out_view[0] if result > self.threshold: return TRACKPOINT else: return ENDPOINT cdef class ActTissueClassifier(TissueClassifier): r""" Anatomically-Constrained Tractography (ACT) stopping criteria from [1]_. This implements the use of partial volume fraction (PVE) maps to determine when the tracking stops. The proposed ([1]_) method that cuts streamlines going through subcortical gray matter regions is not implemented here. The backtracking technique for streamlines reaching INVALIDPOINT is not implemented either. cdef: double interp_out_double[1] double[:] interp_out_view = interp_out_view double[:, :, :] include_map, exclude_map References ---------- .. [1] Smith, R. E., Tournier, J.-D., Calamante, F., & Connelly, A. "Anatomically-constrained tractography: Improved diffusion MRI streamlines tractography through effective use of anatomical information." NeuroImage, 63(3), 1924–1938, 2012. 
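    Examples
    --------
    A minimal usage sketch (illustrative; ``gm_pve`` and ``csf_pve`` stand
    for grey-matter and CSF partial volume fraction maps that the caller has
    already loaded as 3D ``float64`` arrays)::

        act = ActTissueClassifier(include_map=gm_pve, exclude_map=csf_pve)
        state = act.check_point(np.array([32., 40., 22.]))
        # ``state`` is one of TRACKPOINT, ENDPOINT, INVALIDPOINT or
        # OUTSIDEIMAGE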
""" def __cinit__(self, include_map, exclude_map): self.interp_out_view = self.interp_out_double self.include_map = np.asarray(include_map, 'float64') self.exclude_map = np.asarray(exclude_map, 'float64') @cython.boundscheck(False) @cython.wraparound(False) @cython.initializedcheck(False) cpdef TissueClass check_point(self, double[::1] point) except PYERROR: cdef: double include_result, exclude_result int include_err, exclude_err include_err = _trilinear_interpolate_c_4d(self.include_map[..., None], point, self.interp_out_view) include_result = self.interp_out_view[0] exclude_err = _trilinear_interpolate_c_4d(self.exclude_map[..., None], point, self.interp_out_view) exclude_result = self.interp_out_view[0] if include_err == -1 or exclude_err == -1: return OUTSIDEIMAGE elif include_err == -2 or exclude_err == -2: raise ValueError("Point has wrong shape") elif include_err != 0: # This should never happen raise RuntimeError("Unexpected interpolation error " + "(include_map - code:%i)" % include_err) elif exclude_err != 0: # This should never happen raise RuntimeError("Unexpected interpolation error " + "(exclude_map - code:%i)" % exclude_err) if include_result > 0.5: return ENDPOINT elif exclude_result > 0.5: return INVALIDPOINT else: return TRACKPOINT dipy-0.13.0/dipy/tracking/metrics.py000066400000000000000000000607421317371701200173420ustar00rootroot00000000000000''' Metrics for tracks, where tracks are arrays of points ''' from __future__ import division, print_function, absolute_import from dipy.utils.six.moves import xrange from dipy.testing import setup_test import numpy as np from scipy.interpolate import splprep, splev def winding(xyz): '''Total turning angle projected. Project space curve to best fitting plane. Calculate the cumulative signed angle between each line segment and the previous one. Parameters ------------ xyz : array-like shape (N,3) Array representing x,y,z of N points in a track. Returns --------- a : scalar Total turning angle in degrees. ''' U, s, V = np.linalg.svd(xyz-np.mean(xyz, axis=0), 0) proj = np.dot(U[:, 0:2], np.diag(s[0:2])) turn = 0 for j in range(len(xyz)-1): v0 = proj[j] v1 = proj[j+1] v = np.dot(v0, v1)/(np.linalg.norm(v0)*np.linalg.norm(v1)) v = np.clip(v, -1, 1) tmp = np.arccos(v) turn += tmp return np.rad2deg(turn) def length(xyz, along=False): ''' Euclidean length of track line This will give length in mm if tracks are expressed in world coordinates. Parameters ------------ xyz : array-like shape (N,3) array representing x,y,z of N points in a track along : bool, optional If True, return array giving cumulative length along track, otherwise (default) return scalar giving total length. Returns --------- L : scalar or array shape (N-1,) scalar in case of `along` == False, giving total length, array if `along` == True, giving cumulative lengths. Examples ---------- >>> from dipy.tracking.metrics import length >>> xyz = np.array([[1,1,1],[2,3,4],[0,0,0]]) >>> expected_lens = np.sqrt([1+2**2+3**2, 2**2+3**2+4**2]) >>> length(xyz) == expected_lens.sum() True >>> len_along = length(xyz, along=True) >>> np.allclose(len_along, expected_lens.cumsum()) True >>> length([]) 0 >>> length([[1, 2, 3]]) 0 >>> length([], along=True) array([0]) ''' xyz = np.asarray(xyz) if xyz.shape[0] < 2: if along: return np.array([0]) return 0 dists = np.sqrt((np.diff(xyz, axis=0)**2).sum(axis=1)) if along: return np.cumsum(dists) return np.sum(dists) def bytes(xyz): '''Size of track in bytes. 
Parameters ------------ xyz : array-like shape (N,3) Array representing x,y,z of N points in a track. Returns --------- b : int Number of bytes. ''' return xyz.nbytes def midpoint(xyz): ''' Midpoint of track Parameters ---------- xyz : array-like shape (N,3) array representing x,y,z of N points in a track Returns --------- mp : array shape (3,) Middle point of line, such that, if L is the line length then `np` is the point such that the length xyz[0] to `mp` and from `mp` to xyz[-1] is L/2. If the middle point is not a point in `xyz`, then we take the interpolation between the two nearest `xyz` points. If `xyz` is empty, return a ValueError Examples -------- >>> from dipy.tracking.metrics import midpoint >>> midpoint([]) Traceback (most recent call last): ... ValueError: xyz array cannot be empty >>> midpoint([[1, 2, 3]]) array([1, 2, 3]) >>> xyz = np.array([[1,1,1],[2,3,4]]) >>> midpoint(xyz) array([ 1.5, 2. , 2.5]) >>> xyz = np.array([[0,0,0],[1,1,1],[2,2,2]]) >>> midpoint(xyz) array([ 1., 1., 1.]) >>> xyz = np.array([[0,0,0],[1,0,0],[3,0,0]]) >>> midpoint(xyz) array([ 1.5, 0. , 0. ]) >>> xyz = np.array([[0,9,7],[1,9,7],[3,9,7]]) >>> midpoint(xyz) array([ 1.5, 9. , 7. ]) ''' xyz = np.asarray(xyz) n_pts = xyz.shape[0] if n_pts == 0: raise ValueError('xyz array cannot be empty') if n_pts == 1: return xyz.copy().squeeze() cumlen = np.zeros(n_pts) cumlen[1:] = length(xyz, along=True) midlen = cumlen[-1]/2.0 ind = np.where((cumlen-midlen) > 0)[0][0] len0 = cumlen[ind-1] len1 = cumlen[ind] Ds = midlen-len0 Lambda = Ds/(len1-len0) return Lambda*xyz[ind]+(1-Lambda)*xyz[ind-1] def center_of_mass(xyz): ''' Center of mass of streamline Parameters ------------ xyz : array-like shape (N,3) array representing x,y,z of N points in a track Returns --------- com : array shape (3,) center of mass of streamline Examples ---------- >>> from dipy.tracking.metrics import center_of_mass >>> center_of_mass([]) Traceback (most recent call last): ... ValueError: xyz array cannot be empty >>> center_of_mass([[1,1,1]]) array([ 1., 1., 1.]) >>> xyz = np.array([[0,0,0],[1,1,1],[2,2,2]]) >>> center_of_mass(xyz) array([ 1., 1., 1.]) ''' xyz = np.asarray(xyz) if xyz.size == 0: raise ValueError('xyz array cannot be empty') return np.mean(xyz, axis=0) def magn(xyz, n=1): ''' magnitude of vector ''' mag = np.sum(xyz**2, axis=1)**0.5 imag = np.where(mag == 0) mag[imag] = np.finfo(float).eps if n > 1: return np.tile(mag, (n, 1)).T return mag.reshape(len(mag), 1) def frenet_serret(xyz): r''' Frenet-Serret Space Curve Invariants Calculates the 3 vector and 2 scalar invariants of a space curve defined by vectors r = (x,y,z). If z is omitted (i.e. the array xyz has shape (N,2)), then the curve is only 2D (planar), but the equations are still valid. Similar to http://www.mathworks.com/matlabcentral/fileexchange/11169 In the following equations the prime ($'$) indicates differentiation with respect to the parameter $s$ of a parametrised curve $\mathbf{r}(s)$. 
- $\mathbf{T}=\mathbf{r'}/|\mathbf{r'}|\qquad$ (Tangent vector)} - $\mathbf{N}=\mathbf{T'}/|\mathbf{T'}|\qquad$ (Normal vector) - $\mathbf{B}=\mathbf{T}\times\mathbf{N}\qquad$ (Binormal vector) - $\kappa=|\mathbf{T'}|\qquad$ (Curvature) - $\mathrm{\tau}=-\mathbf{B'}\cdot\mathbf{N}$ (Torsion) Parameters ---------- xyz : array-like shape (N,3) array representing x,y,z of N points in a track Returns ------- T : array shape (N,3) array representing the tangent of the curve xyz N : array shape (N,3) array representing the normal of the curve xyz B : array shape (N,3) array representing the binormal of the curve xyz k : array shape (N,1) array representing the curvature of the curve xyz t : array shape (N,1) array representing the torsion of the curve xyz Examples ---------- Create a helix and calculate its tangent, normal, binormal, curvature and torsion >>> from dipy.tracking import metrics as tm >>> import numpy as np >>> theta = 2*np.pi*np.linspace(0,2,100) >>> x=np.cos(theta) >>> y=np.sin(theta) >>> z=theta/(2*np.pi) >>> xyz=np.vstack((x,y,z)).T >>> T,N,B,k,t=tm.frenet_serret(xyz) ''' xyz = np.asarray(xyz) n_pts = xyz.shape[0] if n_pts == 0: raise ValueError('xyz array cannot be empty') dxyz = np.gradient(xyz)[0] ddxyz = np.gradient(dxyz)[0] # Tangent T = np.divide(dxyz, magn(dxyz, 3)) # Derivative of Tangent dT = np.gradient(T)[0] # Normal N = np.divide(dT, magn(dT, 3)) # Binormal B = np.cross(T, N) # Curvature k = magn(np.cross(dxyz, ddxyz), 1)/(magn(dxyz, 1)**3) # Torsion # (In matlab was t=dot(-B,N,2)) t = np.sum(-B*N, axis=1) # return T,N,B,k,t,dxyz,ddxyz,dT return T, N, B, k, t def mean_curvature(xyz): ''' Calculates the mean curvature of a curve Parameters ------------ xyz : array-like shape (N,3) array representing x,y,z of N points in a curve Returns ----------- m : float Mean curvature. Examples -------- Create a straight line and a semi-circle and print their mean curvatures >>> from dipy.tracking import metrics as tm >>> import numpy as np >>> x=np.linspace(0,1,100) >>> y=0*x >>> z=0*x >>> xyz=np.vstack((x,y,z)).T >>> m=tm.mean_curvature(xyz) #mean curvature straight line >>> theta=np.pi*np.linspace(0,1,100) >>> x=np.cos(theta) >>> y=np.sin(theta) >>> z=0*x >>> xyz=np.vstack((x,y,z)).T >>> m=tm.mean_curvature(xyz) #mean curvature for semi-circle ''' xyz = np.asarray(xyz) n_pts = xyz.shape[0] if n_pts == 0: raise ValueError('xyz array cannot be empty') dxyz = np.gradient(xyz)[0] ddxyz = np.gradient(dxyz)[0] # Curvature k = magn(np.cross(dxyz, ddxyz), 1)/(magn(dxyz, 1)**3) return np.mean(k) def mean_orientation(xyz): ''' Calculates the mean orientation of a curve Parameters ------------ xyz : array-like shape (N,3) array representing x,y,z of N points in a curve Returns ------- m : float Mean orientation. 
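    Examples
    --------
    A minimal illustration (assumes a straight track along the x axis, so
    the mean orientation is the unit x vector):

    >>> import numpy as np
    >>> from dipy.tracking.metrics import mean_orientation
    >>> xyz = np.array([[0., 0., 0.], [1., 0., 0.], [2., 0., 0.]])
    >>> np.allclose(mean_orientation(xyz), [1., 0., 0.])
    True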
''' xyz = np.asarray(xyz) n_pts = xyz.shape[0] if n_pts == 0: raise ValueError('xyz array cannot be empty') dxyz = np.gradient(xyz)[0] return np.mean(dxyz, axis=0) def generate_combinations(items, n): """ Combine sets of size n from items Parameters ------------ items : sequence n : int Returns -------- ic : iterator Examples -------- >>> from dipy.tracking.metrics import generate_combinations >>> ic=generate_combinations(range(3),2) >>> for i in ic: print(i) [0, 1] [0, 2] [1, 2] """ if n == 0: yield [] elif n == 2: # if n=2 non_recursive for i in xrange(len(items)-1): for j in xrange(i+1, len(items)): yield [i, j] else: # if n>2 uses recursion for i in xrange(len(items)): for cc in generate_combinations(items[i+1:], n-1): yield [items[i]] + cc def longest_track_bundle(bundle, sort=False): ''' Return longest track or length sorted track indices in `bundle` If `sort` == True, return the indices of the sorted tracks in the bundle, otherwise return the longest track. Parameters ---------- bundle : sequence of tracks as arrays, shape (N1,3) ... (Nm,3) sort : bool, optional If False (default) return longest track. If True, return length sorted indices for tracks in bundle Returns ------- longest_or_indices : array longest track - shape (N,3) - (if `sort` is False), or indices of length sorted tracks (if `sort` is True) Examples -------- >>> from dipy.tracking.metrics import longest_track_bundle >>> import numpy as np >>> bundle = [np.array([[0,0,0],[2,2,2]]),np.array([[0,0,0],[4,4,4]])] >>> longest_track_bundle(bundle) array([[0, 0, 0], [4, 4, 4]]) >>> longest_track_bundle(bundle, True) #doctest: +ELLIPSIS array([0, 1]...) ''' alllengths = [length(t) for t in bundle] alllengths = np.array(alllengths) if sort: ilongest = alllengths.argsort() return ilongest else: ilongest = alllengths.argmax() return bundle[ilongest] def intersect_sphere(xyz, center, radius): ''' If any segment of the track is intersecting with a sphere of specific center and radius return True otherwise False Parameters ---------- xyz : array, shape (N,3) representing x,y,z of the N points of the track center : array, shape (3,) center of the sphere radius : float radius of the sphere Returns ------- tf : {True, False} True if track `xyz` intersects sphere >>> from dipy.tracking.metrics import intersect_sphere >>> line=np.array(([0,0,0],[1,1,1],[2,2,2])) >>> sph_cent=np.array([1,1,1]) >>> sph_radius = 1 >>> intersect_sphere(line,sph_cent,sph_radius) True Notes ----- The ray to sphere intersection method used here is similar with http://local.wasp.uwa.edu.au/~pbourke/geometry/sphereline/ http://local.wasp.uwa.edu.au/~pbourke/geometry/sphereline/source.cpp we just applied it for every segment neglecting the intersections where the intersecting points are not inside the segment ''' center = np.array(center) # print center lt = xyz.shape[0] for i in xrange(lt-1): # first point x1 = xyz[i] # second point x2 = xyz[i+1] # do the calculations as given in the Notes x = x2-x1 a = np.inner(x, x) x1c = x1-center b = 2*np.inner(x, x1c) c = (np.inner(center, center)+np.inner(x1, x1)-2*np.inner(center, x1) - radius**2) bb4ac = b*b-4*a*c # print 'bb4ac',bb4ac if abs(a) < np.finfo(float).eps or bb4ac < 0: # too small segment or # no intersection continue if bb4ac == 0: # one intersection point p mu = -b/2*a p = x1+mu*x # check if point is inside the segment # print 'p',p if np.inner(p-x1, p-x1) <= a: return True if bb4ac > 0: # two intersection points p1 and p2 mu = (-b+np.sqrt(bb4ac))/(2*a) p1 = x1+mu*x mu = (-b-np.sqrt(bb4ac))/(2*a) p2 = 
x1+mu*x # check if points are inside the line segment # print 'p1,p2',p1,p2 if np.inner(p1-x1, p1-x1) <= a or np.inner(p2-x1, p2-x1) <= a: return True return False def inside_sphere(xyz, center, radius): ''' If any point of the track is inside a sphere of a specified center and radius return True otherwise False. Mathematicaly this can be simply described by $|x-c|\le r$ where $x$ a point $c$ the center of the sphere and $r$ the radius of the sphere. Parameters ------------- xyz : array, shape (N,3) representing x,y,z of the N points of the track center : array, shape (3,) center of the sphere radius : float radius of the sphere Returns ---------- tf : {True,False} Whether point is inside sphere. Examples -------- >>> from dipy.tracking.metrics import inside_sphere >>> line=np.array(([0,0,0],[1,1,1],[2,2,2])) >>> sph_cent=np.array([1,1,1]) >>> sph_radius = 1 >>> inside_sphere(line,sph_cent,sph_radius) True ''' return (np.sqrt(np.sum((xyz-center)**2, axis=1)) <= radius).any() def inside_sphere_points(xyz, center, radius): ''' If a track intersects with a sphere of a specified center and radius return the points that are inside the sphere otherwise False. Mathematicaly this can be simply described by $|x-c| \le r$ where $x$ a point $c$ the center of the sphere and $r$ the radius of the sphere. Parameters ------------ xyz : array, shape (N,3) representing x,y,z of the N points of the track center : array, shape (3,) center of the sphere radius : float radius of the sphere Returns --------- xyzn : array, shape(M,3) array representing x,y,z of the M points inside the sphere Examples ---------- >>> from dipy.tracking.metrics import inside_sphere_points >>> line=np.array(([0,0,0],[1,1,1],[2,2,2])) >>> sph_cent=np.array([1,1,1]) >>> sph_radius = 1 >>> inside_sphere_points(line,sph_cent,sph_radius) array([[1, 1, 1]]) ''' return xyz[(np.sqrt(np.sum((xyz-center)**2, axis=1)) <= radius)] def spline(xyz, s=3, k=2, nest=-1): ''' Generate B-splines as documented in http://www.scipy.org/Cookbook/Interpolation The scipy.interpolate packages wraps the netlib FITPACK routines (Dierckx) for calculating smoothing splines for various kinds of data and geometries. Although the data is evenly spaced in this example, it need not be so to use this routine. Parameters --------------- xyz : array, shape (N,3) array representing x,y,z of N points in 3d space s : float, optional A smoothing condition. The amount of smoothness is determined by satisfying the conditions: sum((w * (y - g))**2,axis=0) <= s where g(x) is the smoothed interpolation of (x,y). The user can use s to control the tradeoff between closeness and smoothness of fit. Larger satisfying the conditions: sum((w * (y - g))**2,axis=0) <= s where g(x) is the smoothed interpolation of (x,y). The user can use s to control the tradeoff between closeness and smoothness of fit. Larger s means more smoothing while smaller values of s indicate less smoothing. Recommended values of s depend on the weights, w. If the weights represent the inverse of the standard-deviation of y, then a: good s value should be found in the range (m-sqrt(2*m),m+sqrt(2*m)) where m is the number of datapoints in x, y, and w. k : int, optional Degree of the spline. Cubic splines are recommended. Even values of k should be avoided especially with a small s-value. for the same set of data. If task=-1 find the weighted least square spline for a given set of knots, t. 
nest : None or int, optional An over-estimate of the total number of knots of the spline to help in determining the storage space. None results in value m+2*k. -1 results in m+k+1. Always large enough is nest=m+k+1. Default is -1. Returns ---------- xyzn : array, shape (M,3) array representing x,y,z of the M points inside the sphere Examples ---------- >>> import numpy as np >>> t=np.linspace(0,1.75*2*np.pi,100)# make ascending spiral in 3-space >>> x = np.sin(t) >>> y = np.cos(t) >>> z = t >>> x+= np.random.normal(scale=0.1, size=x.shape) # add noise >>> y+= np.random.normal(scale=0.1, size=y.shape) >>> z+= np.random.normal(scale=0.1, size=z.shape) >>> xyz=np.vstack((x,y,z)).T >>> xyzn=spline(xyz,3,2,-1) >>> len(xyzn) > len(xyz) True See also ---------- scipy.interpolate.splprep scipy.interpolate.splev ''' # find the knot points tckp, u = splprep([xyz[:, 0], xyz[:, 1], xyz[:, 2]], s=s, k=k, nest=nest) # evaluate spline, including interpolated points xnew, ynew, znew = splev(np.linspace(0, 1, 400), tckp) return np.vstack((xnew, ynew, znew)).T def startpoint(xyz): ''' First point of the track Parameters ------------- xyz : array, shape(N,3) Track. Returns --------- sp : array, shape(3,) First track point. Examples ---------- >>> from dipy.tracking.metrics import startpoint >>> import numpy as np >>> theta=np.pi*np.linspace(0,1,100) >>> x=np.cos(theta) >>> y=np.sin(theta) >>> z=0*x >>> xyz=np.vstack((x,y,z)).T >>> sp=startpoint(xyz) >>> sp.any()==xyz[0].any() True ''' return xyz[0] def endpoint(xyz): ''' Parameters ---------- xyz : array, shape(N,3) Track. Returns ------- ep : array, shape(3,) First track point. Examples ---------- >>> from dipy.tracking.metrics import endpoint >>> import numpy as np >>> theta=np.pi*np.linspace(0,1,100) >>> x=np.cos(theta) >>> y=np.sin(theta) >>> z=0*x >>> xyz=np.vstack((x,y,z)).T >>> ep=endpoint(xyz) >>> ep.any()==xyz[-1].any() True ''' return xyz[-1] def arbitrarypoint(xyz, distance): ''' Select an arbitrary point along distance on the track (curve) Parameters ---------- xyz : array-like shape (N,3) array representing x,y,z of N points in a track distance : float float representing distance travelled from the xyz[0] point of the curve along the curve. Returns ------- ap : array shape (3,) Arbitrary point of line, such that, if the arbitrary point is not a point in `xyz`, then we take the interpolation between the two nearest `xyz` points. 
If `xyz` is empty, return a ValueError Examples -------- >>> import numpy as np >>> from dipy.tracking.metrics import arbitrarypoint, length >>> theta=np.pi*np.linspace(0,1,100) >>> x=np.cos(theta) >>> y=np.sin(theta) >>> z=0*x >>> xyz=np.vstack((x,y,z)).T >>> ap=arbitrarypoint(xyz,length(xyz)/3) ''' xyz = np.asarray(xyz) n_pts = xyz.shape[0] if n_pts == 0: raise ValueError('xyz array cannot be empty') if n_pts == 1: return xyz.copy().squeeze() cumlen = np.zeros(n_pts) cumlen[1:] = length(xyz, along=True) if cumlen[-1] < distance: raise ValueError('Given distance is bigger than ' 'the length of the curve') ind = np.where((cumlen-distance) > 0)[0][0] len0 = cumlen[ind-1] len1 = cumlen[ind] Ds = distance-len0 Lambda = Ds/(len1-len0) return Lambda*xyz[ind]+(1-Lambda)*xyz[ind-1] def _extrap(xyz, cumlen, distance): ''' Helper function for extrapolate ''' ind = np.where((cumlen-distance) > 0)[0][0] len0 = cumlen[ind-1] len1 = cumlen[ind] Ds = distance-len0 Lambda = Ds/(len1-len0) return Lambda*xyz[ind]+(1-Lambda)*xyz[ind-1] def downsample(xyz, n_pols=3): ''' downsample for a specific number of points along the curve/track Uses the length of the curve. It works in a similar fashion to midpoint and arbitrarypoint but it also reduces the number of segments of a track. Parameters ---------- xyz : array-like shape (N,3) array representing x,y,z of N points in a track n_pol : int integer representing number of points (poles) we need along the curve. Returns ------- xyz2 : array shape (M,3) array representing x,y,z of M points that where extrapolated. M should be equal to n_pols Examples -------- >>> import numpy as np >>> # a semi-circle >>> theta=np.pi*np.linspace(0,1,100) >>> x=np.cos(theta) >>> y=np.sin(theta) >>> z=0*x >>> xyz=np.vstack((x,y,z)).T >>> xyz2=downsample(xyz,3) >>> # a cosine >>> x=np.pi*np.linspace(0,1,100) >>> y=np.cos(theta) >>> z=0*y >>> xyz=np.vstack((x,y,z)).T >>> xyz2=downsample(xyz,3) >>> len(xyz2) 3 >>> xyz3=downsample(xyz,10) >>> len(xyz3) 10 ''' xyz = np.asarray(xyz) n_pts = xyz.shape[0] if n_pts == 0: raise ValueError('xyz array cannot be empty') if n_pts == 1: return xyz.copy().squeeze() cumlen = np.zeros(n_pts) cumlen[1:] = length(xyz, along=True) step = cumlen[-1]/(n_pols-1) if cumlen[-1] < step: raise ValueError('Given number of points n_pols is incorrect. ') if n_pols <= 2: raise ValueError('Given number of points n_pols needs to be' ' higher than 2. ') ar = np.arange(0, cumlen[-1], step) if np.abs(ar[-1]-cumlen[-1]) < np.finfo('f4').eps: ar = ar[:-1] xyz2 = [_extrap(xyz, cumlen, distance) for distance in ar] return np.vstack((np.array(xyz2), xyz[-1])) def principal_components(xyz): ''' We use PCA to calculate the 3 principal directions for a track Parameters ---------- xyz : array-like shape (N,3) array representing x,y,z of N points in a track Returns ------- va : array_like eigenvalues ve : array_like eigenvectors Examples -------- >>> import numpy as np >>> from dipy.tracking.metrics import principal_components >>> theta=np.pi*np.linspace(0,1,100) >>> x=np.cos(theta) >>> y=np.sin(theta) >>> z=0*x >>> xyz=np.vstack((x,y,z)).T >>> va, ve = principal_components(xyz) >>> np.allclose(va, [0.51010101, 0.09883545, 0]) True ''' C = np.cov(xyz.T) va, ve = np.linalg.eig(C) return va, ve def midpoint2point(xyz, p): ''' Calculate distance from midpoint of a curve to arbitrary point p Parameters ------------- xyz : array-like shape (N,3) array representing x,y,z of N points in a track p : array shape (3,) array representing an arbitrary point with x,y,z coordinates in space. 
Returns --------- d : float a float number representing Euclidean distance Examples ----------- >>> import numpy as np >>> from dipy.tracking.metrics import midpoint2point, midpoint >>> theta=np.pi*np.linspace(0,1,100) >>> x=np.cos(theta) >>> y=np.sin(theta) >>> z=0*x >>> xyz=np.vstack((x,y,z)).T >>> dist=midpoint2point(xyz,np.array([0,0,0])) ''' mid = midpoint(xyz) return np.sqrt(np.sum((xyz-mid)**2)) if __name__ == "__main__": pass dipy-0.13.0/dipy/tracking/propspeed.pxd000066400000000000000000000006071317371701200200320ustar00rootroot00000000000000cimport numpy as cnp cdef cnp.npy_intp _propagation_direction(double *point,double* prev,double* qa,\ double *ind, double *odf_vertices,\ double qa_thr, double ang_thr,\ cnp.npy_intp *qa_shape,cnp.npy_intp* strides,\ double *direction,double total_weight) nogil dipy-0.13.0/dipy/tracking/propspeed.pyx000066400000000000000000000456021317371701200200630ustar00rootroot00000000000000# A type of -*- python -*- file """ Track propagation performance functions """ # cython: profile=True # cython: embedsignature=True cimport cython import numpy as np cimport numpy as cnp cdef extern from "dpy_math.h" nogil: double floor(double x) float fabs(float x) double cos(double x) double sin(double x) float acos(float x ) double sqrt(double x) double DPY_PI DEF PEAK_NO=5 # initialize numpy runtime cnp.import_array() @cython.cdivision(True) cdef cnp.npy_intp offset(cnp.npy_intp *indices, cnp.npy_intp *strides, int lenind, int typesize) nogil: ''' Access any element of any ndimensional numpy array using cython. Parameters ------------ indices : cnp.npy_intp * (int64 *) Indices of the array for which we want to find the offset. strides : cnp.npy_intp * strides lenind : int, len(indices) typesize : int Number of bytes for data type e.g. if 8 for double, 4 for int32 Returns ---------- offset : integer Element position in array ''' cdef int i cdef cnp.npy_intp summ = 0 for i from 0 <= i < lenind: summ += strides[i] * indices[i] summ /= typesize return summ def ndarray_offset(cnp.ndarray[cnp.npy_intp, ndim=1] indices, cnp.ndarray[cnp.npy_intp, ndim=1] strides, int lenind, int typesize): ''' Find offset in an N-dimensional ndarray using strides Parameters ---------- indices : array, npy_intp shape (N,) Indices of the array which we want to find the offset. strides : array, shape (N,) Strides of array. lenind : int len of the `indices` array. typesize : int Number of bytes for data type e.g. 
if 8 for double, 4 for int32 Returns ------- offset : integer Index position in flattened array Examples -------- >>> import numpy as np >>> from dipy.tracking.propspeed import ndarray_offset >>> I=np.array([1,1]) >>> A=np.array([[1,0,0],[0,2,0],[0,0,3]]) >>> S=np.array(A.strides) >>> ndarray_offset(I,S,2,A.dtype.itemsize) 4 >>> A.ravel()[4]==A[1,1] True ''' if not cnp.PyArray_CHKFLAGS(indices, cnp.NPY_C_CONTIGUOUS): raise ValueError(u"indices is not C contiguous") if not cnp.PyArray_CHKFLAGS(strides, cnp.NPY_C_CONTIGUOUS): raise ValueError(u"strides is not C contiguous") return offset( cnp.PyArray_DATA(indices), cnp.PyArray_DATA(strides), lenind, typesize) @cython.boundscheck(False) @cython.wraparound(False) def map_coordinates_trilinear_iso(cnp.ndarray[double, ndim=3] data, cnp.ndarray[double, ndim=2] points, cnp.ndarray[cnp.npy_intp, ndim=1] data_strides, cnp.npy_intp len_points, cnp.ndarray[double, ndim=1] result): ''' Trilinear interpolation (isotropic voxel size) Has similar behavior to ``map_coordinates`` from ``scipy.ndimage`` Parameters ---------- data : array, float64 shape (X, Y, Z) points : array, float64 shape(N, 3) data_strides : array npy_intp shape (3,) Strides sequence for `data` array len_points : cnp.npy_intp Number of points to interpolate result : array, float64 shape(N) The output array. This array should be initialized before you call this function. On exit it will contain the interpolated values from `data` at points given by `points`. Returns ------- None Notes ----- The output array `result` is filled in-place. ''' cdef: double w[8], values[24] cnp.npy_intp index[24], off, i, j double *ds= cnp.PyArray_DATA(data) double *ps= cnp.PyArray_DATA(points) cnp.npy_intp *strides = cnp.PyArray_DATA(data_strides) double *rs= cnp.PyArray_DATA(result) if not cnp.PyArray_CHKFLAGS(data, cnp.NPY_C_CONTIGUOUS): raise ValueError(u"data is not C contiguous") if not cnp.PyArray_CHKFLAGS(points, cnp.NPY_C_CONTIGUOUS): raise ValueError(u"points is not C contiguous") if not cnp.PyArray_CHKFLAGS(data_strides, cnp.NPY_C_CONTIGUOUS): raise ValueError(u"data_strides is not C contiguous") if not cnp.PyArray_CHKFLAGS(result, cnp.NPY_C_CONTIGUOUS): raise ValueError(u"result is not C contiguous") with nogil: for i in range(len_points): _trilinear_interpolation_iso(&ps[i * 3], w, index) rs[i] = 0 for j in range(8): weight = w[j] off = offset(&index[j * 3], strides, 3, 8) value = ds[off] rs[i] += weight * value return cdef void _trilinear_interpolation_iso(double *X, double *W, cnp.npy_intp *IN) nogil: ''' Interpolate in 3d volumes given point X Returns ------- W : weights IN : indices of the volume ''' cdef double Xf[3], d[3], nd[3] cdef cnp.npy_intp i # define the rectangular box where every corner is a neighboring voxel # (assuming center) !!! this needs to change for the affine case for i from 0 <= i < 3: Xf[i] = floor(X[i]) d[i] = X[i] - Xf[i] nd[i] = 1 - d[i] # weights # the weights are actualy the volumes of the 8 smaller boxes that define # the initial rectangular box for more on trilinear have a look here # http://en.wikipedia.org/wiki/Trilinear_interpolation # http://local.wasp.uwa.edu.au/~pbourke/miscellaneous/interpolation/index.html W[0]=nd[0] * nd[1] * nd[2] W[1]= d[0] * nd[1] * nd[2] W[2]=nd[0] * d[1] * nd[2] W[3]=nd[0] * nd[1] * d[2] W[4]= d[0] * d[1] * nd[2] W[5]=nd[0] * d[1] * d[2] W[6]= d[0] * nd[1] * d[2] W[7]= d[0] * d[1] * d[2] # indices # the indices give you the indices of the neighboring voxels (the corners # of the box) e.g. 
the qa coordinates IN[0] =Xf[0]; IN[1] =Xf[1]; IN[2] =Xf[2] IN[3] =Xf[0]+1; IN[4] =Xf[1]; IN[5] =Xf[2] IN[6] =Xf[0]; IN[7] =Xf[1]+1; IN[8] =Xf[2] IN[9] =Xf[0]; IN[10]=Xf[1]; IN[11]=Xf[2]+1 IN[12]=Xf[0]+1; IN[13]=Xf[1]+1; IN[14]=Xf[2] IN[15]=Xf[0]; IN[16]=Xf[1]+1; IN[17]=Xf[2]+1 IN[18]=Xf[0]+1; IN[19]=Xf[1]; IN[20]=Xf[2]+1 IN[21]=Xf[0]+1; IN[22]=Xf[1]+1; IN[23]=Xf[2]+1 return cdef cnp.npy_intp _nearest_direction(double* dx, double* qa, double *ind, cnp.npy_intp peaks, double *odf_vertices, double qa_thr, double ang_thr, double *direction) nogil: ''' Give the nearest direction to a point, checking threshold and angle Parameters ------------ dx : double array shape (3,) Moving direction of the current tracking. qa : double array shape (Np,) Quantitative anisotropy matrix, where ``Np`` is the number of peaks. ind : array, float64 shape(x, y, z, Np) Index of the track orientation. peaks : npy_intp odf_vertices : double array shape (N, 3) Sampling directions on the sphere. qa_thr : float Threshold for QA, we want everything higher than this threshold. ang_thr : float Angle threshold, we only select fiber orientation within this range. direction : double array shape (3,) The fiber orientation to be considered in the interpolation. The array gets modified in-place. Returns -------- delta : bool Delta funtion: if 1 we give it weighting, if it is 0 we don't give any weighting. ''' cdef: double max_dot = 0 double angl,curr_dot double odfv[3] cnp.npy_intp i, j, max_doti = 0 # calculate the cos with radians angl = cos((DPY_PI * ang_thr) / 180.) # if the maximum peak is lower than the threshold then there is no point # continuing tracking if qa[0] <= qa_thr: return 0 # for all peaks find the minimum angle between odf_vertices and dx for i from 0 <= i < peaks: # if the current peak is smaller than the threshold then jump out if qa[i] <= qa_thr: break # copy odf_vertices for j from 0 <= j < 3: odfv[j]=odf_vertices[3 * ind[i] + j] # calculate the absolute dot product between dx and odf_vertices curr_dot = dx[0] * odfv[0] + dx[1] * odfv[1] + dx[2] * odfv[2] if curr_dot < 0: #abs check curr_dot = -curr_dot # maximum dot means minimum angle # store tha maximum dot and the corresponding index from the # neighboring voxel in maxdoti if curr_dot > max_dot: max_dot=curr_dot max_doti = i # if maxdot smaller than our angular *dot* threshold stop tracking if max_dot < angl: return 0 # copy the odf_vertices for the voxel qa indices which have the smaller # angle for j from 0 <= j < 3: odfv[j] = odf_vertices[3 * ind[max_doti] + j] # if the dot product is negative then return the opposite direction # otherwise return the same direction if dx[0] * odfv[0] + dx[1] * odfv[1] + dx[2] * odfv[2] < 0: for j from 0 <= j < 3: direction[j] = -odf_vertices[3 * ind[max_doti] + j] return 1 for j from 0 <= j < 3: direction[j]= odf_vertices[3 * ind[max_doti] + j] return 1 @cython.cdivision(True) cdef cnp.npy_intp _propagation_direction(double *point, double* dx, double* qa, double *ind, double *odf_vertices, double qa_thr, double ang_thr, cnp.npy_intp *qa_shape, cnp.npy_intp* strides, double *direction, double total_weight) nogil: cdef: double total_w = 0 # total weighting useful for interpolation double delta = 0 # store delta function (stopping function) result double new_direction[3] # new propagation direction double w[8], qa_tmp[PEAK_NO], ind_tmp[PEAK_NO] cnp.npy_intp index[24], xyz[4] cnp.npy_intp i, j, m double normd # number of allowed peaks e.g. 
for fa is 1 for gqi.qa is 5 cnp.npy_intp peaks = qa_shape[3] # Calculate qa & ind of each of the 8 neighboring voxels. # To do that we use trilinear interpolation and return the weights and the # indices for the weights i.e. xyz in qa[x,y,z] _trilinear_interpolation_iso(point, w, index) # check if you are outside of the volume for i from 0 <= i < 3: new_direction[i] = 0 if index[7 * 3 + i] >= qa_shape[i] or index[i] < 0: return 0 # for every weight sum the total weighting for m from 0 <= m < 8: for i from 0 <= i < 3: xyz[i]=index[m * 3 + i] # fill qa_tmp and ind_tmp for j from 0 <= j < peaks: xyz[3] = j off = offset( xyz, strides, 4, 8) qa_tmp[j] = qa[off] ind_tmp[j] = ind[off] # return the nearest direction by searching in all peaks delta=_nearest_direction(dx, qa_tmp, ind_tmp, peaks, odf_vertices, qa_thr, ang_thr, direction) # if delta is 0 then that means that there was no good direction # (obeying the thresholds) from that neighboring voxel, so this voxel # is not adding to the total weight if delta == 0: continue # add in total total_w += w[m] for i from 0 <= i < 3: new_direction[i] += w[m] * direction[i] # if less than half the volume is time to stop propagating if total_w < total_weight: # termination return 0 # all good return normalized weighted next direction normd = new_direction[0]**2 + new_direction[1]**2 + new_direction[2]**2 normd = 1 / sqrt(normd) for i from 0 <= i < 3: direction[i] = new_direction[i] * normd return 1 cdef cnp.npy_intp _initial_direction(double* seed,double *qa, double* ind, double* odf_vertices, double qa_thr, cnp.npy_intp* strides, cnp.npy_intp ref, double* direction) nogil: ''' First direction that we get from a seeding point ''' cdef: cnp.npy_intp point[4],off cnp.npy_intp i double qa_tmp,ind_tmp # Very tricky/cool addition/flooring that helps create a valid neighborhood # (grid) for the trilinear interpolation to run smoothly. # Find the index for qa for i from 0 <= i < 3: point[i] = floor(seed[i] + .5) point[3] = ref # Find the offcet in memory to access the qa value off = offset(point,strides, 4, 8) qa_tmp = qa[off] # Check for scalar threshold if qa_tmp < qa_thr: return 0 # Find the correct direction from the indices ind_tmp = ind[off] # similar to ind[point] in numpy syntax # Return initial direction through odf_vertices by ind for i from 0 <= i < 3: direction[i] = odf_vertices[3 * ind_tmp + i] return 1 def eudx_both_directions(cnp.ndarray[double, ndim=1] seed, cnp.npy_intp ref, cnp.ndarray[double, ndim=4] qa, cnp.ndarray[double, ndim=4] ind, cnp.ndarray[double, ndim=2] odf_vertices, double qa_thr, double ang_thr, double step_sz, double total_weight, cnp.npy_intp max_points): ''' Parameters ------------ seed : array, float64 shape (3,) Point where the tracking starts. ref : cnp.npy_intp int Index of peak to follow first. qa : array, float64 shape (X, Y, Z, Np) Anisotropy matrix, where ``Np`` is the number of maximum allowed peaks. ind : array, float64 shape(x, y, z, Np) Index of the track orientation. odf_vertices : double array shape (N, 3) Sampling directions on the sphere. qa_thr : float Threshold for QA, we want everything higher than this threshold. ang_thr : float Angle threshold, we only select fiber orientation within this range. 
step_sz : double total_weight : double max_points : cnp.npy_intp Returns ------- track : array, shape (N,3) ''' cdef: double *ps = cnp.PyArray_DATA(seed) double *pqa = cnp.PyArray_DATA(qa) double *pin = cnp.PyArray_DATA(ind) double *pverts = cnp.PyArray_DATA(odf_vertices) cnp.npy_intp *pstr = qa.strides cnp.npy_intp *qa_shape = qa.shape cnp.npy_intp *pvstr = odf_vertices.strides cnp.npy_intp d, i, j, cnt double direction[3], dx[3], idirection[3], ps2[3] double tmp, ftmp if not cnp.PyArray_CHKFLAGS(seed, cnp.NPY_C_CONTIGUOUS): raise ValueError(u"seed is not C contiguous") if not cnp.PyArray_CHKFLAGS(qa, cnp.NPY_C_CONTIGUOUS): raise ValueError(u"qa is not C contiguous") if not cnp.PyArray_CHKFLAGS(ind, cnp.NPY_C_CONTIGUOUS): raise ValueError(u"ind is not C contiguous") if not cnp.PyArray_CHKFLAGS(odf_vertices, cnp.NPY_C_CONTIGUOUS): raise ValueError(u"odf_vertices is not C contiguous") cnt = 0 d = _initial_direction(ps, pqa, pin, pverts, qa_thr, pstr, ref, idirection) if d == 0: return None for i from 0 <= i < 3: # store the initial direction dx[i] = idirection[i] # ps2 is for downwards and ps for upwards propagation ps2[i] = ps[i] point = seed.copy() track = [] track.append(point.copy()) # track towards one direction while d: d = _propagation_direction(ps, dx, pqa, pin, pverts, qa_thr, ang_thr, qa_shape, pstr, direction, total_weight) if d == 0: break if cnt > max_points: break # update the track for i from 0 <= i < 3: dx[i] = direction[i] # check for boundaries tmp = ps[i] + step_sz * dx[i] if tmp > qa_shape[i] - 1 or tmp < 0.: d = 0 break # propagate ps[i] = tmp point[i] = ps[i] if d == 1: track.append(point.copy()) cnt += 1 d = 1 for i from 0 <= i < 3: dx[i] = -idirection[i] cnt = 0 # track towards the opposite direction while d: d = _propagation_direction(ps2, dx, pqa, pin, pverts, qa_thr, ang_thr, qa_shape, pstr, direction, total_weight) if d == 0: break if cnt > max_points: break # update the track for i from 0 <= i < 3: dx[i] = direction[i] # check for boundaries tmp=ps2[i] + step_sz*dx[i] if tmp > qa_shape[i] - 1 or tmp < 0.: d = 0 break # propagate ps2[i] = tmp point[i] = ps2[i] # to be changed # add track point if d == 1: track.insert(0, point.copy()) cnt += 1 # prepare to return final track for the current seed tmp_track = np.array(track, dtype=np.float32) # Sometimes one of the ends takes small negative values; needs to be # investigated further # Return track for the current seed point and ref return tmp_track dipy-0.13.0/dipy/tracking/streamline.py000066400000000000000000000417261317371701200200400ustar00rootroot00000000000000from copy import deepcopy from warnings import warn import types from scipy.spatial.distance import cdist import numpy as np from nibabel.affines import apply_affine from nibabel.streamlines import ArraySequence as Streamlines from dipy.tracking.streamlinespeed import set_number_of_points from dipy.tracking.streamlinespeed import length from dipy.tracking.streamlinespeed import compress_streamlines import dipy.tracking.utils as ut from dipy.tracking.utils import streamline_near_roi from dipy.core.geometry import dist_to_corner import dipy.align.vector_fields as vfu from dipy.testing import setup_test def unlist_streamlines(streamlines): """ Return the streamlines not as a list but as an array and an offset Parameters ---------- streamlines: sequence Returns ------- points : array offsets : array """ points = np.concatenate(streamlines, axis=0) offsets = np.zeros(len(streamlines), dtype='i8') curr_pos = 0 prev_pos = 0 for (i, s) in enumerate(streamlines): 
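        # Copy streamline ``s`` into its slice of the pre-allocated ``points``
        # buffer and record the running end position, so that ``offsets[i]``
        # marks where streamline ``i`` ends (and the next one starts) in the
        # concatenated array.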
prev_pos = curr_pos curr_pos += s.shape[0] points[prev_pos:curr_pos] = s offsets[i] = curr_pos return points, offsets def relist_streamlines(points, offsets): """ Given a representation of a set of streamlines as a large array and an offsets array return the streamlines as a list of shorter arrays. Parameters ----------- points : array offsets : array Returns ------- streamlines: sequence """ streamlines = [] streamlines.append(points[0: offsets[0]]) for i in range(len(offsets) - 1): streamlines.append(points[offsets[i]: offsets[i + 1]]) return streamlines def center_streamlines(streamlines): """ Move streamlines to the origin Parameters ---------- streamlines : list List of 2D ndarrays of shape[-1]==3 Returns ------- new_streamlines : list List of 2D ndarrays of shape[-1]==3 inv_shift : ndarray Translation in x,y,z to go back in the initial position """ center = np.mean(np.concatenate(streamlines, axis=0), axis=0) return [s - center for s in streamlines], center def transform_streamlines(streamlines, mat): """ Apply affine transformation to streamlines Parameters ---------- streamlines : list List of 2D ndarrays of shape[-1]==3 mat : array, (4, 4) transformation matrix Returns ------- new_streamlines : list List of the transformed 2D ndarrays of shape[-1]==3 """ return [apply_affine(mat, s) for s in streamlines] def select_random_set_of_streamlines(streamlines, select): """ Select a random set of streamlines Parameters ---------- streamlines : list List of 2D ndarrays of shape[-1]==3 select : int Number of streamlines to select. If there are less streamlines than ``select`` then ``select=len(streamlines)``. Returns ------- selected_streamlines : list Notes ----- The same streamline will not be selected twice. """ len_s = len(streamlines) index = np.random.choice(len_s, min(select, len_s), replace=False) return [streamlines[i] for i in index] def select_by_rois(streamlines, rois, include, mode=None, affine=None, tol=None): """Select streamlines based on logical relations with several regions of interest (ROIs). For example, select streamlines that pass near ROI1, but only if they do not pass near ROI2. Parameters ---------- streamlines : list A list of candidate streamlines for selection rois : list or ndarray A list of 3D arrays, each with shape (x, y, z) corresponding to the shape of the brain volume, or a 4D array with shape (n_rois, x, y, z). Non-zeros in each volume are considered to be within the region include : array or list A list or 1D array of boolean values marking inclusion or exclusion criteria. If a streamline is near any of the inclusion ROIs, it should evaluate to True, unless it is also near any of the exclusion ROIs. mode : string, optional One of {"any", "all", "either_end", "both_end"}, where a streamline is associated with an ROI if: "any" : any point is within tol from ROI. Default. "all" : all points are within tol from ROI. "either_end" : either of the end-points is within tol from ROI "both_end" : both end points are within tol from ROI. affine : ndarray Affine transformation from voxels to streamlines. Default: identity. tol : float Distance (in the units of the streamlines, usually mm). If any coordinate in the streamline is within this distance from the center of any voxel in the ROI, the filtering criterion is set to True for this streamline, otherwise False. Defaults to the distance between the center of each voxel and the corner of the voxel. Notes ----- The only operation currently possible is "(A or B or ...) 
and not (X or Y or ...)", where A, B are inclusion regions and X, Y are exclusion regions. Returns ------- generator Generates the streamlines to be included based on these criteria. See also -------- :func:`dipy.tracking.utils.near_roi` :func:`dipy.tracking.utils.reduce_rois` Examples -------- >>> streamlines = [np.array([[0, 0., 0.9], ... [1.9, 0., 0.]]), ... np.array([[0., 0., 0], ... [0, 1., 1.], ... [0, 2., 2.]]), ... np.array([[2, 2, 2], ... [3, 3, 3]])] >>> mask1 = np.zeros((4, 4, 4), dtype=bool) >>> mask2 = np.zeros_like(mask1) >>> mask1[0, 0, 0] = True >>> mask2[1, 0, 0] = True >>> selection = select_by_rois(streamlines, [mask1, mask2], ... [True, True], ... tol=1) >>> list(selection) # The result is a generator [array([[ 0. , 0. , 0.9], [ 1.9, 0. , 0. ]]), array([[ 0., 0., 0.], [ 0., 1., 1.], [ 0., 2., 2.]])] >>> selection = select_by_rois(streamlines, [mask1, mask2], ... [True, False], ... tol=0.87) >>> list(selection) [array([[ 0., 0., 0.], [ 0., 1., 1.], [ 0., 2., 2.]])] >>> selection = select_by_rois(streamlines, [mask1, mask2], ... [True, True], ... mode="both_end", ... tol=1.0) >>> list(selection) [array([[ 0. , 0. , 0.9], [ 1.9, 0. , 0. ]])] >>> mask2[0, 2, 2] = True >>> selection = select_by_rois(streamlines, [mask1, mask2], ... [True, True], ... mode="both_end", ... tol=1.0) >>> list(selection) [array([[ 0. , 0. , 0.9], [ 1.9, 0. , 0. ]]), array([[ 0., 0., 0.], [ 0., 1., 1.], [ 0., 2., 2.]])] """ if affine is None: affine = np.eye(4) # This calculates the maximal distance to a corner of the voxel: dtc = dist_to_corner(affine) if tol is None: tol = dtc elif tol < dtc: w_s = "Tolerance input provided would create gaps in your" w_s += " inclusion ROI. Setting to: %s" % dist_to_corner warn(w_s) tol = dtc include_roi, exclude_roi = ut.reduce_rois(rois, include) include_roi_coords = np.array(np.where(include_roi)).T x_include_roi_coords = apply_affine(affine, include_roi_coords) exclude_roi_coords = np.array(np.where(exclude_roi)).T x_exclude_roi_coords = apply_affine(affine, exclude_roi_coords) if mode is None: mode = "any" for sl in streamlines: include = streamline_near_roi(sl, x_include_roi_coords, tol=tol, mode=mode) exclude = streamline_near_roi(sl, x_exclude_roi_coords, tol=tol, mode=mode) if include & ~exclude: yield sl def _orient_generator(out, roi1, roi2): """ Helper function to `orient_by_rois` Performs the inner loop separately. This is needed, because functions with `yield` always return a generator """ for idx, sl in enumerate(out): dist1 = cdist(sl, roi1, 'euclidean') dist2 = cdist(sl, roi2, 'euclidean') min1 = np.argmin(dist1, 0) min2 = np.argmin(dist2, 0) if min1[0] > min2[0]: yield sl[::-1] else: yield sl def _orient_list(out, roi1, roi2): """ Helper function to `orient_by_rois` Performs the inner loop separately. This is needed, because functions with `yield` always return a generator. Flips the streamlines in place (as needed) and returns a reference to the updated list. """ for idx, sl in enumerate(out): dist1 = cdist(sl, roi1, 'euclidean') dist2 = cdist(sl, roi2, 'euclidean') min1 = np.argmin(dist1, 0) min2 = np.argmin(dist2, 0) if min1[0] > min2[0]: out[idx] = sl[::-1] return out def orient_by_rois(streamlines, roi1, roi2, in_place=False, as_generator=False, affine=None): """Orient a set of streamlines according to a pair of ROIs Parameters ---------- streamlines : list or generator List or generator of 2d arrays of 3d coordinates. Each array contains the xyz coordinates of a single streamline. 
roi1, roi2 : ndarray Binary masks designating the location of the regions of interest, or coordinate arrays (n-by-3 array with ROI coordinate in each row). in_place : bool Whether to make the change in-place in the original list (and return a reference to the list), or to make a copy of the list and return this copy, with the relevant streamlines reoriented. Default: False. as_generator : bool Whether to return a generator as output. Default: False affine : ndarray Affine transformation from voxels to streamlines. Default: identity. Returns ------- streamlines : list or generator The same 3D arrays as a list or generator, but reoriented with respect to the ROIs Examples -------- >>> streamlines = [np.array([[0, 0., 0], ... [1, 0., 0.], ... [2, 0., 0.]]), ... np.array([[2, 0., 0.], ... [1, 0., 0], ... [0, 0, 0.]])] >>> roi1 = np.zeros((4, 4, 4), dtype=bool) >>> roi2 = np.zeros_like(roi1) >>> roi1[0, 0, 0] = True >>> roi2[1, 0, 0] = True >>> orient_by_rois(streamlines, roi1, roi2) [array([[ 0., 0., 0.], [ 1., 0., 0.], [ 2., 0., 0.]]), array([[ 0., 0., 0.], [ 1., 0., 0.], [ 2., 0., 0.]])] """ # If we don't already have coordinates on our hands: if len(roi1.shape) == 3: roi1 = np.asarray(np.where(roi1.astype(bool))).T if len(roi2.shape) == 3: roi2 = np.asarray(np.where(roi2.astype(bool))).T if affine is not None: roi1 = apply_affine(affine, roi1) roi2 = apply_affine(affine, roi2) if as_generator: if in_place: w_s = "Cannot return a generator when in_place is set to True" raise ValueError(w_s) return _orient_generator(streamlines, roi1, roi2) # If it's a generator on input, we may as well generate it # here and now: if isinstance(streamlines, types.GeneratorType): out = list(streamlines) elif in_place: out = streamlines else: # Make a copy, so you don't change the output in place: out = deepcopy(streamlines) return _orient_list(out, roi1, roi2) def _extract_vals(data, streamlines, affine=None, threedvec=False): """ Helper function for use with `values_from_volume`. Parameters ---------- data : 3D or 4D array Scalar (for 3D) and vector (for 4D) values to be extracted. For 4D data, interpolation will be done on the 3 spatial dimensions in each volume. streamlines : ndarray or list If array, of shape (n_streamlines, n_nodes, 3) If list, len(n_streamlines) with (n_nodes, 3) array in each element of the list. affine : ndarray, shape (4, 4) Affine transformation from voxels (image coordinates) to streamlines. Default: identity. threedvec : bool Whether the last dimension has length 3. This is a special case in which we can use :func:`vfu.interpolate_vector_3d` for the interploation of 4D volumes without looping over the elements of the last dimension. 
Return ------ array or list (depending on the input) : values interpolate to each coordinate along the length of each streamline """ data = data.astype(np.float) if (isinstance(streamlines, list) or isinstance(streamlines, types.GeneratorType)): if affine is not None: streamlines = ut.move_streamlines(streamlines, np.linalg.inv(affine)) vals = [] for sl in streamlines: if threedvec: vals.append(list(vfu.interpolate_vector_3d(data, sl.astype(np.float))[0])) else: vals.append(list(vfu.interpolate_scalar_3d(data, sl.astype(np.float))[0])) elif isinstance(streamlines, np.ndarray): sl_shape = streamlines.shape sl_cat = streamlines.reshape(sl_shape[0] * sl_shape[1], 3).astype(np.float) if affine is not None: inv_affine = np.linalg.inv(affine) sl_cat = (np.dot(sl_cat, inv_affine[:3, :3]) + inv_affine[:3, 3]) # So that we can index in one operation: if threedvec: vals = np.array(vfu.interpolate_vector_3d(data, sl_cat)[0]) else: vals = np.array(vfu.interpolate_scalar_3d(data, sl_cat)[0]) vals = np.reshape(vals, (sl_shape[0], sl_shape[1], -1)) if vals.shape[-1] == 1: vals = np.reshape(vals, vals.shape[:-1]) else: raise RuntimeError("Extracting values from a volume ", "requires streamlines input as an array, ", "a list of arrays, or a streamline generator.") return vals def values_from_volume(data, streamlines, affine=None): """Extract values of a scalar/vector along each streamline from a volume. Parameters ---------- data : 3D or 4D array Scalar (for 3D) and vector (for 4D) values to be extracted. For 4D data, interpolation will be done on the 3 spatial dimensions in each volume. streamlines : ndarray or list If array, of shape (n_streamlines, n_nodes, 3) If list, len(n_streamlines) with (n_nodes, 3) array in each element of the list. affine : ndarray, shape (4, 4) Affine transformation from voxels (image coordinates) to streamlines. Default: identity. For example, if no affine is provided and the first coordinate of the first streamline is ``[1, 0, 0]``, data[1, 0, 0] would be returned as the value for that streamline coordinate Return ------ array or list (depending on the input) : values interpolate to each coordinate along the length of each streamline. Notes ----- Values are extracted from the image based on the 3D coordinates of the nodes that comprise the points in the streamline, without any interpolation into segments between the nodes. Using this function with streamlines that have been resampled into a very small number of nodes will result in very few values. 
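    A minimal usage sketch (illustrative; ``fa`` stands for a 3D scalar map
    such as an FA volume, ``img_affine`` for its voxel-to-world affine and
    ``streamlines`` for a list of (N, 3) coordinate arrays living in the
    space that affine maps to, all assumed to be loaded by the caller)::

        fa_profiles = values_from_volume(fa, streamlines, affine=img_affine)
        # one array of per-node FA values for each streamline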
""" data = np.asarray(data) if len(data.shape) == 4: if data.shape[-1] == 3: return _extract_vals(data, streamlines, affine=affine, threedvec=True) if isinstance(streamlines, types.GeneratorType): streamlines = list(streamlines) vals = [] for ii in range(data.shape[-1]): vals.append(_extract_vals(data[..., ii], streamlines, affine=affine)) if isinstance(vals[-1], np.ndarray): return np.swapaxes(np.array(vals), 2, 1).T else: new_vals = [] for sl_idx in range(len(streamlines)): sl_vals = [] for ii in range(data.shape[-1]): sl_vals.append(vals[ii][sl_idx]) new_vals.append(np.array(sl_vals).T) return new_vals elif len(data.shape) == 3: return _extract_vals(data, streamlines, affine=affine) else: raise ValueError("Data needs to have 3 or 4 dimensions") dipy-0.13.0/dipy/tracking/streamlinespeed.pxd000066400000000000000000000006201317371701200212100ustar00rootroot00000000000000# distutils: language = c # cython: wraparound=False, cdivision=True, boundscheck=False ctypedef float[:, :] float2d ctypedef double[:, :] double2d ctypedef fused Streamline: float2d double2d cdef double c_length(Streamline streamline) nogil cdef void c_arclengths(Streamline streamline, double * out) nogil cdef void c_set_number_of_points(Streamline streamline, Streamline out) nogil dipy-0.13.0/dipy/tracking/streamlinespeed.pyx000066400000000000000000000570361317371701200212520ustar00rootroot00000000000000# distutils: language = c # cython: wraparound=False, cdivision=True, boundscheck=False import cython import numpy as np from libc.math cimport sqrt from libc.stdlib cimport malloc, free cimport numpy as np from dipy.tracking import Streamlines cdef extern from "dpy_math.h" nogil: bint dpy_isnan(double x) cdef double c_length(Streamline streamline) nogil: cdef: np.npy_intp i double out = 0.0 double dn, sum_dn_sqr for i in range(1, streamline.shape[0]): sum_dn_sqr = 0.0 for j in range(streamline.shape[1]): dn = streamline[i, j] - streamline[i-1, j] sum_dn_sqr += dn*dn out += sqrt(sum_dn_sqr) return out cdef void c_arclengths_from_arraysequence(Streamline points, np.npy_intp[:] offsets, np.npy_intp[:] lengths, double[:] arclengths) nogil: cdef: np.npy_intp i, j, k np.npy_intp offset double dn, sum_dn_sqr for i in range(offsets.shape[0]): offset = offsets[i] arclengths[i] = 0 for j in range(1, lengths[i]): sum_dn_sqr = 0.0 for k in range(points.shape[1]): dn = points[offset+j, k] - points[offset+j-1, k] sum_dn_sqr += dn*dn arclengths[i] += sqrt(sum_dn_sqr) def length(streamlines): ''' Euclidean length of streamlines Length is in mm only if streamlines are expressed in world coordinates. Parameters ------------ streamlines : ndarray or a list or :class:`dipy.tracking.Streamlines` If ndarray, must have shape (N,3) where N is the number of points of the streamline. If list, each item must be ndarray shape (Ni,3) where Ni is the number of points of streamline i. If :class:`dipy.tracking.Streamlines`, its `common_shape` must be 3. Returns --------- lengths : scalar or ndarray shape (N,) If there is only one streamline, a scalar representing the length of the streamline. If there are several streamlines, ndarray containing the length of every streamline. 
Examples ---------- >>> from dipy.tracking.streamline import length >>> import numpy as np >>> streamline = np.array([[1, 1, 1], [2, 3, 4], [0, 0, 0]]) >>> expected_length = np.sqrt([1+2**2+3**2, 2**2+3**2+4**2]).sum() >>> length(streamline) == expected_length True >>> streamlines = [streamline, np.vstack([streamline, streamline[::-1]])] >>> expected_lengths = [expected_length, 2*expected_length] >>> lengths = [length(streamlines[0]), length(streamlines[1])] >>> np.allclose(lengths, expected_lengths) True >>> length([]) 0.0 >>> length(np.array([[1, 2, 3]])) 0.0 ''' if isinstance(streamlines, Streamlines): if len(streamlines) == 0: return 0.0 arclengths = np.zeros(len(streamlines), dtype=np.float64) if streamlines.data.dtype == np.float32: c_arclengths_from_arraysequence[float2d]( streamlines.data, streamlines._offsets.astype(np.intp), streamlines._lengths.astype(np.intp), arclengths) else: c_arclengths_from_arraysequence[double2d]( streamlines.data, streamlines._offsets.astype(np.intp), streamlines._lengths.astype(np.intp), arclengths) return arclengths only_one_streamlines = False if type(streamlines) is np.ndarray: only_one_streamlines = True streamlines = [streamlines] if len(streamlines) == 0: return 0.0 dtype = streamlines[0].dtype for streamline in streamlines: if streamline.dtype != dtype: dtype = None break # Allocate memory for each streamline length. streamlines_length = np.empty(len(streamlines), dtype=np.float64) cdef np.npy_intp i if dtype is None: # List of streamlines having different dtypes for i in range(len(streamlines)): dtype = streamlines[i].dtype # HACK: To avoid memleaks we have to recast with astype(dtype). streamline = streamlines[i].astype(dtype) if dtype != np.float32 and dtype != np.float64: is_integer = dtype == np.int64 or dtype == np.uint64 dtype = np.float64 if is_integer else np.float32 streamline = streamlines[i].astype(dtype) if dtype == np.float32: streamlines_length[i] = c_length[float2d](streamline) else: streamlines_length[i] = c_length[double2d](streamline) elif dtype == np.float32: # All streamlines have composed of float32 points for i in range(len(streamlines)): # HACK: To avoid memleaks we have to recast with astype(dtype). streamline = streamlines[i].astype(dtype) streamlines_length[i] = c_length[float2d](streamline) elif dtype == np.float64: # All streamlines are composed of float64 points for i in range(len(streamlines)): # HACK: To avoid memleaks we have to recast with astype(dtype). streamline = streamlines[i].astype(dtype) streamlines_length[i] = c_length[double2d](streamline) elif dtype == np.int64 or dtype == np.uint64: # All streamlines are composed of int64 or uint64 points so convert # them in float64 one at the time. for i in range(len(streamlines)): streamline = streamlines[i].astype(np.float64) streamlines_length[i] = c_length[double2d](streamline) else: # All streamlines are composed of points with a dtype fitting in # 32 bits so convert them in float32 one at the time. 
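# (Editor's added note.)  Any remaining dtype, e.g. int16, int32, uint8 or
# float16, is handled here by casting each streamline to float32 and calling
# the float32 fused-type specialization c_length[float2d]; only int64/uint64
# inputs (handled just above) take the float64 path.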
for i in range(len(streamlines)): streamline = streamlines[i].astype(np.float32) streamlines_length[i] = c_length[float2d](streamline) if only_one_streamlines: return streamlines_length[0] else: return streamlines_length cdef void c_arclengths(Streamline streamline, double* out) nogil: cdef np.npy_intp i = 0 cdef double dn out[0] = 0.0 for i in range(1, streamline.shape[0]): out[i] = 0.0 for j in range(streamline.shape[1]): dn = streamline[i, j] - streamline[i-1, j] out[i] += dn*dn out[i] = out[i-1] + sqrt(out[i]) cdef void c_set_number_of_points(Streamline streamline, Streamline out) nogil: cdef: np.npy_intp N = streamline.shape[0] np.npy_intp D = streamline.shape[1] np.npy_intp new_N = out.shape[0] double ratio, step, next_point, delta np.npy_intp i, j, k, dim # Get arclength at each point. arclengths = malloc(streamline.shape[0] * sizeof(double)) c_arclengths(streamline, arclengths) step = arclengths[N-1] / (new_N-1) next_point = 0.0 i = 0 j = 0 k = 0 while next_point < arclengths[N-1]: if next_point == arclengths[k]: for dim in range(D): out[i, dim] = streamline[j, dim] next_point += step i += 1 j += 1 k += 1 elif next_point < arclengths[k]: ratio = 1 - ((arclengths[k]-next_point) / (arclengths[k]-arclengths[k-1])) for dim in range(D): delta = streamline[j, dim] - streamline[j-1, dim] out[i, dim] = streamline[j-1, dim] + ratio * delta next_point += step i += 1 else: j += 1 k += 1 # Last resampled point always the one from original streamline. for dim in range(D): out[new_N-1, dim] = streamline[N-1, dim] free(arclengths) cdef void c_set_number_of_points_from_arraysequence(Streamline points, np.npy_intp[:] offsets, np.npy_intp[:] lengths, long nb_points, Streamline out) nogil: cdef: np.npy_intp i, j, k np.npy_intp offset, length np.npy_intp offset_out = 0 double dn, sum_dn_sqr for i in range(offsets.shape[0]): offset = offsets[i] length = lengths[i] c_set_number_of_points(points[offset:offset+length, :], out[offset_out:offset_out+nb_points, :]) offset_out += nb_points def set_number_of_points(streamlines, nb_points=3): ''' Change the number of points of streamlines (either by downsampling or upsampling) Change the number of points of streamlines in order to obtain `nb_points`-1 segments of equal length. Points of streamlines will be modified along the curve. Parameters ---------- streamlines : ndarray or a list or :class:`dipy.tracking.Streamlines` If ndarray, must have shape (N,3) where N is the number of points of the streamline. If list, each item must be ndarray shape (Ni,3) where Ni is the number of points of streamline i. If :class:`dipy.tracking.Streamlines`, its `common_shape` must be 3. nb_points : int integer representing number of points wanted along the curve. Returns ------- new_streamlines : ndarray or a list or :class:`dipy.tracking.Streamlines` Results of the downsampling or upsampling process. 
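Notes
-----
(Editor's addition.)  Conceptually the resampling is an arclength
parameterization: compute the cumulative segment lengths, then linearly
interpolate ``nb_points`` positions spaced equally along that arclength,
which the compiled helper ``c_set_number_of_points`` above does in C.  A
simplified pure-NumPy rendering of the same idea (illustrative only, not the
code path actually used)::

    import numpy as np

    def resample_sketch(points, nb_points):
        seg = np.sqrt((np.diff(points, axis=0) ** 2).sum(axis=1))
        cum = np.r_[0.0, np.cumsum(seg)]              # arclength at each point
        new_t = np.linspace(0.0, cum[-1], nb_points)  # equally spaced targets
        return np.column_stack([np.interp(new_t, cum, points[:, d])
                                for d in range(points.shape[1])])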
Examples -------- >>> from dipy.tracking.streamline import set_number_of_points >>> import numpy as np One streamline, a semi-circle: >>> theta = np.pi*np.linspace(0, 1, 100) >>> x = np.cos(theta) >>> y = np.sin(theta) >>> z = 0 * x >>> streamline = np.vstack((x, y, z)).T >>> modified_streamline = set_number_of_points(streamline, 3) >>> len(modified_streamline) 3 Multiple streamlines: >>> streamlines = [streamline, streamline[::2]] >>> new_streamlines = set_number_of_points(streamlines, 10) >>> [len(s) for s in streamlines] [100, 50] >>> [len(s) for s in new_streamlines] [10, 10] ''' if isinstance(streamlines, Streamlines): if len(streamlines) == 0: return Streamlines() nb_streamlines = len(streamlines) dtype = streamlines._data.dtype new_streamlines = Streamlines() new_streamlines._data = np.zeros((nb_streamlines * nb_points, 3), dtype=dtype) new_streamlines._offsets = nb_points * np.arange(nb_streamlines, dtype=np.intp) new_streamlines._lengths = nb_points * np.ones(nb_streamlines, dtype=np.intp) if dtype == np.float32: c_set_number_of_points_from_arraysequence[float2d]( streamlines._data, streamlines._offsets.astype(np.intp), streamlines._lengths.astype(np.intp), nb_points, new_streamlines._data) else: c_set_number_of_points_from_arraysequence[double2d]( streamlines._data, streamlines._offsets.astype(np.intp), streamlines._lengths.astype(np.intp), nb_points, new_streamlines._data) return new_streamlines only_one_streamlines = False if type(streamlines) is np.ndarray: only_one_streamlines = True streamlines = [streamlines] if len(streamlines) == 0: return [] if nb_points < 2: raise ValueError("nb_points must be at least 2") dtype = streamlines[0].dtype for streamline in streamlines: if streamline.dtype != dtype: dtype = None if len(streamline) < 2: raise ValueError("All streamlines must have at least 2 points.") # Allocate memory for each modified streamline new_streamlines = [] cdef np.npy_intp i if dtype is None: # List of streamlines having different dtypes for i in range(len(streamlines)): dtype = streamlines[i].dtype # HACK: To avoid memleaks we have to recast with astype(dtype). streamline = streamlines[i].astype(dtype) if dtype != np.float32 and dtype != np.float64: dtype = np.float32 if dtype == np.int64 or dtype == np.uint64: dtype = np.float64 streamline = streamline.astype(dtype) new_streamline = np.empty((nb_points, streamline.shape[1]), dtype=dtype) if dtype == np.float32: c_set_number_of_points[float2d](streamline, new_streamline) else: c_set_number_of_points[double2d](streamline, new_streamline) # HACK: To avoid memleaks we have to recast with astype(dtype). new_streamlines.append(new_streamline.astype(dtype)) elif dtype == np.float32: # All streamlines have composed of float32 points for i in range(len(streamlines)): streamline = streamlines[i].astype(dtype) modified_streamline = np.empty((nb_points, streamline.shape[1]), dtype=streamline.dtype) c_set_number_of_points[float2d](streamline, modified_streamline) # HACK: To avoid memleaks we have to recast with astype(dtype). new_streamlines.append(modified_streamline.astype(dtype)) elif dtype == np.float64: # All streamlines are composed of float64 points for i in range(len(streamlines)): streamline = streamlines[i].astype(dtype) modified_streamline = np.empty((nb_points, streamline.shape[1]), dtype=streamline.dtype) c_set_number_of_points[double2d](streamline, modified_streamline) # HACK: To avoid memleaks we have to recast with astype(dtype). 
new_streamlines.append(modified_streamline.astype(dtype)) elif dtype == np.int64 or dtype == np.uint64: # All streamlines are composed of int64 or uint64 points so convert # them in float64 one at the time for i in range(len(streamlines)): streamline = streamlines[i].astype(np.float64) modified_streamline = np.empty((nb_points, streamline.shape[1]), dtype=streamline.dtype) c_set_number_of_points[double2d](streamline, modified_streamline) # HACK: To avoid memleaks we've to recast with astype(np.float64). new_streamlines.append(modified_streamline.astype(np.float64)) else: # All streamlines are composed of points with a dtype fitting in # 32bits so convert them in float32 one at the time for i in range(len(streamlines)): streamline = streamlines[i].astype(np.float32) modified_streamline = np.empty((nb_points, streamline.shape[1]), dtype=streamline.dtype) c_set_number_of_points[float2d](streamline, modified_streamline) # HACK: To avoid memleaks we've to recast with astype(np.float32). new_streamlines.append(modified_streamline.astype(np.float32)) if only_one_streamlines: return new_streamlines[0] else: return new_streamlines cdef double c_norm_of_cross_product(double bx, double by, double bz, double cx, double cy, double cz) nogil: """ Computes the norm of the cross-product in 3D. """ cdef double ax, ay, az ax = by*cz - bz*cy ay = bz*cx - bx*cz az = bx*cy - by*cx return sqrt(ax*ax + ay*ay + az*az) cdef double c_dist_to_line(Streamline streamline, np.npy_intp prev, np.npy_intp next, np.npy_intp curr) nogil: """ Computes the shortest Euclidean distance between a point `curr` and the line passing through `prev` and `next`. """ cdef: double dn, norm1, norm2 np.npy_intp D = streamline.shape[1] # Compute cross product of next-prev and curr-next norm1 = c_norm_of_cross_product(streamline[next, 0]-streamline[prev, 0], streamline[next, 1]-streamline[prev, 1], streamline[next, 2]-streamline[prev, 2], streamline[curr, 0]-streamline[next, 0], streamline[curr, 1]-streamline[next, 1], streamline[curr, 2]-streamline[next, 2]) # Norm of next-prev norm2 = 0.0 for d in range(D): dn = streamline[next, d]-streamline[prev, d] norm2 += dn*dn norm2 = sqrt(norm2) return norm1 / norm2 cdef double c_segment_length(Streamline streamline, np.npy_intp start, np.npy_intp end) nogil: """ Computes the length of the segment going from `start` to `end`. """ cdef: np.npy_intp D = streamline.shape[1] np.npy_intp d double segment_length = 0.0 double dn for d in range(D): dn = streamline[end, d] - streamline[start, d] segment_length += dn*dn return sqrt(segment_length) cdef np.npy_intp c_compress_streamline(Streamline streamline, Streamline out, double tol_error, double max_segment_length) nogil: """ Compresses a streamline (see function `compress_streamlines`).""" cdef: np.npy_intp N = streamline.shape[0] np.npy_intp D = streamline.shape[1] np.npy_intp nb_points = 0 np.npy_intp d, prev, next, curr double segment_length # Copy first point since it is always kept. for d in range(D): out[0, d] = streamline[0, d] nb_points = 1 prev = 0 # Loop through the points of the streamline checking if we can use the # linearized segment: next-prev. We start with next=2 (third points) since # we already added point 0 and segment between the two firsts is linear. for next in range(2, N): # Euclidean distance between last added point and current point. 
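# (Editor's added comments.)  The point at index next-1 is kept in the output
# when either (a) the candidate merged segment prev->next, checked just below,
# would be longer than `max_segment_length`, or (b) one of the intermediate
# points prev+1..next-1 lies more than `tol_error` mm from the straight line
# through prev and next (c_dist_to_line), or that distance is NaN.  Otherwise
# the intermediate points are dropped, which is the linearization step.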
if c_segment_length(streamline, prev, next) > max_segment_length: for d in range(D): out[nb_points, d] = streamline[next-1, d] nb_points += 1 prev = next-1 continue # Check that each point is not offset by more than `tol_error` mm. for curr in range(prev+1, next): dist = c_dist_to_line(streamline, prev, next, curr) if dpy_isnan(dist) or dist > tol_error: for d in range(D): out[nb_points, d] = streamline[next-1, d] nb_points += 1 prev = next-1 break # Copy last point since it is always kept. for d in range(D): out[nb_points, d] = streamline[N-1, d] nb_points += 1 return nb_points def compress_streamlines(streamlines, tol_error=0.01, max_segment_length=10): """ Compress streamlines by linearization as in [Presseau15]_. The compression consists in merging consecutive segments that are nearly collinear. The merging is achieved by removing the point the two segments have in common. The linearization process [Presseau15]_ ensures that every point being removed are within a certain margin (in mm) of the resulting streamline. Recommendations for setting this margin can be found in [Presseau15]_ (in which they called it tolerance error). The compression also ensures that two consecutive points won't be too far from each other (precisely less or equal than `max_segment_length`mm). This is a tradeoff to speed up the linearization process [Rheault15]_. A low value will result in a faster linearization but low compression, whereas a high value will result in a slower linearization but high compression. Parameters ---------- streamlines : one or a list of array-like of shape (N,3) Array representing x,y,z of N points in a streamline. tol_error : float (optional) Tolerance error in mm (default: 0.01). A rule of thumb is to set it to 0.01mm for deterministic streamlines and 0.1mm for probabilitic streamlines. max_segment_length : float (optional) Maximum length in mm of any given segment produced by the compression. The default is 10mm. (In [Presseau15]_, they used a value of `np.inf`). Returns ------- compressed_streamlines : one or a list of array-like Results of the linearization process. Examples -------- >>> from dipy.tracking.streamline import compress_streamlines >>> import numpy as np >>> # One streamline: a wiggling line >>> rng = np.random.RandomState(42) >>> streamline = np.linspace(0, 10, 100*3).reshape((100, 3)) >>> streamline += 0.2 * rng.rand(100, 3) >>> c_streamline = compress_streamlines(streamline, tol_error=0.2) >>> len(streamline) 100 >>> len(c_streamline) 10 >>> # Multiple streamlines >>> streamlines = [streamline, streamline[::2]] >>> c_streamlines = compress_streamlines(streamlines, tol_error=0.2) >>> [len(s) for s in streamlines] [100, 50] >>> [len(s) for s in c_streamlines] [10, 7] Notes ----- Be aware that compressed streamlines have variable step sizes. One needs to be careful when computing streamlines-based metrics [Houde15]_. References ---------- .. [Presseau15] Presseau C. et al., A new compression format for fiber tracking datasets, NeuroImage, no 109, 73-83, 2015. .. [Rheault15] Rheault F. et al., Real Time Interaction with Millions of Streamlines, ISMRM, 2015. .. [Houde15] Houde J.-C. et al. How to Avoid Biased Streamlines-Based Metrics for Streamlines with Variable Step Sizes, ISMRM, 2015. 
""" only_one_streamlines = False if type(streamlines) is np.ndarray: only_one_streamlines = True streamlines = [streamlines] if len(streamlines) == 0: return [] compressed_streamlines = [] cdef np.npy_intp i for i in range(len(streamlines)): dtype = streamlines[i].dtype # HACK: To avoid memleaks we have to recast with astype(dtype). streamline = streamlines[i].astype(dtype) shape = streamline.shape if dtype != np.float32 and dtype != np.float64: dtype = np.float64 if dtype == np.int64 or dtype == np.uint64 else np.float32 streamline = streamline.astype(dtype) if shape[0] <= 2: compressed_streamlines.append(streamline.copy()) continue compressed_streamline = np.empty(shape, dtype) if dtype == np.float32: nb_points = c_compress_streamline[float2d](streamline, compressed_streamline, tol_error, max_segment_length) else: nb_points = c_compress_streamline[double2d](streamline, compressed_streamline, tol_error, max_segment_length) compressed_streamline.resize((nb_points, streamline.shape[1])) # HACK: To avoid memleaks we have to recast with astype(dtype). compressed_streamlines.append(compressed_streamline.astype(dtype)) if only_one_streamlines: return compressed_streamlines[0] else: return compressed_streamlines dipy-0.13.0/dipy/tracking/tests/000077500000000000000000000000001317371701200164535ustar00rootroot00000000000000dipy-0.13.0/dipy/tracking/tests/__init__.py000066400000000000000000000001211317371701200205560ustar00rootroot00000000000000# Test callable from numpy.testing import Tester test = Tester().test del Tester dipy-0.13.0/dipy/tracking/tests/test_distances.py000066400000000000000000000247121317371701200220470ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import import numpy as np import nose from nose.tools import (assert_true, assert_false, assert_equal, assert_almost_equal) from numpy.testing import assert_array_equal, assert_array_almost_equal from dipy.tracking import metrics as tm from dipy.tracking import distances as pf def test_LSCv2(): xyz1 = np.array([[1, 0, 0], [2, 0, 0], [3, 0, 0]], dtype='float32') xyz2 = np.array([[1, 0, 0], [1, 2, 0], [1, 3, 0]], dtype='float32') xyz3 = np.array([[1.1, 0, 0], [1, 2, 0], [1, 3, 0]], dtype='float32') xyz4 = np.array([[1, 0, 0], [2.1, 0, 0], [3, 0, 0]], dtype='float32') xyz5 = np.array([[100, 0, 0], [200, 0, 0], [300, 0, 0]], dtype='float32') xyz6 = np.array([[0, 20, 0], [0, 40, 0], [300, 50, 0]], dtype='float32') T = [xyz1, xyz2, xyz3, xyz4, xyz5, xyz6] C = pf.local_skeleton_clustering(T, 0.2) # print C # print len(C) C2 = pf.local_skeleton_clustering_3pts(T, 0.2) # print C2 # print len(C2) # """ for i in range(40): xyz = np.random.rand(3, 3).astype('f4') T.append(xyz) from time import time t1 = time() C3 = pf.local_skeleton_clustering(T, .5) t2 = time() print(t2-t1) print(len(C3)) t1 = time() C4 = pf.local_skeleton_clustering_3pts(T, .5) t2 = time() print(t2-t1) print(len(C4)) for c in C3: assert_equal(np.sum(C3[c]['hidden']-C4[c]['hidden']), 0) T2 = [] for i in range(10**4): xyz = np.random.rand(10, 3).astype('f4') T2.append(xyz) t1 = time() C5 = pf.local_skeleton_clustering(T2, .5) t2 = time() print(t2-t1) print(len(C5)) from dipy.data import get_data from nibabel import trackvis as tv try: from dipy.viz import fvtk except ImportError as e: raise nose.plugins.skip.SkipTest( 'Fails to import dipy.viz due to %s' % str(e)) streams, hdr = tv.read(get_data('fornix')) T3 = [tm.downsample(s[0], 6) for s in streams] print('lenT3', len(T3)) C = pf.local_skeleton_clustering(T3, 10.) 
print('lenC', len(C)) """ r = fvtk.ren() colors = np.zeros((len(C), 3)) for c in C: color = np.random.rand(3) for i in C[c]['indices']: fvtk.add(r, fvtk.line(T3[i], color)) colors[c] = color fvtk.show(r) fvtk.clear(r) skeleton = [] def width(w): if w<1: return 1 else: return w for c in C: bundle = [T3[i] for i in C[c]['indices']] si,s = pf.most_similar_track_mam(bundle, 'avg') skeleton.append(bundle[si]) fvtk.label(r,text = str(len(bundle)), pos=(bundle[si][-1]), scale=(2, 2, 2)) fvtk.add(r, fvtk.line(skeleton, colors, opacity=1, linewidth = width(len(bundle)/10.))) fvtk.show(r) """ def test_bundles_distances_mam(): xyz1A = np.array([[0, 0, 0], [1, 0, 0], [2, 0, 0], [3, 0, 0]], dtype='float32') xyz2A = np.array([[0, 1, 1], [1, 0, 1], [2, 3, -2]], dtype='float32') xyz1B = np.array([[-1, 0, 0], [2, 0, 0], [2, 3, 0], [3, 0, 0]], dtype='float32') tracksA = [xyz1A, xyz2A] tracksB = [xyz1B, xyz1A, xyz2A] for metric in ('avg', 'min', 'max'): DM2 = pf.bundles_distances_mam(tracksA, tracksB, metric=metric) def test_bundles_distances_mdf(): xyz1A = np.array([[0, 0, 0], [1, 0, 0], [2, 0, 0]], dtype='float32') xyz2A = np.array([[0, 1, 1], [1, 0, 1], [2, 3, -2]], dtype='float32') xyz3A = np.array([[0, 0, 0], [1, 0, 0], [3, 0, 0]], dtype='float32') xyz1B = np.array([[-1, 0, 0], [2, 0, 0], [2, 3, 0]], dtype='float32') tracksA = [xyz1A, xyz2A] tracksB = [xyz1B, xyz1A, xyz2A] DM2 = pf.bundles_distances_mdf(tracksA, tracksB) tracksA = [xyz1A, xyz1A] tracksB = [xyz1A, xyz1A] DM2 = pf.bundles_distances_mdf(tracksA, tracksB) assert_array_almost_equal(DM2, np.zeros((2, 2))) tracksA = [xyz1A, xyz3A] tracksB = [xyz2A] DM2 = pf.bundles_distances_mdf(tracksA, tracksB) print(DM2) # assert_array_almost_equal(DM2,np.zeros((2,2))) DM = np.zeros(DM2.shape) for (a, ta) in enumerate(tracksA): for (b, tb) in enumerate(tracksB): md = np.sum(np.sqrt(np.sum((ta-tb)**2, axis=1)))/3. md2 = np.sum(np.sqrt(np.sum((ta-tb[::-1])**2, axis=1)))/3. DM[a, b] = np.min((md, md2)) print(DM) print('--------------') for t in tracksA: print(t) print('--------------') for t in tracksB: print(t) assert_array_almost_equal(DM, DM2, 4) def test_mam_distances(): xyz1 = np.array([[0, 0, 0], [1, 0, 0], [2, 0, 0], [3, 0, 0]]) xyz2 = np.array([[0, 1, 1], [1, 0, 1], [2, 3, -2]]) # dm=array([[ 2, 2, 17], [ 3, 1, 14], [6, 2, 13], [11, 5, 14]]) # this is the distance matrix between points of xyz1 # and points of xyz2 xyz1 = xyz1.astype('float32') xyz2 = xyz2.astype('float32') zd2 = pf.mam_distances(xyz1, xyz2) assert_almost_equal(zd2[0], 1.76135602742) def test_approx_ei_traj(): segs = 100 t = np.linspace(0, 1.75*2*np.pi, segs) x = t y = 5*np.sin(5*t) z = np.zeros(x.shape) xyz = np.vstack((x, y, z)).T xyza = pf.approx_polygon_track(xyz) assert_equal(len(xyza), 27) def test_approx_mdl_traj(): t = np.linspace(0, 1.75*2*np.pi, 100) x = np.sin(t) y = np.cos(t) z = t xyz = np.vstack((x, y, z)).T xyza1 = pf.approximate_mdl_trajectory(xyz, alpha=1.) xyza2 = pf.approximate_mdl_trajectory(xyz, alpha=2.) 
assert_equal(len(xyza1), 10) assert_equal(len(xyza2), 8) assert_array_almost_equal( xyza1, np.array([[ 0.00000000e+00, 1.00000000e+00, 0.00000000e+00], [ 9.39692621e-01, 3.42020143e-01, 1.22173048e+00], [ 6.42787610e-01, -7.66044443e-01, 2.44346095e+00], [ -5.00000000e-01, -8.66025404e-01, 3.66519143e+00], [ -9.84807753e-01, 1.73648178e-01, 4.88692191e+00], [ -1.73648178e-01, 9.84807753e-01, 6.10865238e+00], [ 8.66025404e-01, 5.00000000e-01, 7.33038286e+00], [ 7.66044443e-01, -6.42787610e-01, 8.55211333e+00], [ -3.42020143e-01, -9.39692621e-01, 9.77384381e+00], [ -1.00000000e+00, -4.28626380e-16, 1.09955743e+01]])) assert_array_almost_equal( xyza2, np.array([[ 0.00000000e+00, 1.00000000e+00, 0.00000000e+00], [ 9.95471923e-01, -9.50560433e-02, 1.66599610e+00], [ -1.89251244e-01, -9.81928697e-01, 3.33199221e+00], [ -9.59492974e-01, 2.81732557e-01, 4.99798831e+00], [ 3.71662456e-01, 9.28367933e-01, 6.66398442e+00], [ 8.88835449e-01, -4.58226522e-01, 8.32998052e+00], [ -5.40640817e-01, -8.41253533e-01, 9.99597663e+00], [ -1.00000000e+00, -4.28626380e-16, 1.09955743e+01]])) def test_point_track_sq_distance(): t = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]], dtype='f4') p = np.array([-1, -1., -1], dtype='f4') assert_equal( pf.point_track_sq_distance_check(t, p, .2**2), False) pf.point_track_sq_distance_check(t, p, 2**2), True t = np.array([[0, 0, 0], [1, 0, 0], [2, 2, 0]], dtype='f4') p = np.array([.5, 0, 0], dtype='f4') assert_equal( pf.point_track_sq_distance_check(t, p, .2**2), True) p = np.array([.5, 1, 0], dtype='f4') assert_equal( pf.point_track_sq_distance_check(t, p, .2**2), False) def test_track_roi_intersection_check(): roi = np.array([[0, 0, 0], [1, 0, 0], [2, 0, 0]], dtype='f4') t = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]], dtype='f4') assert_equal( pf.track_roi_intersection_check(t, roi, 1), True) t = np.array([[0, 0, 0], [1, 0, 0], [2, 2, 2]], dtype='f4') assert_equal(pf.track_roi_intersection_check(t, roi, 1), True) t = np.array([[1, 1, 0], [1, 0, 0], [1, -1, 0]], dtype='f4') assert_equal( pf.track_roi_intersection_check(t, roi, 1), True) t = np.array([[4, 0, 0], [4, 1, 1], [4, 2, 0]], dtype='f4') assert_equal(pf.track_roi_intersection_check(t, roi, 1), False) def test_minimum_distance(): xyz1 = np.array([[1, 0, 0], [2, 0, 0]], dtype='float32') xyz2 = np.array([[3, 0, 0], [4, 0, 0]], dtype='float32') assert_equal(pf.minimum_closest_distance(xyz1, xyz2), 1.0) def test_most_similar_mam(): xyz1 = np.array([[0, 0, 0], [1, 0, 0], [2, 0, 0], [3, 0, 0]], dtype='float32') xyz2 = np.array([[0, 1, 1], [1, 0, 1], [2, 3, -2]], dtype='float32') xyz3 = np.array([[-1, 0, 0], [2, 0, 0], [2, 3, 0], [3, 0, 0]], dtype='float32') tracks = [xyz1, xyz2, xyz3] for metric in ('avg', 'min', 'max'): # pf should be much faster and the results equivalent si2, s2 = pf.most_similar_track_mam(tracks, metric=metric) def test_cut_plane(): dt = np.dtype(np.float32) refx = np.array([[0, 0, 0], [1, 0, 0], [2, 0, 0], [3, 0, 0]], dtype=dt) bundlex = [np.array([[0.5, 1, 0], [1.5, 2, 0], [2.5, 3, 0]], dtype=dt), np.array([[0.5, 2, 0], [1.5, 3, 0], [2.5, 4, 0]], dtype=dt), np.array([[0.5, 1, 1], [1.5, 2, 2], [2.5, 3, 3]], dtype=dt), np.array([[-0.5, 2, -1], [-1.5, 3, -2], [-2.5, 4, -3]], dtype=dt)] expected_hit0 = [ [ 1. , 1.5 , 0. , 0.70710683, 0. ], [ 1. , 2.5 , 0. , 0.70710677, 1. ], [ 1. , 1.5 , 1.5 , 0.81649661, 2. ]] expected_hit1 = [ [ 2. , 2.5 , 0. , 0.70710677, 0. ], [ 2. , 3.5 , 0. , 0.70710677, 1. ], [ 2. , 2.5 , 2.5 , 0.81649655, 2. 
]] hitx = pf.cut_plane(bundlex, refx) assert_array_almost_equal(hitx[0], expected_hit0) assert_array_almost_equal(hitx[1], expected_hit1) # check that algorithm allows types other than float32 bundlex[0] = np.asarray(bundlex[0], dtype=np.float64) hitx = pf.cut_plane(bundlex, refx) assert_array_almost_equal(hitx[0], expected_hit0) assert_array_almost_equal(hitx[1], expected_hit1) refx = np.asarray(refx, dtype=np.float64) hitx = pf.cut_plane(bundlex, refx) assert_array_almost_equal(hitx[0], expected_hit0) assert_array_almost_equal(hitx[1], expected_hit1) dipy-0.13.0/dipy/tracking/tests/test_fbc.py000066400000000000000000000031171317371701200206200ustar00rootroot00000000000000from dipy.denoise.enhancement_kernel import EnhancementKernel from dipy.tracking.fbcmeasures import FBCMeasures from dipy.viz import fvtk from dipy.viz.colormap import line_colors from dipy.viz import window, actor from dipy.core.sphere import Sphere import numpy as np import numpy.testing as npt def test_fbc(): """Test the FBC measures on a set of fibers""" # Generate two fibers of 10 points streamlines = [] for i in range(2): fiber = np.zeros((10, 3)) for j in range(10): fiber[j, 0] = j fiber[j, 1] = i*0.2 fiber[j, 2] = 0 streamlines.append(fiber) # Create lookup table. # A fixed set of orientations is used to guarantee deterministic results D33 = 1.0 D44 = 0.04 t = 1 sphere = Sphere(xyz=np.array([[0.82819078, 0.51050355, 0.23127074], [-0.10761926, -0.95554309, 0.27450957], [0.4101745, -0.07154038, 0.90919682], [-0.75573448, 0.64854889, 0.09082809], [-0.56874549, 0.01377562, 0.8223982]])) k = EnhancementKernel(D33, D44, t, orientations=sphere, force_recompute=True) # run FBC fbc = FBCMeasures(streamlines, k, verbose=True) # get FBC values fbc_sl_orig, clrs_orig, rfbc_orig = \ fbc.get_points_rfbc_thresholded(0, emphasis=0.01) # check mean RFBC against tested value npt.assert_almost_equal(np.mean(rfbc_orig), 1.0500466494329224) if __name__ == '__main__': npt.run_module_suite() dipy-0.13.0/dipy/tracking/tests/test_learning.py000066400000000000000000000017521317371701200216700ustar00rootroot00000000000000''' Testing track_metrics module ''' import numpy as np from nose.tools import (assert_true, assert_false, assert_equal, assert_almost_equal) from numpy.testing import assert_array_equal, assert_array_almost_equal from dipy.tracking import metrics as tm from dipy.tracking import distances as td from dipy.tracking import learning as tl def test_det_corr_tracks(): A = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]]) B = np.array([[1, 0, 0], [2, 0, 0], [3, 0, 0]]) C = np.array([[0, 0, -1], [0, 0, -2], [0, 0, -3]]) bundle1 = [A, B, C] bundle2 = [B, A] indices = [0, 1] print(A) print(B) print(C) arr = tl.detect_corresponding_tracks(indices, bundle1, bundle2) print(arr) assert_array_equal(arr, np.array([[0, 1], [1, 0]])) indices2 = [0, 1] arr2 = tl.detect_corresponding_tracks_plus(indices, bundle1, indices2, bundle2) print(arr2) assert_array_equal(arr, arr2) dipy-0.13.0/dipy/tracking/tests/test_life.py000066400000000000000000000164201317371701200210060ustar00rootroot00000000000000import os import os.path as op import numpy as np import numpy.testing as npt import numpy.testing.decorators as dec import scipy.sparse as sps import scipy.linalg as la import nibabel as nib import dipy.tracking.life as life import dipy.tracking.eudx as edx import dipy.core.sphere as dps import dipy.core.gradients as dpg import dipy.data as dpd import dipy.core.optimize as opt import dipy.core.ndindex as nd import dipy.core.gradients as grad import 
dipy.reconst.dti as dti THIS_DIR = op.dirname(__file__) def test_streamline_gradients(): streamline = [[1, 2, 3], [4, 5, 6], [5, 6, 7], [8, 9, 10]] grads = np.array([[3, 3, 3], [2, 2, 2], [2, 2, 2], [3, 3, 3]]) npt.assert_array_equal(life.streamline_gradients(streamline), grads) def test_streamline_tensors(): # Small streamline streamline = [[1, 2, 3], [4, 5, 3], [5, 6, 3]] # Non-default eigenvalues: evals = [0.0012, 0.0006, 0.0004] streamline_tensors = life.streamline_tensors(streamline, evals=evals) npt.assert_array_almost_equal(streamline_tensors[0], np.array([[0.0009, 0.0003, 0.], [0.0003, 0.0009, 0.], [0., 0., 0.0004]])) # Get the eigenvalues/eigenvectors: eigvals, eigvecs = la.eig(streamline_tensors[0]) eigvecs = eigvecs[np.argsort(eigvals)[::-1]] eigvals = eigvals[np.argsort(eigvals)[::-1]] npt.assert_array_almost_equal(eigvals, np.array([0.0012, 0.0006, 0.0004])) npt.assert_array_almost_equal(eigvecs[0], np.array([0.70710678, -0.70710678, 0.])) # Another small streamline streamline = [[1, 0, 0], [2, 0, 0], [3, 0, 0]] streamline_tensors = life.streamline_tensors(streamline, evals=evals) for t in streamline_tensors: eigvals, eigvecs = la.eig(t) eigvecs = eigvecs[np.argsort(eigvals)[::-1]] eigvals = eigvals[np.argsort(eigvals)[::-1]] # This one has no rotations - all tensors are simply the canonical: npt.assert_almost_equal(np.rad2deg(np.arccos( np.dot(eigvecs[0], [1, 0, 0]))), 0) npt.assert_almost_equal(np.rad2deg(np.arccos( np.dot(eigvecs[1], [0, 1, 0]))), 0) npt.assert_almost_equal(np.rad2deg(np.arccos( np.dot(eigvecs[2], [0, 0, 1]))), 0) def test_streamline_signal(): data_file, bval_file, bvec_file = dpd.get_data('small_64D') gtab = dpg.gradient_table(bval_file, bvec_file) evals = [0.0015, 0.0005, 0.0005] streamline1 = [[[1, 2, 3], [4, 5, 3], [5, 6, 3], [6, 7, 3]], [[1, 2, 3], [4, 5, 3], [5, 6, 3]]] sig1 = [life.streamline_signal(s, gtab, evals) for s in streamline1] streamline2 = [[[1, 2, 3], [4, 5, 3], [5, 6, 3], [6, 7, 3]]] sig2 = [life.streamline_signal(s, gtab, evals) for s in streamline2] npt.assert_array_equal(streamline2[0], streamline1[0]) def test_voxel2streamline(): streamline = [[[1.1, 2.4, 2.9], [4, 5, 3], [5, 6, 3], [6, 7, 3]], [[1, 2, 3], [4, 5, 3], [5, 6, 3]]] affine = np.eye(4) v2f, v2fn = life.voxel2streamline(streamline, False, affine) npt.assert_equal(v2f, {0: [0, 1], 1: [0, 1], 2: [0, 1], 3: [0]}) npt.assert_equal(v2fn, {0: {0: [0], 1: [1], 2: [2], 3: [3]}, 1: {0: [0], 1: [1], 2: [2]}}) affine = np.array([[0.9, 0, 0, 10], [0, 0.9, 0, -100], [0, 0, 0.9, 2], [0, 0, 0, 1]]) xform_sl = life.transform_streamlines(streamline, np.linalg.inv(affine)) v2f, v2fn = life.voxel2streamline(xform_sl, False, affine) npt.assert_equal(v2f, {0: [0, 1], 1: [0, 1], 2: [0, 1], 3: [0]}) npt.assert_equal(v2fn, {0: {0: [0], 1: [1], 2: [2], 3: [3]}, 1: {0: [0], 1: [1], 2: [2]}}) def test_FiberModel_init(): # Get some small amount of data: data_file, bval_file, bvec_file = dpd.get_data('small_64D') data_ni = nib.load(data_file) data = data_ni.get_data() data_aff = data_ni.affine bvals, bvecs = (np.load(f) for f in (bval_file, bvec_file)) gtab = dpg.gradient_table(bvals, bvecs) FM = life.FiberModel(gtab) streamline = [[[1, 2, 3], [4, 5, 3], [5, 6, 3], [6, 7, 3]], [[1, 2, 3], [4, 5, 3], [5, 6, 3]]] affine = np.eye(4) for sphere in [None, False, dpd.get_sphere('symmetric362')]: fiber_matrix, vox_coords = FM.setup(streamline, affine, sphere=sphere) npt.assert_array_equal(np.array(vox_coords), np.array([[1, 2, 3], [4, 5, 3], [5, 6, 3], [6, 7, 3]])) npt.assert_equal(fiber_matrix.shape, 
(len(vox_coords) * 64, len(streamline))) def test_FiberFit(): data_file, bval_file, bvec_file = dpd.get_data('small_64D') data_ni = nib.load(data_file) data = data_ni.get_data() data_aff = data_ni.affine bvals, bvecs = (np.load(f) for f in (bval_file, bvec_file)) gtab = dpg.gradient_table(bvals, bvecs) FM = life.FiberModel(gtab) evals = [0.0015, 0.0005, 0.0005] streamline = [[[1, 2, 3], [4, 5, 3], [5, 6, 3], [6, 7, 3]], [[1, 2, 3], [4, 5, 3], [5, 6, 3]]] fiber_matrix, vox_coords = FM.setup(streamline, None, evals) w = np.array([0.5, 0.5]) sig = opt.spdot(fiber_matrix, w) + 1.0 # Add some isotropic stuff S0 = data[..., gtab.b0s_mask] rel_sig = data[..., ~gtab.b0s_mask]/data[..., gtab.b0s_mask] this_data = np.zeros((10, 10, 10, 64)) this_data[vox_coords[:, 0], vox_coords[:, 1], vox_coords[:, 2]] =\ (sig.reshape((4, 64)) * S0[vox_coords[:, 0], vox_coords[:, 1], vox_coords[:, 2]]) # Grab some realistic S0 values: this_data = np.concatenate([data[..., gtab.b0s_mask], this_data], -1) fit = FM.fit(this_data, streamline) npt.assert_almost_equal(fit.predict()[1], fit.data[1], decimal=-1) # Predict with an input GradientTable npt.assert_almost_equal(fit.predict(gtab)[1], fit.data[1], decimal=-1) npt.assert_almost_equal( this_data[vox_coords[:, 0], vox_coords[:, 1], vox_coords[:, 2]], fit.data) def test_fit_data(): fdata, fbval, fbvec = dpd.get_data('small_25') gtab = grad.gradient_table(fbval, fbvec) ni_data = nib.load(fdata) data = ni_data.get_data() dtmodel = dti.TensorModel(gtab) dtfit = dtmodel.fit(data) sphere = dpd.get_sphere() peak_idx = dti.quantize_evecs(dtfit.evecs, sphere.vertices) eu = edx.EuDX(dtfit.fa.astype('f8'), peak_idx, seeds=list(nd.ndindex(data.shape[:-1])), odf_vertices=sphere.vertices, a_low=0) tensor_streamlines = [streamline for streamline in eu] life_model = life.FiberModel(gtab) life_fit = life_model.fit(data, tensor_streamlines) model_error = life_fit.predict() - life_fit.data model_rmse = np.sqrt(np.mean(model_error ** 2, -1)) matlab_rmse, matlab_weights = dpd.matlab_life_results() # Lower error than the matlab implementation for these data: npt.assert_(np.median(model_rmse) < np.median(matlab_rmse)) # And a moderate correlation with the Matlab implementation weights: npt.assert_(np.corrcoef(matlab_weights, life_fit.beta)[0, 1] > 0.6) dipy-0.13.0/dipy/tracking/tests/test_localtrack.py000066400000000000000000000012561317371701200222070ustar00rootroot00000000000000import numpy as np import numpy.testing as npt from dipy.tracking.local.tissue_classifier import ThresholdTissueClassifier from dipy.data import default_sphere from dipy.direction import peaks_from_model def test_ThresholdTissueClassifier(): a = np.random.random((3, 5, 7)) mid = np.sort(a.ravel())[(3 * 5 * 7) // 2] ttc = ThresholdTissueClassifier(a, mid) for i in range(3): for j in range(5): for k in range(7): tissue = ttc.check_point(np.array([i, j, k], dtype=float)) if a[i, j, k] > mid: npt.assert_equal(tissue, 1) else: npt.assert_equal(tissue, 2) dipy-0.13.0/dipy/tracking/tests/test_metrics.py000066400000000000000000000244141317371701200215370ustar00rootroot00000000000000''' Testing track_metrics module ''' from __future__ import division, print_function, absolute_import from dipy.utils.six.moves import xrange import numpy as np from nose.tools import (assert_true, assert_false, assert_equal, assert_almost_equal) from numpy.testing import assert_array_equal, assert_array_almost_equal from dipy.tracking import metrics as tm from dipy.tracking import distances as pf def test_downsample(): t = np.array([[ 
82.20181274, 91.3650589 , 43.15737152], [ 82.3844223 , 91.79336548, 43.87036514], [ 82.48710632, 92.27861023, 44.56298065], [ 82.53310394, 92.7854538 , 45.24635315], [ 82.53793335, 93.26902008, 45.94785309], [ 82.48797607, 93.75003815, 46.6493988 ], [ 82.35533142, 94.2518158 , 47.32533264], [ 82.15484619, 94.76634216, 47.97451019], [ 81.90982819, 95.28792572, 48.6024437 ], [ 81.63336945, 95.78153229, 49.23971176], [ 81.35479736, 96.24868011, 49.89558792], [ 81.08713531, 96.69807434, 50.56812668], [ 80.81504822, 97.14285278, 51.24193192], [ 80.52591705, 97.56719971, 51.92168427], [ 80.26599884, 97.98269653, 52.61848068], [ 80.0463562 , 98.38131714, 53.3385582 ], [ 79.8469162 , 98.77052307, 54.06955338], [ 79.57667542, 99.13599396, 54.78985596], [ 79.23351288, 99.4320755 , 55.51065063], [ 78.84815979, 99.64141846, 56.24016571], [ 78.47383881, 99.77347565, 56.9929924 ], [ 78.12837219, 99.81330872, 57.76969528], [ 77.80438995, 99.85082245, 58.55574799], [ 77.4943924 , 99.88065338, 59.34777069], [ 77.21414185, 99.85343933, 60.15090561], [ 76.96416473, 99.82772827, 60.96406937], [ 76.74712372, 99.80519104, 61.78676605], [ 76.52263641, 99.79122162, 62.60765076], [ 76.03757477, 100.08692169, 63.24152374], [ 75.44867706, 100.3526535 , 63.79513168], [ 74.78033447, 100.57255554, 64.272789 ], [ 74.11605835, 100.7733078 , 64.76428986], [ 73.51222992, 100.98779297, 65.32373047], [ 72.97387695, 101.23387146, 65.93502045], [ 72.47355652, 101.49151611, 66.57343292], [ 71.99834442, 101.72480774, 67.2397995 ], [ 71.5690918 , 101.98665619, 67.92664337], [ 71.18083191, 102.29483795, 68.61888123], [ 70.81879425, 102.63343048, 69.31127167], [ 70.47422791, 102.98672485, 70.00532532], [ 70.10092926, 103.28502655, 70.70999908], [ 69.69512177, 103.51667023, 71.42147064], [ 69.27423096, 103.71351624, 72.13452911], [ 68.91260529, 103.81676483, 72.89796448], [ 68.60788727, 103.81982422, 73.69258118], [ 68.34162903, 103.7661972 , 74.49915314], [ 68.08542633, 103.70635223, 75.30856323], [ 67.83590698, 103.60187531, 76.11553955], [ 67.56822968, 103.4482193 , 76.90870667], [ 67.28399658, 103.25878906, 77.68825531], [ 67.00117493, 103.03740692, 78.45989227], [ 66.72718048, 102.80329895, 79.23099518], [ 66.4619751 , 102.54130554, 79.99622345], [ 66.20803833, 102.22305298, 80.7438736 ], [ 65.96872711, 101.88980865, 81.48987579], [ 65.72864532, 101.59316254, 82.25085449], [ 65.47808075, 101.33383942, 83.02194214], [ 65.21841431, 101.11295319, 83.80186462], [ 64.95678711, 100.94080353, 84.59326935], [ 64.71759033, 100.82022095, 85.40114594], [ 64.48053741, 100.73490143, 86.21411896], [ 64.24304199, 100.65074158, 87.02709198], [ 64.01773834, 100.55318451, 87.84204865], [ 63.83801651, 100.41996765, 88.66333008], [ 63.70982361, 100.25119019, 89.48779297], [ 63.60707855, 100.06730652, 90.31262207], [ 63.46164322, 99.91001892, 91.13648224], [ 63.26287842, 99.78648376, 91.95485687], [ 63.03713226, 99.68377686, 92.76905823], [ 62.81192398, 99.56619263, 93.58140564], [ 62.57145309, 99.42708588, 94.38592529], [ 62.32259369, 99.25592804, 95.18167114], [ 62.07497787, 99.05770111, 95.97154236], [ 61.82253647, 98.83877563, 96.7543869 ], [ 61.59536743, 98.59293365, 97.5370636 ], [ 61.46530151, 98.30503845, 98.32772827], [ 61.39904785, 97.97928619, 99.11172485], [ 61.33279419, 97.65353394, 99.89572906], [ 61.26067352, 97.30914307, 100.67123413], [ 61.19459534, 96.96743011, 101.44847107], [ 61.1958046 , 96.63417053, 102.23215485], [ 61.26572037, 96.2988739 , 103.01185608], [ 61.39840698, 95.96297455, 103.78307343], [ 61.5720787 , 95.6426239 , 
104.55268097], [ 61.78163528, 95.35540771, 105.32629395], [ 62.06700134, 95.09746552, 106.08564758], [ 62.39427185, 94.8572464 , 106.83369446], [ 62.74076462, 94.62278748, 107.57482147], [ 63.11461639, 94.40107727, 108.30641937], [ 63.53397751, 94.20418549, 109.02002716], [ 64.00019836, 94.03809357, 109.71183777], [ 64.43580627, 93.87523651, 110.42416382], [ 64.84857941, 93.69993591, 111.14715576], [ 65.26740265, 93.51858521, 111.86515808], [ 65.69511414, 93.3671875 , 112.58474731], [ 66.10470581, 93.22719574, 113.31711578], [ 66.45891571, 93.06028748, 114.07256317], [ 66.78582001, 92.90560913, 114.84281921], [ 67.11138916, 92.79004669, 115.6204071 ], [ 67.44729614, 92.75711823, 116.40135193], [ 67.75688171, 92.98265076, 117.16111755], [ 68.02041626, 93.28012848, 117.91371155], [ 68.25725555, 93.53466797, 118.69052124], [ 68.46047974, 93.63263702, 119.51107788], [ 68.62039948, 93.62007141, 120.34690094], [ 68.76782227, 93.56475067, 121.18331909], [ 68.90222168, 93.46326447, 122.01765442], [ 68.99872589, 93.30039978, 122.84759521], [ 69.04119873, 93.05428314, 123.66156769], [ 69.05086517, 92.74394989, 124.45450592], [ 69.02742004, 92.40427399, 125.23509979], [ 68.95466614, 92.09059143, 126.02339935], [ 68.84975433, 91.7967453 , 126.81564331], [ 68.72673798, 91.53726196, 127.61715698], [ 68.6068573 , 91.3030014 , 128.42681885], [ 68.50636292, 91.12481689, 129.25317383], [ 68.39311218, 91.01572418, 130.08976746], [ 68.25946808, 90.94654083, 130.92756653]], dtype=np.float32) pts = 12 td = tm.downsample(t, pts) # print td assert_equal(len(td), pts) res = [] t = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]], 'f4') for pts in range(3, 200): td = tm.downsample(t, pts) res.append(pts-len(td)) assert_equal(np.sum(res), 0) """ from dipy.data import get_data from nibabel import trackvis as tv streams, hdr = tv.read(get_data('fornix')) Td = [tm.downsample(s[0], pts) for s in streams] T = [s[0] for s in streams] from dipy.viz import fvtk r = fvtk.ren() fvtk.add(r, fvtk.line(T, fvtk.red)) fvtk.add(r, fvtk.line(Td, fvtk.green)) fvtk.show(r) """ def test_splines(): # create a helix t = np.linspace(0, 1.75*2*np.pi, 100) x = np.sin(t) y = np.cos(t) z = t # add noise x += np.random.normal(scale=0.1, size=x.shape) y += np.random.normal(scale=0.1, size=y.shape) z += np.random.normal(scale=0.1, size=z.shape) xyz = np.vstack((x, y, z)).T # get the B-splines smoothed result xyzn = tm.spline(xyz, 3, 2, -1) def test_segment_intersection(): xyz = np.array([[1, 1, 1], [2, 2, 2], [2, 2, 2]]) center = [10, 4, 10] radius = 1 assert_equal(tm.intersect_sphere(xyz, center, radius), False) xyz = np.array([[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4]]) center = [10, 10, 10] radius = 2 assert_equal(tm.intersect_sphere(xyz, center, radius), False) xyz = np.array([[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4]]) center = [2.1, 2, 2.2] radius = 2 assert_equal(tm.intersect_sphere(xyz, center, radius), True) def test_normalized_3vec(): vec = [1, 2, 3] l2n = np.sqrt(np.dot(vec, vec)) assert_array_almost_equal(l2n, pf.norm_3vec(vec)) nvec = pf.normalized_3vec(vec) assert_array_almost_equal(np.array(vec) / l2n, nvec) vec = np.array([[1, 2, 3]]) assert_equal(vec.shape, (1, 3)) assert_equal(pf.normalized_3vec(vec).shape, (3,)) def test_inner_3vecs(): vec1 = [1, 2.3, 3] vec2 = [2, 3, 4.3] assert_array_almost_equal(np.inner(vec1, vec2), pf.inner_3vecs(vec1, vec2)) vec2 = [2, -3, 4.3] assert_array_almost_equal(np.inner(vec1, vec2), pf.inner_3vecs(vec1, vec2)) def test_add_sub_3vecs(): vec1 = np.array([1, 2.3, 3]) vec2 = np.array([2, 3, 4.3]) 
assert_array_almost_equal(vec1 - vec2, pf.sub_3vecs(vec1, vec2)) assert_array_almost_equal(vec1 + vec2, pf.add_3vecs(vec1, vec2)) vec2 = [2, -3, 4.3] assert_array_almost_equal(vec1 - vec2, pf.sub_3vecs(vec1, vec2)) assert_array_almost_equal(vec1 + vec2, pf.add_3vecs(vec1, vec2)) def test_winding(): t = np.array([[63.90763092, 66.25634766, 74.84692383], [63.19578171, 65.95800018, 74.77872467], [61.79797363, 64.91297913, 75.04083252], [60.22916412, 64.11988068, 75.12763214], [59.47861481, 63.50800323, 75.25228882], [58.29077911, 62.88838959, 75.59411621], [57.40341568, 62.48369217, 75.46385193], [56.08355713, 61.64668274, 75.50260162], [54.88656616, 60.34751129, 75.49420929], [52.57548523, 58.3325882 , 76.18450928], [50.99916077, 56.06463623, 76.07842255], [50.2379303 , 54.92457962, 76.14080811], [49.29185867, 54.21960449, 76.04216003], [48.56259918, 53.58783722, 75.95063782], [48.13407516, 53.19916534, 75.91035461], [47.29430389, 52.12264252, 76.05912018]], dtype=np.float32) assert_equal(np.isnan(tm.winding(t)), False) dipy-0.13.0/dipy/tracking/tests/test_propagation.py000066400000000000000000000163351317371701200224170ustar00rootroot00000000000000import os import numpy as np import numpy.testing from dipy.data import get_data, get_sphere from dipy.core.gradients import gradient_table from dipy.reconst.gqi import GeneralizedQSamplingModel from dipy.reconst.dti import TensorModel, quantize_evecs from dipy.tracking import utils from dipy.tracking.eudx import EuDX from dipy.tracking.propspeed import ndarray_offset, eudx_both_directions from dipy.tracking.metrics import length from dipy.tracking.propspeed import map_coordinates_trilinear_iso import nibabel as ni from nose.tools import assert_true, assert_false, \ assert_equal, assert_raises, assert_almost_equal from numpy.testing import (assert_array_equal, assert_array_almost_equal, run_module_suite) def stepped_1d(arr_1d): # Make a version of `arr_1d` which is not contiguous return np.vstack((arr_1d, arr_1d)).ravel(order='F')[::2] def test_offset(): # Test ndarray_offset function for dt in (np.int32, np.float64): index = np.array([1, 1], dtype=np.intp) A = np.array([[1, 0, 0], [0, 2, 0], [0, 0, 3]], dtype=dt) strides = np.array(A.strides, np.intp) i_size = A.dtype.itemsize assert_equal(ndarray_offset(index, strides, 2, i_size), 4) assert_equal(A.ravel()[4], A[1, 1]) # Index and strides arrays must be C-continuous. Test this is enforced # by using non-contiguous versions of the input arrays. assert_raises(ValueError, ndarray_offset, stepped_1d(index), strides, 2, i_size) assert_raises(ValueError, ndarray_offset, index, stepped_1d(strides), 2, i_size) def test_trilinear_interp_cubic_voxels(): A = np.ones((17, 17, 17)) B = np.zeros(3) strides = np.array(A.strides, np.intp) A[7, 7, 7] = 2 points = np.array([[0, 0, 0], [7., 7.5, 7.], [3.5, 3.5, 3.5]]) map_coordinates_trilinear_iso(A, points, strides, 3, B) assert_array_almost_equal(B, np.array([1., 1.5, 1.])) # All of the input array, points array, strides array and output array must # be C-contiguous. Check by passing in versions that aren't C contiguous assert_raises(ValueError, map_coordinates_trilinear_iso, A.copy(order='F'), points, strides, 3, B) assert_raises(ValueError, map_coordinates_trilinear_iso, A, points.copy(order='F'), strides, 3, B) assert_raises(ValueError, map_coordinates_trilinear_iso, A, points, stepped_1d(strides), 3, B) assert_raises(ValueError, map_coordinates_trilinear_iso, A, points, strides, 3, stepped_1d(B)) def test_eudx_further(): """ Cause we love testin.. 
;-) """ fimg, fbvals, fbvecs = get_data('small_101D') img = ni.load(fimg) affine = img.affine data = img.get_data() gtab = gradient_table(fbvals, fbvecs) tensor_model = TensorModel(gtab) ten = tensor_model.fit(data) x, y, z = data.shape[:3] seeds = np.zeros((10**4, 3)) for i in range(10**4): rx = (x-1)*np.random.rand() ry = (y-1)*np.random.rand() rz = (z-1)*np.random.rand() seeds[i] = np.ascontiguousarray(np.array([rx, ry, rz]), dtype=np.float64) sphere = get_sphere('symmetric724') ind = quantize_evecs(ten.evecs) eu = EuDX(a=ten.fa, ind=ind, seeds=seeds, odf_vertices=sphere.vertices, a_low=.2) T = [e for e in eu] # check that there are no negative elements for t in T: assert_equal(np.sum(t.ravel() < 0), 0) # Test eudx with affine def random_affine(seeds): affine = np.eye(4) affine[:3, :] = np.random.random((3, 4)) seeds = np.dot(seeds, affine[:3, :3].T) seeds += affine[:3, 3] return affine, seeds # Make two random affines and move seeds affine1, seeds1 = random_affine(seeds) affine2, seeds2 = random_affine(seeds) # Make tracks using different affines eu1 = EuDX(a=ten.fa, ind=ind, odf_vertices=sphere.vertices, seeds=seeds1, a_low=.2, affine=affine1) eu2 = EuDX(a=ten.fa, ind=ind, odf_vertices=sphere.vertices, seeds=seeds2, a_low=.2, affine=affine2) # Move from eu2 affine2 to affine1 eu2_to_eu1 = utils.move_streamlines(eu2, output_space=affine1, input_space=affine2) # Check that the tracks are the same for sl1, sl2 in zip(eu1, eu2_to_eu1): assert_array_almost_equal(sl1, sl2) def test_eudx_bad_seed(): """Test passing a bad seed to eudx""" fimg, fbvals, fbvecs = get_data('small_101D') img = ni.load(fimg) affine = img.affine data = img.get_data() gtab = gradient_table(fbvals, fbvecs) tensor_model = TensorModel(gtab) ten = tensor_model.fit(data) ind = quantize_evecs(ten.evecs) sphere = get_sphere('symmetric724') seed = [1000000., 1000000., 1000000.] eu = EuDX(a=ten.fa, ind=ind, seeds=[seed], odf_vertices=sphere.vertices, a_low=.2) assert_raises(ValueError, list, eu) print(data.shape) seed = [1., 5., 8.] eu = EuDX(a=ten.fa, ind=ind, seeds=[seed], odf_vertices=sphere.vertices, a_low=.2) track = list(eu) seed = [-1., 1000000., 1000000.] eu = EuDX(a=ten.fa, ind=ind, seeds=[seed], odf_vertices=sphere.vertices, a_low=.2) assert_raises(ValueError, list, eu) def test_eudx_boundaries(): """ This test checks that the tracking will exclude seeds in both directions. Here we create a volume of shape (50, 60, 40) and we will add 2 seeds exactly at the volume's boundaries (49, 0, 0) and (0, 0, 0). Those should not generate any streamlines as EuDX does not interpolate on the boundary voxels. We also add 3 seeds not in the boundaries which should generate streamlines without a problem. """ fa = np.ones((50, 60, 40)) ind = np.zeros(fa.shape) sphere = get_sphere('repulsion724') seed = [49., 0, 0] seed2 = [0., 0, 0] seed3 = [48., 0, 0] seed4 = [1., 0, 0] seed5 = [5., 5, 5] eu = EuDX(a=fa, ind=ind, seeds=[seed, seed2, seed3, seed4, seed5], odf_vertices=sphere.vertices, a_low=.2, total_weight=0.) track = list(eu) assert_equal(len(track), 3) def test_eudx_both_directions_errors(): # Test error conditions for both directions function sphere = get_sphere('symmetric724') seed = np.zeros(3, np.float64) qa = np.zeros((4, 5, 6, 7), np.float64) ind = qa.copy() # All of seed, qa, ind, odf_vertices must be C-contiguous. 
Check by # passing in versions that aren't C contiguous assert_raises(ValueError, eudx_both_directions, stepped_1d(seed), 0, qa, ind, sphere.vertices, 0.5, 0.1, 1., 1., 2) assert_raises(ValueError, eudx_both_directions, seed, 0, qa[..., ::2], ind, sphere.vertices, 0.5, 0.1, 1., 1., 2) assert_raises(ValueError, eudx_both_directions, seed, 0, qa, ind[..., ::2], sphere.vertices, 0.5, 0.1, 1., 1., 2) assert_raises(ValueError, eudx_both_directions, seed, 0, qa, ind, sphere.vertices[::2], 0.5, 0.1, 1., 1., 2) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/tracking/tests/test_streamline.py000066400000000000000000001300101317371701200222220ustar00rootroot00000000000000from __future__ import print_function import types import numpy as np from numpy.linalg import norm import numpy.testing as npt from dipy.testing.memory import get_type_refcount from dipy.testing import assert_arrays_equal from nose.tools import assert_true, assert_equal, assert_almost_equal from numpy.testing import (assert_array_equal, assert_array_almost_equal, assert_raises, run_module_suite) from dipy.tracking import Streamlines import dipy.tracking.utils as ut from dipy.tracking.streamline import (set_number_of_points, length, relist_streamlines, unlist_streamlines, center_streamlines, transform_streamlines, select_random_set_of_streamlines, compress_streamlines, select_by_rois, orient_by_rois, values_from_volume) streamline = np.array([[82.20181274, 91.36505890, 43.15737152], [82.38442230, 91.79336548, 43.87036514], [82.48710632, 92.27861023, 44.56298065], [82.53310394, 92.78545380, 45.24635315], [82.53793335, 93.26902008, 45.94785309], [82.48797607, 93.75003815, 46.64939880], [82.35533142, 94.25181580, 47.32533264], [82.15484619, 94.76634216, 47.97451019], [81.90982819, 95.28792572, 48.60244370], [81.63336945, 95.78153229, 49.23971176], [81.35479736, 96.24868011, 49.89558792], [81.08713531, 96.69807434, 50.56812668], [80.81504822, 97.14285278, 51.24193192], [80.52591705, 97.56719971, 51.92168427], [80.26599884, 97.98269653, 52.61848068], [80.04635620, 98.38131714, 53.33855820], [79.84691620, 98.77052307, 54.06955338], [79.57667542, 99.13599396, 54.78985596], [79.23351288, 99.43207550, 55.51065063], [78.84815979, 99.64141846, 56.24016571], [78.47383881, 99.77347565, 56.99299240], [78.12837219, 99.81330872, 57.76969528], [77.80438995, 99.85082245, 58.55574799], [77.49439240, 99.88065338, 59.34777069], [77.21414185, 99.85343933, 60.15090561], [76.96416473, 99.82772827, 60.96406937], [76.74712372, 99.80519104, 61.78676605], [76.52263641, 99.79122162, 62.60765076], [76.03757477, 100.08692169, 63.24152374], [75.44867706, 100.35265350, 63.79513168], [74.78033447, 100.57255554, 64.27278900], [74.11605835, 100.77330780, 64.76428986], [73.51222992, 100.98779297, 65.32373047], [72.97387695, 101.23387146, 65.93502045], [72.47355652, 101.49151611, 66.57343292], [71.99834442, 101.72480774, 67.23979950], [71.56909180, 101.98665619, 67.92664337], [71.18083191, 102.29483795, 68.61888123], [70.81879425, 102.63343048, 69.31127167], [70.47422791, 102.98672485, 70.00532532], [70.10092926, 103.28502655, 70.70999908], [69.69512177, 103.51667023, 71.42147064], [69.27423096, 103.71351624, 72.13452911], [68.91260529, 103.81676483, 72.89796448], [68.60788727, 103.81982422, 73.69258118], [68.34162903, 103.76619720, 74.49915314], [68.08542633, 103.70635223, 75.30856323], [67.83590698, 103.60187531, 76.11553955], [67.56822968, 103.44821930, 76.90870667], [67.28399658, 103.25878906, 77.68825531], [67.00117493, 103.03740692, 78.45989227], 
[66.72718048, 102.80329895, 79.23099518], [66.46197510, 102.54130554, 79.99622345], [66.20803833, 102.22305298, 80.74387360], [65.96872711, 101.88980865, 81.48987579], [65.72864532, 101.59316254, 82.25085449], [65.47808075, 101.33383942, 83.02194214], [65.21841431, 101.11295319, 83.80186462], [64.95678711, 100.94080353, 84.59326935], [64.71759033, 100.82022095, 85.40114594], [64.48053741, 100.73490143, 86.21411896], [64.24304199, 100.65074158, 87.02709198], [64.01773834, 100.55318451, 87.84204865], [63.83801651, 100.41996765, 88.66333008], [63.70982361, 100.25119019, 89.48779297], [63.60707855, 100.06730652, 90.31262207], [63.46164322, 99.91001892, 91.13648224], [63.26287842, 99.78648376, 91.95485687], [63.03713226, 99.68377686, 92.76905823], [62.81192398, 99.56619263, 93.58140564], [62.57145309, 99.42708588, 94.38592529], [62.32259369, 99.25592804, 95.18167114], [62.07497787, 99.05770111, 95.97154236], [61.82253647, 98.83877563, 96.75438690], [61.59536743, 98.59293365, 97.53706360], [61.46530151, 98.30503845, 98.32772827], [61.39904785, 97.97928619, 99.11172485], [61.33279419, 97.65353394, 99.89572906], [61.26067352, 97.30914307, 100.67123413], [61.19459534, 96.96743011, 101.44847107], [61.19580460, 96.63417053, 102.23215485], [61.26572037, 96.29887390, 103.01185608], [61.39840698, 95.96297455, 103.78307343], [61.57207870, 95.64262390, 104.55268097], [61.78163528, 95.35540771, 105.32629395], [62.06700134, 95.09746552, 106.08564758], [62.39427185, 94.85724640, 106.83369446], [62.74076462, 94.62278748, 107.57482147], [63.11461639, 94.40107727, 108.30641937], [63.53397751, 94.20418549, 109.02002716], [64.00019836, 94.03809357, 109.71183777], [64.43580627, 93.87523651, 110.42416382], [64.84857941, 93.69993591, 111.14715576], [65.26740265, 93.51858521, 111.86515808], [65.69511414, 93.36718750, 112.58474731], [66.10470581, 93.22719574, 113.31711578], [66.45891571, 93.06028748, 114.07256317], [66.78582001, 92.90560913, 114.84281921], [67.11138916, 92.79004669, 115.62040710], [67.44729614, 92.75711823, 116.40135193], [67.75688171, 92.98265076, 117.16111755], [68.02041626, 93.28012848, 117.91371155], [68.25725555, 93.53466797, 118.69052124], [68.46047974, 93.63263702, 119.51107788], [68.62039948, 93.62007141, 120.34690094], [68.76782227, 93.56475067, 121.18331909], [68.90222168, 93.46326447, 122.01765442], [68.99872589, 93.30039978, 122.84759521], [69.04119873, 93.05428314, 123.66156769], [69.05086517, 92.74394989, 124.45450592], [69.02742004, 92.40427399, 125.23509979], [68.95466614, 92.09059143, 126.02339935], [68.84975433, 91.79674530, 126.81564331], [68.72673798, 91.53726196, 127.61715698], [68.60685730, 91.30300140, 128.42681885], [68.50636292, 91.12481689, 129.25317383], [68.39311218, 91.01572418, 130.08976746], [68.25946808, 90.94654083, 130.92756653]], dtype=np.float32) streamline_64bit = streamline.astype(np.float64) streamlines = [streamline[[0, 10]], streamline, streamline[::2], streamline[::3], streamline[::5], streamline[::6]] streamlines_64bit = [streamline_64bit[[0, 10]], streamline_64bit, streamline_64bit[::2], streamline_64bit[::3], streamline_64bit[::4], streamline_64bit[::5]] heterogeneous_streamlines = [streamline_64bit, streamline_64bit.reshape((-1, 6)), streamline_64bit.reshape((-1, 2))] def length_python(xyz, along=False): xyz = np.asarray(xyz, dtype=np.float64) if xyz.shape[0] < 2: if along: return np.array([0]) return 0 dists = np.sqrt((np.diff(xyz, axis=0)**2).sum(axis=1)) if along: return np.cumsum(dists) return np.sum(dists) def set_number_of_points_python(xyz, 
n_pols=3): def _extrap(xyz, cumlen, distance): ''' Helper function for extrapolate ''' ind = np.where((cumlen-distance) > 0)[0][0] len0 = cumlen[ind-1] len1 = cumlen[ind] Ds = distance-len0 Lambda = Ds/(len1-len0) return Lambda*xyz[ind] + (1-Lambda)*xyz[ind-1] cumlen = np.zeros(xyz.shape[0]) cumlen[1:] = length_python(xyz, along=True) step = cumlen[-1] / (n_pols-1) ar = np.arange(0, cumlen[-1], step) if np.abs(ar[-1] - cumlen[-1]) < np.finfo('f4').eps: ar = ar[:-1] xyz2 = [_extrap(xyz, cumlen, distance) for distance in ar] return np.vstack((np.array(xyz2), xyz[-1])) def test_set_number_of_points(): # Test resampling of only one streamline nb_points = 12 new_streamline_cython = set_number_of_points( streamline, nb_points) new_streamline_python = set_number_of_points_python( streamline, nb_points) assert_equal(len(new_streamline_cython), nb_points) # Using a 5 digits precision because of streamline is in float32. assert_array_almost_equal(new_streamline_cython, new_streamline_python, 5) new_streamline_cython = set_number_of_points( streamline_64bit, nb_points) new_streamline_python = set_number_of_points_python( streamline_64bit, nb_points) assert_equal(len(new_streamline_cython), nb_points) assert_array_almost_equal(new_streamline_cython, new_streamline_python) res = [] simple_streamline = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]], 'f4') for nb_points in range(2, 200): new_streamline_cython = set_number_of_points( simple_streamline, nb_points) res.append(nb_points - len(new_streamline_cython)) assert_equal(np.sum(res), 0) # Test resampling of multiple streamlines of different nb_points nb_points = 12 new_streamlines_cython = set_number_of_points( streamlines, nb_points) for i, s in enumerate(streamlines): new_streamline_python = set_number_of_points_python(s, nb_points) # Using a 5 digits precision because of streamline is in float32. 
assert_array_almost_equal(new_streamlines_cython[i], new_streamline_python, 5) # ArraySequence arrseq = Streamlines(streamlines) new_streamlines_as_seq_cython = set_number_of_points(arrseq, nb_points) assert_array_almost_equal(new_streamlines_as_seq_cython, new_streamlines_cython) new_streamlines_cython = set_number_of_points( streamlines_64bit, nb_points) for i, s in enumerate(streamlines_64bit): new_streamline_python = set_number_of_points_python(s, nb_points) assert_array_almost_equal(new_streamlines_cython[i], new_streamline_python) # ArraySequence arrseq = Streamlines(streamlines_64bit) new_streamlines_as_seq_cython = set_number_of_points(arrseq, nb_points) assert_array_almost_equal(new_streamlines_as_seq_cython, new_streamlines_cython) # Test streamlines with mixed dtype streamlines_mixed_dtype = [streamline, streamline.astype(np.float64), streamline.astype(np.int32), streamline.astype(np.int64)] nb_points_mixed_dtype = [len(s) for s in set_number_of_points( streamlines_mixed_dtype, nb_points)] assert_array_equal(nb_points_mixed_dtype, [nb_points] * len(streamlines_mixed_dtype)) # Test streamlines with different shape new_streamlines_cython = set_number_of_points( heterogeneous_streamlines, nb_points) for i, s in enumerate(heterogeneous_streamlines): new_streamline_python = set_number_of_points_python(s, nb_points) assert_array_almost_equal(new_streamlines_cython[i], new_streamline_python) # Test streamline with integer dtype new_streamline = set_number_of_points(streamline.astype(np.int32)) assert_true(new_streamline.dtype == np.float32) new_streamline = set_number_of_points(streamline.astype(np.int64)) assert_true(new_streamline.dtype == np.float64) # Test empty list assert_equal(set_number_of_points([]), []) # Test streamline having only one point assert_raises(ValueError, set_number_of_points, np.array([[1, 2, 3]])) # We do not support list of lists, it should be numpy ndarray. streamline_unsupported = [[1, 2, 3], [4, 5, 5], [2, 1, 3], [4, 2, 1]] assert_raises(AttributeError, set_number_of_points, streamline_unsupported) # Test setting number of points of a numpy with flag WRITABLE=False streamline_readonly = streamline.copy() streamline_readonly.setflags(write=False) assert_equal(len(set_number_of_points(streamline_readonly, nb_points=42)), 42) # Test setting computing length of a numpy with flag WRITABLE=False streamlines_readonly = [] for s in streamlines: streamlines_readonly.append(s.copy()) streamlines_readonly[-1].setflags(write=False) assert_equal(len(set_number_of_points(streamlines_readonly, nb_points=42)), len(streamlines_readonly)) streamlines_readonly = [] for s in streamlines_64bit: streamlines_readonly.append(s.copy()) streamlines_readonly[-1].setflags(write=False) assert_equal(len(set_number_of_points(streamlines_readonly, nb_points=42)), len(streamlines_readonly)) # Test if nb_points is less than 2 assert_raises(ValueError, set_number_of_points, [np.ones((10, 3)), np.ones((10, 3))], nb_points=1) def test_set_number_of_points_memory_leaks(): # Test some dtypes dtypes = [np.float32, np.float64, np.int32, np.int64] for dtype in dtypes: rng = np.random.RandomState(1234) NB_STREAMLINES = 10000 streamlines = [rng.randn(rng.randint(10, 100), 3).astype(dtype) for _ in range(NB_STREAMLINES)] list_refcount_before = get_type_refcount()["list"] rstreamlines = set_number_of_points(streamlines, nb_points=2) list_refcount_after = get_type_refcount()["list"] del rstreamlines # Delete `rstreamlines` because it holds a reference # to `list`. 
# Calling `set_number_of_points` should increase the refcount of `list` # by one since we kept the returned value. assert_equal(list_refcount_after, list_refcount_before+1) # Test mixed dtypes rng = np.random.RandomState(1234) NB_STREAMLINES = 10000 streamlines = [] for i in range(NB_STREAMLINES): dtype = dtypes[i % len(dtypes)] streamlines.append(rng.randn(rng.randint(10, 100), 3).astype(dtype)) list_refcount_before = get_type_refcount()["list"] rstreamlines = set_number_of_points(streamlines, nb_points=2) list_refcount_after = get_type_refcount()["list"] # Calling `set_number_of_points` should increase the refcount of `list` # by one since we kept the returned value. assert_equal(list_refcount_after, list_refcount_before+1) def test_length(): # Test length of only one streamline length_streamline_cython = length(streamline) length_streamline_python = length_python(streamline) assert_almost_equal(length_streamline_cython, length_streamline_python) length_streamline_cython = length(streamline_64bit) length_streamline_python = length_python(streamline_64bit) assert_almost_equal(length_streamline_cython, length_streamline_python) # Test computing length of multiple streamlines of different nb_points length_streamlines_cython = length(streamlines) for i, s in enumerate(streamlines): length_streamline_python = length_python(s) assert_array_almost_equal(length_streamlines_cython[i], length_streamline_python) length_streamlines_cython = length(streamlines_64bit) for i, s in enumerate(streamlines_64bit): length_streamline_python = length_python(s) assert_array_almost_equal(length_streamlines_cython[i], length_streamline_python) # ArraySequence # Test length of only one streamline length_streamline_cython = length(streamline_64bit) length_streamline_arrseq = length(Streamlines([streamline])) assert_almost_equal(length_streamline_arrseq, length_streamline_cython) length_streamline_cython = length(streamline_64bit) length_streamline_arrseq = length(Streamlines([streamline_64bit])) assert_almost_equal(length_streamline_arrseq, length_streamline_cython) # Test computing length of multiple streamlines of different nb_points length_streamlines_cython = length(streamlines) length_streamlines_arrseq = length(Streamlines(streamlines)) assert_array_almost_equal(length_streamlines_arrseq, length_streamlines_cython) length_streamlines_cython = length(streamlines_64bit) length_streamlines_arrseq = length(Streamlines(streamlines_64bit)) assert_array_almost_equal(length_streamlines_arrseq, length_streamlines_cython) # Test on a sliced ArraySequence length_streamlines_cython = length(streamlines_64bit[::2]) length_streamlines_arrseq = length(Streamlines(streamlines_64bit)[::2]) assert_array_almost_equal(length_streamlines_arrseq, length_streamlines_cython) length_streamlines_cython = length(streamlines[::-1]) length_streamlines_arrseq = length(Streamlines(streamlines)[::-1]) assert_array_almost_equal(length_streamlines_arrseq, length_streamlines_cython) # Test streamlines having mixed dtype streamlines_mixed_dtype = [streamline, streamline.astype(np.float64), streamline.astype(np.int32), streamline.astype(np.int64)] lengths_mixed_dtype = [length(s) for s in streamlines_mixed_dtype] assert_array_equal(length(streamlines_mixed_dtype), lengths_mixed_dtype) # Test streamlines with different shape length_streamlines_cython = length( heterogeneous_streamlines) for i, s in enumerate(heterogeneous_streamlines): length_streamline_python = length_python(s) assert_array_almost_equal(length_streamlines_cython[i], 
length_streamline_python) # Test streamline having integer dtype length_streamline = length(streamline.astype('int')) assert_true(length_streamline.dtype == np.float64) # Test empty list assert_equal(length([]), 0.0) # Test streamline having only one point assert_equal(length(np.array([[1, 2, 3]])), 0.0) # We do not support list of lists, it should be numpy ndarray. streamline_unsupported = [[1, 2, 3], [4, 5, 5], [2, 1, 3], [4, 2, 1]] assert_raises(AttributeError, length, streamline_unsupported) # Test setting computing length of a numpy with flag WRITABLE=False streamlines_readonly = [] for s in streamlines: streamlines_readonly.append(s.copy()) streamlines_readonly[-1].setflags(write=False) assert_array_almost_equal(length(streamlines_readonly), [length_python(s) for s in streamlines_readonly]) streamlines_readonly = [] for s in streamlines_64bit: streamlines_readonly.append(s.copy()) streamlines_readonly[-1].setflags(write=False) assert_array_almost_equal(length(streamlines_readonly), [length_python(s) for s in streamlines_readonly]) def test_length_memory_leaks(): # Test some dtypes dtypes = [np.float32, np.float64, np.int32, np.int64] for dtype in dtypes: rng = np.random.RandomState(1234) NB_STREAMLINES = 10000 streamlines = [rng.randn(rng.randint(10, 100), 3).astype(dtype) for _ in range(NB_STREAMLINES)] list_refcount_before = get_type_refcount()["list"] lengths = length(streamlines) list_refcount_after = get_type_refcount()["list"] # Calling `length` shouldn't increase the refcount of `list` # since the return value is a numpy array. assert_equal(list_refcount_after, list_refcount_before) # Test mixed dtypes rng = np.random.RandomState(1234) NB_STREAMLINES = 10000 streamlines = [] for i in range(NB_STREAMLINES): dtype = dtypes[i % len(dtypes)] streamlines.append(rng.randn(rng.randint(10, 100), 3).astype(dtype)) list_refcount_before = get_type_refcount()["list"] lengths = length(streamlines) list_refcount_after = get_type_refcount()["list"] # Calling `length` shouldn't increase the refcount of `list` # since the return value is a numpy array. 
assert_equal(list_refcount_after, list_refcount_before) def test_unlist_relist_streamlines(): streamlines = [np.random.rand(10, 3), np.random.rand(20, 3), np.random.rand(5, 3)] points, offsets = unlist_streamlines(streamlines) assert_equal(offsets.dtype, np.dtype('i8')) assert_equal(points.shape, (35, 3)) assert_equal(len(offsets), len(streamlines)) streamlines2 = relist_streamlines(points, offsets) assert_equal(len(streamlines), len(streamlines2)) for i in range(len(streamlines)): assert_array_equal(streamlines[i], streamlines2[i]) def test_center_and_transform(): A = np.array([[1, 2, 3], [1, 2, 3.]]) streamlines = [A for i in range(10)] streamlines2, center = center_streamlines(streamlines) B = np.zeros((2, 3)) assert_array_equal(streamlines2[0], B) assert_array_equal(center, A[0]) affine = np.eye(4) affine[0, 0] = 2 affine[:3, -1] = - np.array([2, 1, 1]) * center streamlines3 = transform_streamlines(streamlines, affine) assert_array_equal(streamlines3[0], B) def test_select_random_streamlines(): streamlines = [np.random.rand(10, 3), np.random.rand(20, 3), np.random.rand(5, 3)] new_streamlines = select_random_set_of_streamlines(streamlines, 2) assert_equal(len(new_streamlines), 2) new_streamlines = select_random_set_of_streamlines(streamlines, 4) assert_equal(len(new_streamlines), 3) def compress_streamlines_python(streamline, tol_error=0.01, max_segment_length=10): """ Python version of the FiberCompression found on https://github.com/scilus/FiberCompression. """ if streamline.shape[0] <= 2: return streamline.copy() # Euclidean distance def segment_length(prev, next): return np.sqrt(((prev-next)**2).sum()) # Projection of a 3D point on a 3D line, minimal distance def dist_to_line(prev, next, curr): return norm(np.cross(next-prev, curr-next)) / norm(next-prev) nb_points = 0 compressed_streamline = np.zeros_like(streamline) # Copy first point since it is always kept. compressed_streamline[0, :] = streamline[0, :] nb_points += 1 prev = streamline[0] prev_id = 0 for next_id, next in enumerate(streamline[2:], start=2): # Euclidean distance between last added point and current point. if segment_length(prev, next) > max_segment_length: compressed_streamline[nb_points, :] = streamline[next_id-1, :] nb_points += 1 prev = streamline[next_id-1] prev_id = next_id-1 continue # Check that each point is not offset by more than `tol_error` mm. for o, curr in enumerate(streamline[prev_id+1:next_id], start=prev_id+1): dist = dist_to_line(prev, next, curr) if np.isnan(dist) or dist > tol_error: compressed_streamline[nb_points, :] = streamline[next_id-1, :] nb_points += 1 prev = streamline[next_id-1] prev_id = next_id-1 break # Copy last point since it is always kept. compressed_streamline[nb_points, :] = streamline[-1, :] nb_points += 1 # Make sure the array have the correct size return compressed_streamline[:nb_points] def test_compress_streamlines(): for compress_func in [compress_streamlines_python, compress_streamlines]: # Small streamlines (less than two points) are uncompressable. for small_streamline in [np.array([[]]), np.array([[1, 1, 1]]), np.array([[1, 1, 1], [2, 2, 2]])]: c_streamline = compress_func(small_streamline) assert_equal(len(c_streamline), len(small_streamline)) assert_array_equal(c_streamline, small_streamline) # Compressing a straight streamline that is less than 10mm long # should output a two points streamline. 
linear_streamline = np.linspace(0, 5, 100*3).reshape((100, 3)) c_streamline = compress_func(linear_streamline) assert_equal(len(c_streamline), 2) assert_array_equal(c_streamline, [linear_streamline[0], linear_streamline[-1]]) # The distance of consecutive points must be less or equal than some # value. max_segment_length = 10 linear_streamline = np.linspace(0, 100, 100*3).reshape((100, 3)) linear_streamline[:, 1:] = 0. c_streamline = compress_func(linear_streamline, max_segment_length=max_segment_length) segments_length = np.sqrt((np.diff(c_streamline, axis=0)**2).sum(axis=1)) assert_true(np.all(segments_length <= max_segment_length)) assert_equal(len(c_streamline), 12) assert_array_equal(c_streamline, linear_streamline[::9]) # A small `max_segment_length` should keep all points. c_streamline = compress_func(linear_streamline, max_segment_length=0.01) assert_array_equal(c_streamline, linear_streamline) # Test we can set `max_segment_length` to infinity # (like the C++ version) compress_func(streamline, max_segment_length=np.inf) # Uncompressable streamline when `tol_error` == 1. simple_streamline = np.array([[0, 0, 0], [1, 1, 0], [1.5, np.inf, 0], [2, 2, 0], [2.5, 20, 0], [3, 3, 0]]) # Because of np.inf, compressing that streamline causes a warning. with np.errstate(invalid='ignore'): c_streamline = compress_func(simple_streamline, tol_error=1) assert_array_equal(c_streamline, simple_streamline) # Create a special streamline where every other point is increasingly # farther from a straigth line formed by the streamline endpoints. tol_errors = np.linspace(0, 10, 21) orthogonal_line = np.array([[-np.sqrt(2)/2, np.sqrt(2)/2, 0]], dtype=np.float32) special_streamline = np.array([range(len(tol_errors)*2+1)] * 3, dtype=np.float32).T special_streamline[1::2] += orthogonal_line * tol_errors[:, None] # # Uncomment to see the streamline. # import pylab as plt # plt.plot(special_streamline[:, 0], special_streamline[:, 1], '.-') # plt.axis('equal'); plt.show() # Test different values for `tol_error`. for i, tol_error in enumerate(tol_errors): cspecial_streamline = compress_streamlines(special_streamline, tol_error=tol_error+1e-4, max_segment_length=np.inf) # First and last points should always be the same as the original ones. assert_array_equal(cspecial_streamline[0], special_streamline[0]) assert_array_equal(cspecial_streamline[-1], special_streamline[-1]) assert_equal(len(cspecial_streamline), len(special_streamline)-((i*2)+1)) # Make sure Cython and Python versions are the same. cstreamline_python = compress_streamlines_python( special_streamline, tol_error=tol_error+1e-4, max_segment_length=np.inf) assert_equal(len(cspecial_streamline), len(cstreamline_python)) assert_array_almost_equal(cspecial_streamline, cstreamline_python) def test_compress_streamlines_memory_leaks(): # Test some dtypes dtypes = [np.float32, np.float64, np.int32, np.int64] for dtype in dtypes: rng = np.random.RandomState(1234) NB_STREAMLINES = 10000 streamlines = [rng.randn(rng.randint(10, 100), 3).astype(dtype) for _ in range(NB_STREAMLINES)] list_refcount_before = get_type_refcount()["list"] cstreamlines = compress_streamlines(streamlines) list_refcount_after = get_type_refcount()["list"] del cstreamlines # Delete `cstreamlines` because it holds a reference # to `list`. # Calling `compress_streamlines` should increase the refcount of `list` # by one since we kept the returned value. 
assert_equal(list_refcount_after, list_refcount_before+1) # Test mixed dtypes rng = np.random.RandomState(1234) NB_STREAMLINES = 10000 streamlines = [] for i in range(NB_STREAMLINES): dtype = dtypes[i % len(dtypes)] streamlines.append(rng.randn(rng.randint(10, 100), 3).astype(dtype)) list_refcount_before = get_type_refcount()["list"] cstreamlines = compress_streamlines(streamlines) list_refcount_after = get_type_refcount()["list"] # Calling `compress_streamlines` should increase the refcount of `list` by # one since we kept the returned value. assert_equal(list_refcount_after, list_refcount_before+1) def generate_sl(streamlines): """ Helper function that takes a sequence and returns a generator Parameters ---------- streamlines : sequence Usually, this would be a list of 2D arrays, representing streamlines Returns ------- generator """ for sl in streamlines: yield sl def test_select_by_rois(): streamlines = [np.array([[0, 0., 0.9], [1.9, 0., 0.]]), np.array([[0.1, 0., 0], [0, 1., 1.], [0, 2., 2.]]), np.array([[2, 2, 2], [3, 3, 3]])] # Make two ROIs: mask1 = np.zeros((4, 4, 4), dtype=bool) mask2 = np.zeros_like(mask1) mask1[0, 0, 0] = True mask2[1, 0, 0] = True selection = select_by_rois(streamlines, [mask1], [True], tol=1) assert_arrays_equal(list(selection), [streamlines[0], streamlines[1]]) selection = select_by_rois(streamlines, [mask1, mask2], [True, True], tol=1) assert_arrays_equal(list(selection), [streamlines[0], streamlines[1]]) selection = select_by_rois(streamlines, [mask1, mask2], [True, False]) assert_arrays_equal(list(selection), [streamlines[1]]) # Setting tolerance too low gets overridden: selection = select_by_rois(streamlines, [mask1, mask2], [True, False], tol=0.1) assert_arrays_equal(list(selection), [streamlines[1]]) selection = select_by_rois(streamlines, [mask1, mask2], [True, True], tol=0.87) assert_arrays_equal(list(selection), [streamlines[1]]) mask3 = np.zeros_like(mask1) mask3[0, 2, 2] = 1 selection = select_by_rois(streamlines, [mask1, mask2, mask3], [True, True, False], tol=1.0) assert_arrays_equal(list(selection), [streamlines[0]]) # Select using only one ROI selection = select_by_rois(streamlines, [mask1], [True], tol=0.87) assert_arrays_equal(list(selection), [streamlines[1]]) selection = select_by_rois(streamlines, [mask1], [True], tol=1.0) assert_arrays_equal(list(selection), [streamlines[0], streamlines[1]]) # Use different modes: selection = select_by_rois(streamlines, [mask1, mask2, mask3], [True, True, False], mode="all", tol=1.0) assert_arrays_equal(list(selection), [streamlines[0]]) selection = select_by_rois(streamlines, [mask1, mask2, mask3], [True, True, False], mode="either_end", tol=1.0) assert_arrays_equal(list(selection), [streamlines[0]]) selection = select_by_rois(streamlines, [mask1, mask2, mask3], [True, True, False], mode="both_end", tol=1.0) assert_arrays_equal(list(selection), [streamlines[0]]) mask2[0, 2, 2] = True selection = select_by_rois(streamlines, [mask1, mask2, mask3], [True, True, False], mode="both_end", tol=1.0) assert_arrays_equal(list(selection), [streamlines[0], streamlines[1]]) # Test with generator input: selection = select_by_rois(generate_sl(streamlines), [mask1], [True], tol=1.0) assert_arrays_equal(list(selection), [streamlines[0], streamlines[1]]) def test_orient_by_rois(): streamlines = [np.array([[0, 0., 0], [1, 0., 0.], [2, 0., 0.]]), np.array([[2, 0., 0.], [1, 0., 0], [0, 0, 0.]])] # Make two ROIs: mask1_vol = np.zeros((4, 4, 4), dtype=bool) mask2_vol = np.zeros_like(mask1_vol) mask1_vol[0, 0, 0] = True 
mask2_vol[1, 0, 0] = True mask1_coords = np.array(np.where(mask1_vol)).T mask2_coords = np.array(np.where(mask2_vol)).T # If there is an affine, we'll use it: affine = np.eye(4) affine[:, 3] = [-1, 100, -20, 1] # Transform the streamlines: x_streamlines = [sl + affine[:3, 3] for sl in streamlines] # After reorientation, this should be the answer: flipped_sl = [streamlines[0], streamlines[1][::-1]] new_streamlines = orient_by_rois(streamlines, mask1_vol, mask2_vol, in_place=False, affine=None, as_generator=False) npt.assert_equal(new_streamlines, flipped_sl) npt.assert_(new_streamlines is not streamlines) # Test with affine: x_flipped_sl = [s + affine[:3, 3] for s in flipped_sl] new_streamlines = orient_by_rois(x_streamlines, mask1_vol, mask2_vol, in_place=False, affine=affine, as_generator=False) npt.assert_equal(new_streamlines, x_flipped_sl) npt.assert_(new_streamlines is not x_streamlines) # Test providing coord ROIs instead of vol ROIs: new_streamlines = orient_by_rois(x_streamlines, mask1_coords, mask2_coords, in_place=False, affine=affine, as_generator=False) npt.assert_equal(new_streamlines, x_flipped_sl) # Test with as_generator set to True new_streamlines = orient_by_rois(streamlines, mask1_vol, mask2_vol, in_place=False, affine=None, as_generator=True) npt.assert_(isinstance(new_streamlines, types.GeneratorType)) ll = list(new_streamlines) npt.assert_equal(ll, flipped_sl) # Test with as_generator set to True and with the affine new_streamlines = orient_by_rois(x_streamlines, mask1_vol, mask2_vol, in_place=False, affine=affine, as_generator=True) npt.assert_(isinstance(new_streamlines, types.GeneratorType)) ll = list(new_streamlines) npt.assert_equal(ll, x_flipped_sl) # Test with generator input: new_streamlines = orient_by_rois(generate_sl(streamlines), mask1_vol, mask2_vol, in_place=False, affine=None, as_generator=True) npt.assert_(isinstance(new_streamlines, types.GeneratorType)) ll = list(new_streamlines) npt.assert_equal(ll, flipped_sl) # Generator output cannot take a True `in_place` kwarg: npt.assert_raises(ValueError, orient_by_rois, *[generate_sl(streamlines), mask1_vol, mask2_vol], **dict(in_place=True, affine=None, as_generator=True)) # But you can input a generator and get a non-generator as output: new_streamlines = orient_by_rois(generate_sl(streamlines), mask1_vol, mask2_vol, in_place=False, affine=None, as_generator=False) npt.assert_(not isinstance(new_streamlines, types.GeneratorType)) npt.assert_equal(new_streamlines, flipped_sl) # Modify in-place: new_streamlines = orient_by_rois(streamlines, mask1_vol, mask2_vol, in_place=True, affine=None, as_generator=False) npt.assert_equal(new_streamlines, flipped_sl) # The two objects are one and the same: npt.assert_(new_streamlines is streamlines) def test_values_from_volume(): decimal = 4 data3d = np.arange(2000).reshape(20, 10, 10) # Test two cases of 4D data (handled differently) # One where the last dimension is length 3: data4d_3vec = np.arange(6000).reshape(20, 10, 10, 3) # The other where the last dimension is not 3: data4d_2vec = np.arange(4000).reshape(20, 10, 10, 2) for dt in [np.float32, np.float64]: for data in [data3d, data4d_3vec, data4d_2vec]: sl1 = [np.array([[1, 0, 0], [1.5, 0, 0], [2, 0, 0], [2.5, 0, 0]]).astype(dt), np.array([[2, 0, 0], [3.1, 0, 0], [3.9, 0, 0], [4.1, 0, 0]]).astype(dt)] ans1 = [[data[1, 0, 0], data[1, 0, 0] + (data[2, 0, 0] - data[1, 0, 0]) / 2, data[2, 0, 0], data[2, 0, 0] + (data[3, 0, 0] - data[2, 0, 0]) / 2], [data[2, 0, 0], data[3, 0, 0] + (data[4, 0, 0] - data[3, 0, 0]) * 0.1, 
data[3, 0, 0] + (data[4, 0, 0] - data[3, 0, 0]) * 0.9, data[4, 0, 0] + (data[5, 0, 0] - data[4, 0, 0]) * 0.1]] vv = values_from_volume(data, sl1) npt.assert_almost_equal(vv, ans1, decimal=decimal) vv = values_from_volume(data, np.array(sl1)) npt.assert_almost_equal(vv, ans1, decimal=decimal) affine = np.eye(4) affine[:, 3] = [-100, 10, 1, 1] x_sl1 = ut.move_streamlines(sl1, affine) x_sl2 = ut.move_streamlines(sl1, affine) vv = values_from_volume(data, x_sl1, affine=affine) npt.assert_almost_equal(vv, ans1, decimal=decimal) # The generator has already been consumed so needs to be # regenerated: x_sl1 = list(ut.move_streamlines(sl1, affine)) vv = values_from_volume(data, x_sl1, affine=affine) npt.assert_almost_equal(vv, ans1, decimal=decimal) # Test that the streamlines haven't mutated: l_sl2 = list(x_sl2) npt.assert_equal(x_sl1, l_sl2) vv = values_from_volume(data, np.array(x_sl1), affine=affine) npt.assert_almost_equal(vv, ans1, decimal=decimal) npt.assert_equal(np.array(x_sl1), np.array(l_sl2)) # Test for lists of streamlines with different numbers of nodes: sl2 = [sl1[0][:-1], sl1[1]] ans2 = [ans1[0][:-1], ans1[1]] vv = values_from_volume(data, sl2) for ii, v in enumerate(vv): npt.assert_almost_equal(v, ans2[ii], decimal=decimal) # We raise an error if the streamlines fed don't make sense. In this # case, a tuple instead of a list, generator or array nonsense_sl = (np.array([[1, 0, 0], [1.5, 0, 0], [2, 0, 0], [2.5, 0, 0]]), np.array([[2, 0, 0], [3.1, 0, 0], [3.9, 0, 0], [4.1, 0, 0]])) npt.assert_raises(RuntimeError, values_from_volume, data, nonsense_sl) # For some use-cases we might have singleton streamlines (with only one # node each): data3D = np.ones((2, 2, 2)) streamlines = np.ones((10, 1, 3)) npt.assert_equal(values_from_volume(data3D, streamlines).shape, (10, 1)) data4D = np.ones((2, 2, 2, 2)) streamlines = np.ones((10, 1, 3)) npt.assert_equal(values_from_volume(data4D, streamlines).shape, (10, 1, 2)) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/tracking/tests/test_track_volumes.py000066400000000000000000000046001317371701200227420ustar00rootroot00000000000000 import numpy as np from nose.tools import assert_true, assert_false, \ assert_equal, assert_raises from numpy.testing import assert_array_equal, assert_array_almost_equal import dipy.tracking.vox2track as tvo def tracks_to_expected(tracks, vol_dims): # simulate expected behavior of module vol_dims = np.array(vol_dims, dtype=np.int32) counts = np.zeros(vol_dims, dtype=np.int32) elements = {} for t_no, t in enumerate(tracks): u_ps = set() ti = np.round(t).astype(np.int32) for p_no, p in enumerate(ti): if np.any(p < 0): p[p < 0] = 0 too_high = p >= vol_dims if np.any(too_high): p[too_high] = vol_dims[too_high]-1 p = tuple(p) if p in u_ps: continue u_ps.add(p) val = t_no if counts[p]: elements[p].append(val) else: elements[p] = [val] counts[p] += 1 return counts, elements def test_track_volumes(): # simplest case vol_dims = (1, 2, 3) tracks = ([[0, 0, 0], [0, 1, 1]],) tracks = [np.array(t) for t in tracks] ex_counts, ex_els = tracks_to_expected(tracks, vol_dims) tcs, tes = tvo.track_counts(tracks, vol_dims, [1, 1, 1]) assert_array_equal(tcs, ex_counts) assert_array_equal(tes, ex_els) # check only counts returned for return_elements=False tcs = tvo.track_counts(tracks, vol_dims, [1, 1, 1], False) assert_array_equal(tcs, ex_counts) # non-unique points, non-integer points, points outside vol_dims = (5, 10, 15) tracks = ([[-1, 0, 1], [0, 0.1, 0], [1, 1, 1], [1, 1, 1], [2, 2, 2]], [[0.7, 0, 0], [1, 1, 1], [1, 2, 
2], [1, 11, 0]]) tracks = [np.array(t) for t in tracks] ex_counts, ex_els = tracks_to_expected(tracks, vol_dims) tcs, tes = tvo.track_counts(tracks, vol_dims, [1, 1, 1]) assert_array_equal(tcs, ex_counts) assert_array_equal(tes, ex_els) # points with non-unit voxel sizes vox_sizes = [1.4, 2.1, 3.7] float_tracks = [] for t in tracks: float_tracks.append(t * vox_sizes) tcs, tes = tvo.track_counts(float_tracks, vol_dims, vox_sizes) assert_array_equal(tcs, ex_counts) assert_array_equal(tes, ex_els) dipy-0.13.0/dipy/tracking/tests/test_utils.py000066400000000000000000000653231317371701200212350ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import from dipy.utils.six.moves import xrange import numpy as np import nose from dipy.io.bvectxt import orientation_from_string from dipy.tracking.utils import (affine_for_trackvis, connectivity_matrix, density_map, length, move_streamlines, ndbincount, reduce_labels, reorder_voxels_affine, seeds_from_mask, random_seeds_from_mask, target, target_line_based, _rmi, unique_rows, near_roi, reduce_rois, path_length, flexi_tvis_affine, get_flexi_tvis_affine, _min_at) from dipy.tracking._utils import _to_voxel_coordinates import dipy.tracking.metrics as metrix from dipy.tracking.vox2track import streamline_mapping import numpy.testing as npt from numpy.testing import assert_array_almost_equal, assert_array_equal from nose.tools import assert_equal, assert_raises, assert_true def make_streamlines(): streamlines = [np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2], [5, 10, 12]], 'float'), np.array([[1, 2, 3], [3, 2, 0], [5, 20, 33], [40, 80, 120]], 'float')] return streamlines def test_density_map(): # One streamline diagonal in volume streamlines = [np.array([np.arange(10)] * 3).T] shape = (10, 10, 10) x = np.arange(10) expected = np.zeros(shape) expected[x, x, x] = 1. dm = density_map(streamlines, vol_dims=shape, voxel_size=(1, 1, 1)) assert_array_equal(dm, expected) # add streamline, make voxel_size smaller. Each streamline should only be # counted once, even if multiple points lie in a voxel streamlines.append(np.ones((5, 3))) shape = (5, 5, 5) x = np.arange(5) expected = np.zeros(shape) expected[x, x, x] = 1. expected[0, 0, 0] += 1 dm = density_map(streamlines, vol_dims=shape, voxel_size=(2, 2, 2)) assert_array_equal(dm, expected) # should work with a generator dm = density_map(iter(streamlines), vol_dims=shape, voxel_size=(2, 2, 2)) assert_array_equal(dm, expected) # Test passing affine affine = np.diag([2, 2, 2, 1.]) affine[:3, 3] = 1. dm = density_map(streamlines, shape, affine=affine) assert_array_equal(dm, expected) # Shift the image by 2 voxels, ie 4mm affine[:3, 3] -= 4. expected_old = expected new_shape = [i + 2 for i in shape] expected = np.zeros(new_shape) expected[2:, 2:, 2:] = expected_old dm = density_map(streamlines, new_shape, affine=affine) assert_array_equal(dm, expected) def test_to_voxel_coordinates_precision(): # To simplify tests, use an identity affine. This would be the result of # a call to _mapping_to_voxel with another identity affine. transfo = np.array([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]]) # Offset is computed by _mapping_to_voxel. With a 1x1x1 dataset # having no translation, the offset is half the voxel size, i.e. 0.5. offset = np.array([0.5, 0.5, 0.5]) # Without the added tolerance in _to_voxel_coordinates, this streamline # should raise an Error in the call to _to_voxel_coordinates. 
failing_strl = [np.array([[-0.5000001, 0.0, 0.0], [0.0, 1.0, 0.0]], dtype=np.float32)] indices = _to_voxel_coordinates(failing_strl, transfo, offset) expected_indices = np.array([[[0, 0, 0], [0, 1, 0]]]) assert_array_equal(indices, expected_indices) def test_connectivity_matrix(): label_volume = np.array([[[3, 0, 0], [0, 0, 0], [0, 0, 4]]]) streamlines = [np.array([[0, 0, 0], [0, 0, 0], [0, 2, 2]], 'float'), np.array([[0, 0, 0], [0, 1, 1], [0, 2, 2]], 'float'), np.array([[0, 2, 2], [0, 1, 1], [0, 0, 0]], 'float')] expected = np.zeros((5, 5), 'int') expected[3, 4] = 2 expected[4, 3] = 1 # Check basic Case matrix = connectivity_matrix(streamlines, label_volume, (1, 1, 1), symmetric=False) assert_array_equal(matrix, expected) # Test mapping matrix, mapping = connectivity_matrix(streamlines, label_volume, (1, 1, 1), symmetric=False, return_mapping=True) assert_array_equal(matrix, expected) assert_equal(mapping[3, 4], [0, 1]) assert_equal(mapping[4, 3], [2]) assert_equal(mapping.get((0, 0)), None) # Test mapping and symmetric matrix, mapping = connectivity_matrix(streamlines, label_volume, (1, 1, 1), symmetric=True, return_mapping=True) assert_equal(mapping[3, 4], [0, 1, 2]) # When symmetric only (3,4) is a key, not (4, 3) assert_equal(mapping.get((4, 3)), None) # expected output matrix is symmetric version of expected expected = expected + expected.T assert_array_equal(matrix, expected) # Test mapping_as_streamlines, mapping dict has lists of streamlines matrix, mapping = connectivity_matrix(streamlines, label_volume, (1, 1, 1), symmetric=False, return_mapping=True, mapping_as_streamlines=True) assert_true(mapping[3, 4][0] is streamlines[0]) assert_true(mapping[3, 4][1] is streamlines[1]) assert_true(mapping[4, 3][0] is streamlines[2]) # Test passing affine to connectivity_matrix expected = matrix affine = np.diag([-1, -1, -1, 1.]) streamlines = [-i for i in streamlines] matrix = connectivity_matrix(streamlines, label_volume, affine=affine) # In the symmetrical case, the matrix should be, well, symmetric: assert_equal(matrix[4, 3], matrix[4, 3]) def test_ndbincount(): def check(expected): assert_equal(bc[0, 0], expected[0]) assert_equal(bc[0, 1], expected[1]) assert_equal(bc[1, 0], expected[2]) assert_equal(bc[2, 2], expected[3]) x = np.array([[0, 0], [0, 0], [0, 1], [0, 1], [1, 0], [2, 2]]).T expected = [2, 2, 1, 1] # count occurrences in x bc = ndbincount(x) assert_equal(bc.shape, (3, 3)) check(expected) # pass in shape bc = ndbincount(x, shape=(4, 5)) assert_equal(bc.shape, (4, 5)) check(expected) # pass in weights weights = np.arange(6.) 
weights[-1] = 1.23 expeceted = [1., 5., 4., 1.23] bc = ndbincount(x, weights=weights) check(expeceted) # raises an error if shape is too small assert_raises(ValueError, ndbincount, x, None, (2, 2)) def test_reduce_labels(): shape = (4, 5, 6) # labels from 100 to 220 labels = np.arange(100, np.prod(shape) + 100).reshape(shape) # new labels form 0 to 120, and lookup maps range(0,120) to range(100, 220) new_labels, lookup = reduce_labels(labels) assert_array_equal(new_labels, labels - 100) assert_array_equal(lookup, labels.ravel()) def test_move_streamlines(): streamlines = make_streamlines() affine = np.eye(4) new_streamlines = move_streamlines(streamlines, affine) for i, test_sl in enumerate(new_streamlines): assert_array_equal(test_sl, streamlines[i]) affine[:3, 3] += (4, 5, 6) new_streamlines = move_streamlines(streamlines, affine) for i, test_sl in enumerate(new_streamlines): assert_array_equal(test_sl, streamlines[i] + (4, 5, 6)) affine = np.eye(4) affine = affine[[2, 1, 0, 3]] new_streamlines = move_streamlines(streamlines, affine) for i, test_sl in enumerate(new_streamlines): assert_array_equal(test_sl, streamlines[i][:, [2, 1, 0]]) affine[:3, 3] += (4, 5, 6) new_streamlines = move_streamlines(streamlines, affine) undo_affine = move_streamlines(new_streamlines, np.eye(4), input_space=affine) for i, test_sl in enumerate(undo_affine): assert_array_almost_equal(test_sl, streamlines[i]) # Test that changing affine does affect moving streamlines affineA = affine.copy() affineB = affine.copy() streamlinesA = move_streamlines(streamlines, affineA) streamlinesB = move_streamlines(streamlines, affineB) affineB[:] = 0 for (a, b) in zip(streamlinesA, streamlinesB): assert_array_equal(a, b) def test_target(): streamlines = [np.array([[0., 0., 0.], [1., 0., 0.], [2., 0., 0.]]), np.array([[0., 0., 0], [0, 1., 1.], [0, 2., 2.]])] _target(target, streamlines, (0, 0, 0), (1, 0, 0), True) def test_target_lb(): streamlines = [np.array([[0., 1., 1.], [3., 1., 1.]]), np.array([[0., 0., 0.], [2., 2., 2.]]), np.array([[1., 1., 1.]])] # Single-point streamline _target(target_line_based, streamlines, (1, 1, 1), (2, 1, 1), False) def _target(target_f, streamlines, voxel_both_true, voxel_one_true, test_bad_points): affine = np.eye(4) mask = np.zeros((4, 4, 4), dtype=bool) # Both pass though mask[voxel_both_true] = True new = list(target_f(streamlines, mask, affine=affine)) assert_equal(len(new), 2) new = list(target_f(streamlines, mask, affine=affine, include=False)) assert_equal(len(new), 0) # only first mask[:] = False mask[voxel_one_true] = True new = list(target_f(streamlines, mask, affine=affine)) assert_equal(len(new), 1) assert_true(new[0] is streamlines[0]) new = list(target_f(streamlines, mask, affine=affine, include=False)) assert_equal(len(new), 1) assert_true(new[0] is streamlines[1]) # Test that bad points raise a value error if test_bad_points: bad_sl = streamlines + [np.array([[10.0, 10.0, 10.0]])] new = target_f(bad_sl, mask, affine=affine) assert_raises(ValueError, list, new) bad_sl = streamlines + [-np.array([[10.0, 10.0, 10.0]])] new = target_f(bad_sl, mask, affine=affine) assert_raises(ValueError, list, new) # Test smaller voxels affine = np.random.random((4, 4)) - .5 affine[3] = [0, 0, 0, 1] streamlines = list(move_streamlines(streamlines, affine)) new = list(target_f(streamlines, mask, affine=affine)) assert_equal(len(new), 1) assert_true(new[0] is streamlines[0]) new = list(target_f(streamlines, mask, affine=affine, include=False)) assert_equal(len(new), 1) assert_true(new[0] is 
streamlines[1]) # Test that changing mask or affine does not break target/target_line_based include = target_f(streamlines, mask, affine=affine) exclude = target_f(streamlines, mask, affine=affine, include=False) affine[:] = np.eye(4) mask[:] = False include = list(include) exclude = list(exclude) assert_equal(len(include), 1) assert_true(include[0] is streamlines[0]) assert_equal(len(exclude), 1) assert_true(exclude[0] is streamlines[1]) def test_near_roi(): streamlines = [np.array([[0., 0., 0.9], [1.9, 0., 0.], [3, 2., 2.]]), np.array([[0.1, 0., 0], [0, 1., 1.], [0, 2., 2.]]), np.array([[2, 2, 2], [3, 3, 3]])] affine = np.eye(4) mask = np.zeros((4, 4, 4), dtype=bool) mask[0, 0, 0] = True mask[1, 0, 0] = True assert_array_equal(near_roi(streamlines, mask, tol=1), np.array([True, True, False])) assert_array_equal(near_roi(streamlines, mask), np.array([False, True, False])) # If there is an affine, we need to use it: affine[:, 3] = [-1, 100, -20, 1] # Transform the streamlines: x_streamlines = [sl + affine[:3, 3] for sl in streamlines] assert_array_equal(near_roi(x_streamlines, mask, affine=affine, tol=1), np.array([True, True, False])) assert_array_equal(near_roi(x_streamlines, mask, affine=affine, tol=None), np.array([False, True, False])) # Test for use of the 'all' mode: assert_array_equal(near_roi(x_streamlines, mask, affine=affine, tol=None, mode='all'), np.array([False, False, False])) mask[0, 1, 1] = True mask[0, 2, 2] = True # Test for use of the 'all' mode, also testing that setting the tolerance # to a very small number gets overridden: assert_array_equal(near_roi(x_streamlines, mask, affine=affine, tol=0.1, mode='all'), np.array([False, True, False])) mask[2, 2, 2] = True mask[3, 3, 3] = True assert_array_equal(near_roi(x_streamlines, mask, affine=affine, tol=None, mode='all'), np.array([False, True, True])) # Test for use of endpoints as selection criteria: mask = np.zeros((4, 4, 4), dtype=bool) mask[0, 1, 1] = True mask[3, 2, 2] = True assert_array_equal(near_roi(streamlines, mask, tol=0.87, mode="either_end"), np.array([True, False, False])) assert_array_equal(near_roi(streamlines, mask, tol=0.87, mode="both_end"), np.array([False, False, False])) mask[0, 0, 0] = True mask[0, 2, 2] = True assert_array_equal(near_roi(streamlines, mask, mode="both_end"), np.array([False, True, False])) # Test with a generator input: def generate_sl(streamlines): for sl in streamlines: yield sl assert_array_equal(near_roi(generate_sl(streamlines), mask, mode="both_end"), np.array([False, True, False])) def test_voxel_ornt(): sh = (40, 40, 40) sz = (1, 2, 3) I4 = np.eye(4) ras = orientation_from_string('ras') sra = orientation_from_string('sra') lpi = orientation_from_string('lpi') srp = orientation_from_string('srp') affine = reorder_voxels_affine(ras, ras, sh, sz) assert_array_equal(affine, I4) affine = reorder_voxels_affine(sra, sra, sh, sz) assert_array_equal(affine, I4) affine = reorder_voxels_affine(lpi, lpi, sh, sz) assert_array_equal(affine, I4) affine = reorder_voxels_affine(srp, srp, sh, sz) assert_array_equal(affine, I4) streamlines = make_streamlines() box = np.array(sh) * sz sra_affine = reorder_voxels_affine(ras, sra, sh, sz) toras_affine = reorder_voxels_affine(sra, ras, sh, sz) assert_array_equal(np.dot(toras_affine, sra_affine), I4) expected_sl = (sl[:, [2, 0, 1]] for sl in streamlines) test_sl = move_streamlines(streamlines, sra_affine) for ii in xrange(len(streamlines)): assert_array_equal(next(test_sl), next(expected_sl)) lpi_affine = reorder_voxels_affine(ras, lpi, sh, sz) 
toras_affine = reorder_voxels_affine(lpi, ras, sh, sz) assert_array_equal(np.dot(toras_affine, lpi_affine), I4) expected_sl = (box - sl for sl in streamlines) test_sl = move_streamlines(streamlines, lpi_affine) for ii in xrange(len(streamlines)): assert_array_equal(next(test_sl), next(expected_sl)) srp_affine = reorder_voxels_affine(ras, srp, sh, sz) toras_affine = reorder_voxels_affine(srp, ras, (40, 40, 40), (3, 1, 2)) assert_array_equal(np.dot(toras_affine, srp_affine), I4) expected_sl = [sl.copy() for sl in streamlines] for sl in expected_sl: sl[:, 1] = box[1] - sl[:, 1] expected_sl = (sl[:, [2, 0, 1]] for sl in expected_sl) test_sl = move_streamlines(streamlines, srp_affine) for ii in xrange(len(streamlines)): assert_array_equal(next(test_sl), next(expected_sl)) def test_streamline_mapping(): streamlines = [np.array([[0, 0, 0], [0, 0, 0], [0, 2, 2]], 'float'), np.array([[0, 0, 0], [0, 1, 1], [0, 2, 2]], 'float'), np.array([[0, 2, 2], [0, 1, 1], [0, 0, 0]], 'float')] mapping = streamline_mapping(streamlines, (1, 1, 1)) expected = {(0, 0, 0): [0, 1, 2], (0, 2, 2): [0, 1, 2], (0, 1, 1): [1, 2]} assert_equal(mapping, expected) mapping = streamline_mapping(streamlines, (1, 1, 1), mapping_as_streamlines=True) expected = dict((k, [streamlines[i] for i in indices]) for k, indices in expected.items()) assert_equal(mapping, expected) # Test passing affine affine = np.eye(4) affine[:3, 3] = .5 mapping = streamline_mapping(streamlines, affine=affine, mapping_as_streamlines=True) assert_equal(mapping, expected) # Make the voxel size smaller affine = np.diag([.5, .5, .5, 1.]) affine[:3, 3] = .25 expected = dict((tuple(i * 2 for i in key), value) for key, value in expected.items()) mapping = streamline_mapping(streamlines, affine=affine, mapping_as_streamlines=True) assert_equal(mapping, expected) def test_rmi(): I1 = _rmi([3, 4], [10, 10]) assert_equal(I1, 34) I1 = _rmi([0, 0], [10, 10]) assert_equal(I1, 0) assert_raises(ValueError, _rmi, [10, 0], [10, 10]) try: from numpy import ravel_multi_index except ImportError: raise nose.SkipTest() # Dtype of random integers is system dependent A, B, C, D = np.random.randint(0, 1000, size=[4, 100]) I1 = _rmi([A, B], dims=[1000, 1000]) I2 = ravel_multi_index([A, B], dims=[1000, 1000]) assert_array_equal(I1, I2) I1 = _rmi([A, B, C, D], dims=[1000] * 4) I2 = ravel_multi_index([A, B, C, D], dims=[1000] * 4) assert_array_equal(I1, I2) # Check for overflow with small int types indices = np.random.randint(0, 255, size=(2, 100)) dims = (1000, 1000) I1 = _rmi(indices, dims=dims) I2 = ravel_multi_index(indices, dims=dims) assert_array_equal(I1, I2) def test_affine_for_trackvis(): voxel_size = np.array([1., 2, 3.]) affine = affine_for_trackvis(voxel_size) origin = np.dot(affine, [0, 0, 0, 1]) assert_array_almost_equal(origin[:3], voxel_size / 2) def test_length(): # Generate a simulated bundle of fibers: n_streamlines = 50 n_pts = 100 t = np.linspace(-10, 10, n_pts) bundle = [] for i in np.linspace(3, 5, n_streamlines): pts = np.vstack((np.cos(2 * t / np.pi), np.zeros(t.shape) + i, t)).T bundle.append(pts) start = np.random.randint(10, 30, n_streamlines) end = np.random.randint(60, 100, n_streamlines) bundle = [10 * streamline[start[i]:end[i]] for (i, streamline) in enumerate(bundle)] bundle_lengths = length(bundle) for idx, this_length in enumerate(bundle_lengths): assert_equal(this_length, metrix.length(bundle[idx])) def test_seeds_from_mask(): mask = np.random.random_integers(0, 1, size=(10, 10, 10)) seeds = seeds_from_mask(mask, density=1) assert_equal(mask.sum(), 
len(seeds)) assert_array_equal(np.argwhere(mask), seeds) mask[:] = False mask[3, 3, 3] = True seeds = seeds_from_mask(mask, density=[3, 4, 5]) assert_equal(len(seeds), 3 * 4 * 5) assert_true(np.all((seeds > 2.5) & (seeds < 3.5))) mask[4, 4, 4] = True seeds = seeds_from_mask(mask, density=[3, 4, 5]) assert_equal(len(seeds), 2 * 3 * 4 * 5) assert_true(np.all((seeds > 2.5) & (seeds < 4.5))) in_333 = ((seeds > 2.5) & (seeds < 3.5)).all(1) assert_equal(in_333.sum(), 3 * 4 * 5) in_444 = ((seeds > 3.5) & (seeds < 4.5)).all(1) assert_equal(in_444.sum(), 3 * 4 * 5) def test_random_seeds_from_mask(): mask = np.random.random_integers(0, 1, size=(4, 6, 3)) seeds = random_seeds_from_mask(mask, seeds_count=24, seed_count_per_voxel=True) assert_equal(mask.sum() * 24, len(seeds)) seeds = random_seeds_from_mask(mask, seeds_count=0, seed_count_per_voxel=True) assert_equal(0, len(seeds)) mask[:] = False mask[2, 2, 2] = True seeds = random_seeds_from_mask(mask, seeds_count=8, seed_count_per_voxel=True) assert_equal(mask.sum() * 8, len(seeds)) assert_true(np.all((seeds > 1.5) & (seeds < 2.5))) seeds = random_seeds_from_mask(mask, seeds_count=24, seed_count_per_voxel=False) assert_equal(24, len(seeds)) seeds = random_seeds_from_mask(mask, seeds_count=0, seed_count_per_voxel=False) assert_equal(0, len(seeds)) mask[:] = False mask[2, 2, 2] = True seeds = random_seeds_from_mask(mask, seeds_count=100, seed_count_per_voxel=False) assert_equal(100, len(seeds)) assert_true(np.all((seeds > 1.5) & (seeds < 2.5))) def test_connectivity_matrix_shape(): # Labels: z-planes have labels 0,1,2 labels = np.zeros((3, 3, 3), dtype=int) labels[:, :, 1] = 1 labels[:, :, 2] = 2 # Streamline set, only moves between first two z-planes. streamlines = [np.array([[0., 0., 0.], [0., 0., 0.5], [0., 0., 1.]]), np.array([[0., 1., 1.], [0., 1., 0.5], [0., 1., 0.]])] matrix = connectivity_matrix(streamlines, labels, affine=np.eye(4)) assert_equal(matrix.shape, (3, 3)) def test_unique_rows(): """ Testing the function unique_coords """ arr = np.array([[1, 2, 3], [1, 2, 3], [2, 3, 4], [3, 4, 5]]) arr_w_unique = np.array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) assert_array_equal(unique_rows(arr), arr_w_unique) # Should preserve order: arr = np.array([[2, 3, 4], [1, 2, 3], [1, 2, 3], [3, 4, 5]]) arr_w_unique = np.array([[2, 3, 4], [1, 2, 3], [3, 4, 5]]) assert_array_equal(unique_rows(arr), arr_w_unique) # Should work even with longer arrays: arr = np.array([[2, 3, 4], [1, 2, 3], [1, 2, 3], [3, 4, 5], [6, 7, 8], [0, 1, 0], [1, 0, 1]]) arr_w_unique = np.array([[2, 3, 4], [1, 2, 3], [3, 4, 5], [6, 7, 8], [0, 1, 0], [1, 0, 1]]) assert_array_equal(unique_rows(arr), arr_w_unique) def test_reduce_rois(): roi1 = np.zeros((4, 4, 4), dtype=np.bool) roi2 = np.zeros((4, 4, 4), dtype=np.bool) roi1[1, 1, 1] = 1 roi2[2, 2, 2] = 1 include_roi, exclude_roi = reduce_rois([roi1, roi2], [True, True]) npt.assert_equal(include_roi, roi1 + roi2) npt.assert_equal(exclude_roi, np.zeros((4, 4, 4))) include_roi, exclude_roi = reduce_rois([roi1, roi2], [True, False]) npt.assert_equal(include_roi, roi1) npt.assert_equal(exclude_roi, roi2) # Array input: include_roi, exclude_roi = reduce_rois(np.array([roi1, roi2]), [True, True]) npt.assert_equal(include_roi, roi1 + roi2) npt.assert_equal(exclude_roi, np.zeros((4, 4, 4))) def test_flexi_tvis_affine(): sl_vox_order = 'RPI' grid_affine = np.array( [[-1.08566022e+00, 1.42664334e-03, 2.43463114e-01, 1.34783203e+02], [2.43251352e-03, 1.09376717e+00, 1.48301506e-02, -1.07367630e+02], [1.33170187e-01, -8.34854878e-03, 1.98454463e+00, 
-9.98151169e+01], [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.00000000e+00]]) dim = (256, 256, 86) voxel_size = np.array([1.09379995, 1.09379995, 1.99947774]) affine = flexi_tvis_affine(sl_vox_order, grid_affine, dim, voxel_size) origin = np.dot(affine, [0, 0, 0, 1]) assert_array_almost_equal(origin[:3], np.multiply(dim, voxel_size) - voxel_size / 2) def test_get_flexi_tvis_affine(): tvis_hdr = {'voxel_order': 'RPI', 'dim': (30, 40, 50), 'voxel_size': [2, 3, 4]} grid_affine = np.array([[-2, 0, 0, 0], [0, 3, 0, 0], [0, 0, 4, 0], [0, 0, 0, 1.]]) affine = get_flexi_tvis_affine(tvis_hdr, grid_affine) origin = np.dot(affine, [0, 0, 0, 1]) vsz = np.array(tvis_hdr['voxel_size']) assert_array_almost_equal(origin[:3], np.multiply(tvis_hdr['dim'], vsz) - vsz / 2) # grid_affine = tvis_hdr['voxel_order'] = 'ASL' vsz = tvis_hdr['voxel_size'] = np.array([3, 4, 2.]) affine = get_flexi_tvis_affine(tvis_hdr, grid_affine) vox_point = np.array([9, 8, 7]) trk_point = np.dot(affine, np.append(vox_point, 1)) assert_array_almost_equal(trk_point[:3], (vox_point[[1, 2, 0]] + 0.5) * vsz) def test_path_length(): aoi = np.zeros((20, 20, 20), dtype=bool) aoi[0, 0, 0] = 1 # A few tests for basic usage x = np.arange(20) streamlines = [np.array([x, x, x]).T] pl = path_length(streamlines, aoi, affine=np.eye(4)) expected = x.copy() * np.sqrt(3) # expected[0] = np.inf npt.assert_array_almost_equal(pl[x, x, x], expected) aoi[19, 19, 19] = 1 pl = path_length(streamlines, aoi, affine=np.eye(4)) expected = np.minimum(expected, expected[::-1]) npt.assert_array_almost_equal(pl[x, x, x], expected) aoi[19, 19, 19] = 0 aoi[1, 1, 1] = 1 pl = path_length(streamlines, aoi, affine=np.eye(4)) expected = (x - 1) * np.sqrt(3) expected[0] = 0 npt.assert_array_almost_equal(pl[x, x, x], expected) z = np.zeros(x.shape, x.dtype) streamlines.append(np.array([x, z, z]).T) pl = path_length(streamlines, aoi, affine=np.eye(4)) npt.assert_array_almost_equal(pl[x, x, x], expected) npt.assert_array_almost_equal(pl[x, 0, 0], x) # Only streamlines that pass through aoi contribute to path length so if # all streamlines are duds, plm will be all inf. aoi[:] = 0 aoi[0, 0, 0] = 1 streamlines = [] for i in range(1000): rando = np.random.random(size=(100, 3)) * 19 + .5 assert (rando > .5).all() assert (rando < 19.5).all() streamlines.append(rando) pl = path_length(streamlines, aoi, affine=np.eye(4)) npt.assert_array_almost_equal(pl, -1) pl = path_length(streamlines, aoi, affine=np.eye(4), fill_value=-12.) npt.assert_array_almost_equal(pl, -12.) def test_min_at(): k = np.array([3, 2, 2, 2, 1, 1, 1]) values = np.array([10., 1, 2, 3, 31, 21, 11]) i = np.zeros(k.shape, int) j = np.zeros(k.shape, int) a = np.zeros([1, 1, 4]) + 100. _min_at(a, (i, j, k), values) npt.assert_array_equal(a, [[[100, 11, 1, 10]]]) dipy-0.13.0/dipy/tracking/utils.py000066400000000000000000001123051317371701200170250ustar00rootroot00000000000000"""Various tools related to creating and working with streamlines This module provides tools for targeting streamlines using ROIs, for making connectivity matrices from whole brain fiber tracking and some other tools that allow streamlines to interact with image data. Important Note: --------------- Dipy uses affine matrices to represent the relationship between streamline points, which are defined as points in a continuous 3d space, and image voxels, which are typically arranged in a discrete 3d grid. Dipy uses a convention similar to nifti files to interpret these affine matrices. 
This convention is that the point at the center of voxel ``[i, j, k]`` is represented by the point ``[x, y, z]`` where ``[x, y, z, 1] = affine * [i, j, k, 1]``. Also when the phrase "voxel coordinates" is used, it is understood to be the same as ``affine = eye(4)``. As an example, lets take a 2d image where the affine is:: [[1., 0., 0.], [0., 2., 0.], [0., 0., 1.]] The pixels of an image with this affine would look something like:: A------------ | | | | | C | | | | | | | ----B-------- | | | | | | | | | | | | ------------- | | | | | | | | | | | | ------------D And the letters A-D represent the following points in "real world coordinates":: A = [-.5, -1.] B = [ .5, 1.] C = [ 0., 0.] D = [ 2.5, 5.] """ from __future__ import division, print_function, absolute_import from functools import wraps from warnings import warn from nibabel.affines import apply_affine from scipy.spatial.distance import cdist from dipy.core.geometry import dist_to_corner from collections import defaultdict from dipy.utils.six.moves import xrange, map import numpy as np from numpy import (asarray, ceil, dot, empty, eye, sqrt) from dipy.io.bvectxt import ornt_mapping from dipy.tracking import metrics from dipy.tracking.vox2track import _streamlines_in_mask from dipy.testing import setup_test # Import helper functions shared with vox2track from dipy.tracking._utils import (_mapping_to_voxel, _to_voxel_coordinates) from dipy.io.bvectxt import orientation_from_string import nibabel as nib def _rmi(index, dims): """An alternate implementation of numpy.ravel_multi_index for older versions of numpy. Assumes array layout is C contiguous """ # Upcast to integer type capable of holding largest array index index = np.asarray(index, dtype=np.intp) dims = np.asarray(dims) if index.ndim > 2: raise ValueError("Index should be 1 or 2-D") elif index.ndim == 2: index = index.T if (index >= dims).any(): raise ValueError("Index exceeds dimensions") strides = np.r_[dims[:0:-1].cumprod()[::-1], 1] return (strides * index).sum(-1) try: from numpy import ravel_multi_index except ImportError: ravel_multi_index = _rmi def density_map(streamlines, vol_dims, voxel_size=None, affine=None): """Counts the number of unique streamlines that pass through each voxel. Parameters ---------- streamlines : iterable A sequence of streamlines. vol_dims : 3 ints The shape of the volume to be returned containing the streamlines counts voxel_size : This argument is deprecated. affine : array_like (4, 4) The mapping from voxel coordinates to streamline points. Returns ------- image_volume : ndarray, shape=vol_dims The number of streamline points in each voxel of volume. Raises ------ IndexError When the points of the streamlines lie outside of the return volume. Notes ----- A streamline can pass through a voxel even if one of the points of the streamline does not lie in the voxel. For example a step from [0,0,0] to [0,0,2] passes through [0,0,1]. Consider subsegmenting the streamlines when the edges of the voxels are smaller than the steps of the streamlines. 
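Examples
--------
A minimal illustration, assuming an identity affine so that the streamline
points are already expressed in voxel coordinates (adapted from the usage
exercised in the test-suite above):

>>> streamlines = [np.array([[0., 0., 0.], [1., 1., 1.], [2., 2., 2.]])]
>>> dm = density_map(streamlines, vol_dims=(3, 3, 3), affine=np.eye(4))
>>> int(dm[0, 0, 0]), int(dm[1, 1, 1]), int(dm[0, 1, 0])
(1, 1, 0)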
""" lin_T, offset = _mapping_to_voxel(affine, voxel_size) counts = np.zeros(vol_dims, 'int') for sl in streamlines: inds = _to_voxel_coordinates(sl, lin_T, offset) i, j, k = inds.T # this takes advantage of the fact that numpy's += operator only # acts once even if there are repeats in inds counts[i, j, k] += 1 return counts def connectivity_matrix(streamlines, label_volume, voxel_size=None, affine=None, symmetric=True, return_mapping=False, mapping_as_streamlines=False): """Counts the streamlines that start and end at each label pair. Parameters ---------- streamlines : sequence A sequence of streamlines. label_volume : ndarray An image volume with an integer data type, where the intensities in the volume map to anatomical structures. voxel_size : This argument is deprecated. affine : array_like (4, 4) The mapping from voxel coordinates to streamline coordinates. symmetric : bool, False by default Symmetric means we don't distinguish between start and end points. If symmetric is True, ``matrix[i, j] == matrix[j, i]``. return_mapping : bool, False by default If True, a mapping is returned which maps matrix indices to streamlines. mapping_as_streamlines : bool, False by default If True voxel indices map to lists of streamline objects. Otherwise voxel indices map to lists of integers. Returns ------- matrix : ndarray The number of connection between each pair of regions in `label_volume`. mapping : defaultdict(list) ``mapping[i, j]`` returns all the streamlines that connect region `i` to region `j`. If `symmetric` is True mapping will only have one key for each start end pair such that if ``i < j`` mapping will have key ``(i, j)`` but not key ``(j, i)``. """ # Error checking on label_volume kind = label_volume.dtype.kind labels_positive = ((kind == 'u') or ((kind == 'i') and (label_volume.min() >= 0))) valid_label_volume = (labels_positive and label_volume.ndim == 3) if not valid_label_volume: raise ValueError("label_volume must be a 3d integer array with" "non-negative label values") # If streamlines is an iterators if return_mapping and mapping_as_streamlines: streamlines = list(streamlines) # take the first and last point of each streamline endpoints = [sl[0::len(sl)-1] for sl in streamlines] # Map the streamlines coordinates to voxel coordinates lin_T, offset = _mapping_to_voxel(affine, voxel_size) endpoints = _to_voxel_coordinates(endpoints, lin_T, offset) # get labels for label_volume i, j, k = endpoints.T endlabels = label_volume[i, j, k] if symmetric: endlabels.sort(0) mx = label_volume.max() + 1 matrix = ndbincount(endlabels, shape=(mx, mx)) if symmetric: matrix = np.maximum(matrix, matrix.T) if return_mapping: mapping = defaultdict(list) for i, (a, b) in enumerate(endlabels.T): mapping[a, b].append(i) # Replace each list of indices with the streamlines they index if mapping_as_streamlines: for key in mapping: mapping[key] = [streamlines[i] for i in mapping[key]] # Return the mapping matrix and the mapping return matrix, mapping else: return matrix def ndbincount(x, weights=None, shape=None): """Like bincount, but for nd-indicies. 
Parameters ---------- x : array_like (N, M) M indices to a an Nd-array weights : array_like (M,), optional Weights associated with indices shape : optional the shape of the output """ x = np.asarray(x) if shape is None: shape = x.max(1) + 1 x = ravel_multi_index(x, shape) out = np.bincount(x, weights, minlength=np.prod(shape)) out.shape = shape return out def reduce_labels(label_volume): """Reduces an array of labels to the integers from 0 to n with smallest possible n. Examples -------- >>> labels = np.array([[1, 3, 9], ... [1, 3, 8], ... [1, 3, 7]]) >>> new_labels, lookup = reduce_labels(labels) >>> lookup array([1, 3, 7, 8, 9]) >>> new_labels #doctest: +ELLIPSIS array([[0, 1, 4], [0, 1, 3], [0, 1, 2]]...) >>> (lookup[new_labels] == labels).all() True """ lookup_table = np.unique(label_volume) label_volume = lookup_table.searchsorted(label_volume) return label_volume, lookup_table def subsegment(streamlines, max_segment_length): """Splits the segments of the streamlines into small segments. Replaces each segment of each of the streamlines with the smallest possible number of equally sized smaller segments such that no segment is longer than max_segment_length. Among other things, this can useful for getting streamline counts on a grid that is smaller than the length of the streamline segments. Parameters ---------- streamlines : sequence of ndarrays The streamlines to be subsegmented. max_segment_length : float The longest allowable segment length. Returns ------- output_streamlines : generator A set of streamlines. Notes ----- Segments of 0 length are removed. If unchanged Examples -------- >>> streamlines = [np.array([[0,0,0],[2,0,0],[5,0,0]])] >>> list(subsegment(streamlines, 3.)) [array([[ 0., 0., 0.], [ 2., 0., 0.], [ 5., 0., 0.]])] >>> list(subsegment(streamlines, 1)) [array([[ 0., 0., 0.], [ 1., 0., 0.], [ 2., 0., 0.], [ 3., 0., 0.], [ 4., 0., 0.], [ 5., 0., 0.]])] >>> list(subsegment(streamlines, 1.6)) [array([[ 0. , 0. , 0. ], [ 1. , 0. , 0. ], [ 2. , 0. , 0. ], [ 3.5, 0. , 0. ], [ 5. , 0. , 0. ]])] """ for sl in streamlines: diff = (sl[1:] - sl[:-1]) length = sqrt((diff*diff).sum(-1)) num_segments = ceil(length/max_segment_length).astype('int') output_sl = empty((num_segments.sum()+1, 3), 'float') output_sl[0] = sl[0] count = 1 for ii in xrange(len(num_segments)): ns = num_segments[ii] if ns == 1: output_sl[count] = sl[ii+1] count += 1 elif ns > 1: small_d = diff[ii]/ns point = sl[ii] for jj in xrange(ns): point = point + small_d output_sl[count] = point count += 1 elif ns == 0: pass # repeated point else: # this should never happen because ns should be a positive # int assert(ns >= 0) yield output_sl def seeds_from_mask(mask, density=[1, 1, 1], voxel_size=None, affine=None): """Creates seeds for fiber tracking from a binary mask. Seeds points are placed evenly distributed in all voxels of ``mask`` which are ``True``. Parameters ---------- mask : binary 3d array_like A binary array specifying where to place the seeds for fiber tracking. density : int or array_like (3,) Specifies the number of seeds to place along each dimension. A ``density`` of `2` is the same as ``[2, 2, 2]`` and will result in a total of 8 seeds per voxel. voxel_size : This argument is deprecated. affine : array, (4, 4) The mapping between voxel indices and the point space for seeds. A seed point at the center the voxel ``[i, j, k]`` will be represented as ``[x, y, z]`` where ``[x, y, z, 1] == np.dot(affine, [i, j, k , 1])``. 
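Returns
-------
seeds : array (N, 3)
    Seed points, one per row. N equals the number of True voxels in
    ``mask`` times ``np.prod(density)``. The points are expressed in the
    space defined by ``affine`` (plain voxel coordinates if neither
    ``affine`` nor ``voxel_size`` is given).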
See Also -------- random_seeds_from_mask Raises ------ ValueError When ``mask`` is not a three-dimensional array Examples -------- >>> mask = np.zeros((3,3,3), 'bool') >>> mask[0,0,0] = 1 >>> seeds_from_mask(mask, [1,1,1], [1,1,1]) array([[ 0.5, 0.5, 0.5]]) >>> seeds_from_mask(mask, [1,2,3], [1,1,1]) array([[ 0.5 , 0.25 , 0.16666667], [ 0.5 , 0.75 , 0.16666667], [ 0.5 , 0.25 , 0.5 ], [ 0.5 , 0.75 , 0.5 ], [ 0.5 , 0.25 , 0.83333333], [ 0.5 , 0.75 , 0.83333333]]) >>> mask[0,1,2] = 1 >>> seeds_from_mask(mask, [1,1,2], [1.1,1.1,2.5]) array([[ 0.55 , 0.55 , 0.625], [ 0.55 , 0.55 , 1.875], [ 0.55 , 1.65 , 5.625], [ 0.55 , 1.65 , 6.875]]) """ mask = np.array(mask, dtype=bool, copy=False, ndmin=3) if mask.ndim != 3: raise ValueError('mask cannot be more than 3d') density = asarray(density, int) if density.size == 1: d = density density = np.empty(3, dtype=int) density.fill(d) elif density.shape != (3,): raise ValueError("density should be in integer array of shape (3,)") # Grid of points between -.5 and .5, centered at 0, with given density grid = np.mgrid[0:density[0], 0:density[1], 0:density[2]] grid = grid.T.reshape((-1, 3)) grid = grid / density grid += (.5 / density - .5) where = np.argwhere(mask) # Add the grid of points to each voxel in mask seeds = where[:, np.newaxis, :] + grid[np.newaxis, :, :] seeds = seeds.reshape((-1, 3)) # Apply the spatial transform if affine is not None: # Use affine to move seeds into real world coordinates seeds = np.dot(seeds, affine[:3, :3].T) seeds += affine[:3, 3] elif voxel_size is not None: # Use voxel_size to move seeds into trackvis space seeds += .5 seeds *= voxel_size return seeds def random_seeds_from_mask(mask, seeds_count=1, seed_count_per_voxel=True, affine=None): """Creates randomly placed seeds for fiber tracking from a binary mask. Seeds points are placed randomly distributed in voxels of ``mask`` which are ``True``. If ``seed_count_per_voxel`` is ``True``, this function is similar to ``seeds_from_mask()``, with the difference that instead of evenly distributing the seeds, it randomly places the seeds within the voxels specified by the ``mask``. The initial random conditions can be set using ``numpy.random.seed(...)``, prior to calling this function. Parameters ---------- mask : binary 3d array_like A binary array specifying where to place the seeds for fiber tracking. seeds_count : int The number of seeds to generate. If ``seed_count_per_voxel`` is True, specifies the number of seeds to place in each voxel. Otherwise, specifies the total number of seeds to place in the mask. seed_count_per_voxel: bool If True, seeds_count is per voxel, else seeds_count is the total number of seeds. affine : array, (4, 4) The mapping between voxel indices and the point space for seeds. A seed point at the center the voxel ``[i, j, k]`` will be represented as ``[x, y, z]`` where ``[x, y, z, 1] == np.dot(affine, [i, j, k , 1])``. 
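Returns
-------
seeds : array (N, 3)
    Randomly placed seed points. N is ``seeds_count`` when
    ``seed_count_per_voxel`` is False, and ``seeds_count`` times the
    number of True voxels in ``mask`` otherwise. The points are expressed
    in the space defined by ``affine`` (voxel coordinates if ``affine``
    is None).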
See Also -------- seeds_from_mask Raises ------ ValueError When ``mask`` is not a three-dimensional array Examples -------- >>> mask = np.zeros((3,3,3), 'bool') >>> mask[0,0,0] = 1 >>> np.random.seed(1) >>> random_seeds_from_mask(mask, seeds_count=1, seed_count_per_voxel=True) array([[-0.082978 , 0.22032449, -0.49988563]]) >>> random_seeds_from_mask(mask, seeds_count=6, seed_count_per_voxel=True) array([[-0.19766743, -0.35324411, -0.40766141], [-0.31373979, -0.15443927, -0.10323253], [ 0.03881673, -0.08080549, 0.1852195 ], [-0.29554775, 0.37811744, -0.47261241], [ 0.17046751, -0.0826952 , 0.05868983], [-0.35961306, -0.30189851, 0.30074457]]) >>> mask[0,1,2] = 1 >>> random_seeds_from_mask(mask, seeds_count=2, seed_count_per_voxel=True) array([[ 0.46826158, -0.18657582, 0.19232262], [ 0.37638915, 0.39460666, -0.41495579], [-0.46094522, 0.66983042, 2.3781425 ], [-0.40165317, 0.92110763, 2.45788953]]) """ mask = np.array(mask, dtype=bool, copy=False, ndmin=3) if mask.ndim != 3: raise ValueError('mask cannot be more than 3d') where = np.argwhere(mask) num_voxels = len(where) if not seed_count_per_voxel: # Generate enough seeds per voxel seeds_per_voxel = seeds_count // num_voxels + 1 else: seeds_per_voxel = seeds_count # Generate as many random triplets as the number of seeds needed grid = np.random.random([seeds_per_voxel * num_voxels, 3]) # Repeat elements of 'where' so that it can be added to grid where = np.repeat(where, seeds_per_voxel, axis=0) seeds = where + grid - .5 seeds = asarray(seeds) if not seed_count_per_voxel: # Randomize the seeds and select the requested amount np.random.shuffle(seeds) seeds = seeds[:seeds_count] # Apply the spatial transform if affine is not None: # Use affine to move seeds into real world coordinates seeds = np.dot(seeds, affine[:3, :3].T) seeds += affine[:3, 3] return seeds def _with_initialize(generator): """Allows one to write a generator with initialization code. All code up to the first yield is run as soon as the generator function is called and the first yield value is ignored. """ @wraps(generator) def helper(*args, **kwargs): gen = generator(*args, **kwargs) next(gen) return gen return helper @_with_initialize def target(streamlines, target_mask, affine, include=True): """Filters streamlines based on whether or not they pass through an ROI. Parameters ---------- streamlines : iterable A sequence of streamlines. Each streamline should be a (N, 3) array, where N is the length of the streamline. target_mask : array-like A mask used as a target. Non-zero values are considered to be within the target region. affine : array (4, 4) The affine transform from voxel indices to streamline points. include : bool, default True If True, streamlines passing through `target_mask` are kept. If False, the streamlines not passing through `target_mask` are kept. Returns ------- streamlines : generator A sequence of streamlines that pass through `target_mask`. Raises ------ ValueError When the points of the streamlines lie outside of the `target_mask`. 
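Examples
--------
A minimal sketch with a toy mask and an identity affine; only the
streamline entering the marked voxel is kept:

>>> streamlines = [np.array([[0., 0., 0.], [1., 0., 0.], [2., 0., 0.]]),
...                np.array([[5., 5., 5.], [6., 6., 6.]])]
>>> mask = np.zeros((8, 8, 8), dtype=bool)
>>> mask[2, 0, 0] = True
>>> len(list(target(streamlines, mask, affine=np.eye(4))))
1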
See Also -------- density_map """ target_mask = np.array(target_mask, dtype=bool, copy=True) lin_T, offset = _mapping_to_voxel(affine, voxel_size=None) yield # End of initialization for sl in streamlines: try: ind = _to_voxel_coordinates(sl, lin_T, offset) i, j, k = ind.T state = target_mask[i, j, k] except IndexError: raise ValueError("streamlines points are outside of target_mask") if state.any() == include: yield sl @_with_initialize def target_line_based(streamlines, target_mask, affine=None, include=True): """Filters streamlines based on whether or not they pass through a ROI, using a line-based algorithm. Mostly used as a remplacement of `target` for compressed streamlines. This function never returns single-point streamlines, wathever the value of `include`. Parameters ---------- streamlines : iterable A sequence of streamlines. Each streamline should be a (N, 3) array, where N is the length of the streamline. target_mask : array-like A mask used as a target. Non-zero values are considered to be within the target region. affine : array (4, 4) The affine transform from voxel indices to streamline points. include : bool, default True If True, streamlines passing through `target_mask` are kept. If False, the streamlines not passing through `target_mask` are kept. Returns ------- streamlines : generator A sequence of streamlines that pass through `target_mask`. References ---------- [Bresenham5] Bresenham, Jack Elton. "Algorithm for computer control of a digital plotter", IBM Systems Journal, vol 4, no. 1, 1965. [Houde15] Houde et al. How to avoid biased streamlines-based metrics for streamlines with variable step sizes, ISMRM 2015. See Also -------- dipy.tracking.utils.density_map dipy.tracking.streamline.compress_streamlines """ target_mask = np.array(target_mask, dtype=np.uint8, copy=True) lin_T, offset = _mapping_to_voxel(affine, voxel_size=None) streamline_index = _streamlines_in_mask( streamlines, target_mask, lin_T, offset) yield # End of initialization for idx in np.where(streamline_index == [0, 1][include])[0]: yield streamlines[idx] def streamline_near_roi(streamline, roi_coords, tol, mode='any'): """Is a streamline near an ROI. Implements the inner loops of the :func:`near_roi` function. Parameters ---------- streamline : array, shape (N, 3) A single streamline roi_coords : array, shape (M, 3) ROI coordinates transformed to the streamline coordinate frame. tol : float Distance (in the units of the streamlines, usually mm). If any coordinate in the streamline is within this distance from the center of any voxel in the ROI, this function returns True. mode : string One of {"any", "all", "either_end", "both_end"}, where return True if: "any" : any point is within tol from ROI. "all" : all points are within tol from ROI. "either_end" : either of the end-points is within tol from ROI "both_end" : both end points are within tol from ROI. Returns ------- out : boolean """ if len(roi_coords) == 0: return False if mode == "any" or mode == "all": s = streamline elif mode == "either_end" or mode == "both_end": # 'end' modes, use a streamline with 2 nodes: s = np.vstack([streamline[0], streamline[-1]]) else: e_s = "For determining relationship to an array, you can use " e_s += "one of the following modes: 'any', 'all', 'both_end'," e_s += "'either_end', but you entered: %s." 
% mode raise ValueError(e_s) dist = cdist(s, roi_coords, 'euclidean') if mode == "any" or mode == "either_end": return np.min(dist) <= tol else: return np.all(np.min(dist, -1) <= tol) def near_roi(streamlines, region_of_interest, affine=None, tol=None, mode="any"): """Provide filtering criteria for a set of streamlines based on whether they fall within a tolerance distance from an ROI Parameters ---------- streamlines : list or generator A sequence of streamlines. Each streamline should be a (N, 3) array, where N is the length of the streamline. region_of_interest : ndarray A mask used as a target. Non-zero values are considered to be within the target region. affine : ndarray Affine transformation from voxels to streamlines. Default: identity. tol : float Distance (in the units of the streamlines, usually mm). If any coordinate in the streamline is within this distance from the center of any voxel in the ROI, the filtering criterion is set to True for this streamline, otherwise False. Defaults to the distance between the center of each voxel and the corner of the voxel. mode : string, optional One of {"any", "all", "either_end", "both_end"}, where return True if: "any" : any point is within tol from ROI. Default. "all" : all points are within tol from ROI. "either_end" : either of the end-points is within tol from ROI "both_end" : both end points are within tol from ROI. Returns ------- 1D array of boolean dtype, shape (len(streamlines), ) This contains `True` for indices corresponding to each streamline that passes within a tolerance distance from the target ROI, `False` otherwise. """ if affine is None: affine = np.eye(4) dtc = dist_to_corner(affine) if tol is None: tol = dtc elif tol < dtc: w_s = "Tolerance input provided would create gaps in your" w_s += " inclusion ROI. Setting to: %s" % dtc warn(w_s) tol = dtc roi_coords = np.array(np.where(region_of_interest)).T x_roi_coords = apply_affine(affine, roi_coords) # If it's already a list, we can save time by preallocating the output if isinstance(streamlines, list): out = np.zeros(len(streamlines), dtype=bool) for ii, sl in enumerate(streamlines): out[ii] = streamline_near_roi(sl, x_roi_coords, tol=tol, mode=mode) return out # If it's a generator, we'll need to generate the output into a list else: out = [] for sl in streamlines: out.append(streamline_near_roi(sl, x_roi_coords, tol=tol, mode=mode)) return(np.array(out, dtype=bool)) def reorder_voxels_affine(input_ornt, output_ornt, shape, voxel_size): """Calculates a linear transformation equivalent to changing voxel order. Calculates a linear tranformation A such that [a, b, c, 1] = A[x, y, z, 1]. where [x, y, z] is a point in the coordinate system defined by input_ornt and [a, b, c] is the same point in the coordinate system defined by output_ornt. Parameters ---------- input_ornt : array (n, 2) A description of the orientation of a point in n-space. See ``nibabel.orientation`` or ``dipy.io.bvectxt`` for more information. output_ornt : array (n, 2) A description of the orientation of a point in n-space. shape : tuple of int Shape of the image in the input orientation. ``map = ornt_mapping(input_ornt, output_ornt)`` voxel_size : int Voxel size of the image in the input orientation. Returns ------- A : array (n+1, n+1) Affine matrix of the transformation between input_ornt and output_ornt. 
See Also -------- nibabel.orientation dipy.io.bvectxt.orientation_to_string dipy.io.bvectxt.orientation_from_string """ map = ornt_mapping(input_ornt, output_ornt) if input_ornt.shape != output_ornt.shape: raise ValueError("input_ornt and output_ornt must have the same shape") affine = eye(len(input_ornt)+1) affine[:3] = affine[map[:, 0]] corner = asarray(voxel_size) * shape affine[:3, 3] = (map[:, 1] < 0) * corner[map[:, 0]] # multiply the rows of affine to get right sign affine[:3, :3] *= map[:, 1:] return affine def affine_from_fsl_mat_file(mat_affine, input_voxsz, output_voxsz): """ Converts an affine matrix from flirt (FSLdot) and a given voxel size for input and output images and returns an adjusted affine matrix for trackvis. Parameters ---------- mat_affine : array of shape (4, 4) An FSL flirt affine. input_voxsz : array of shape (3,) The input image voxel dimensions. output_voxsz : array of shape (3,) Returns ------- affine : array of shape (4, 4) A trackvis-compatible affine. """ # TODO the affine returned by this function uses a different reference than # the nifti-style index coordinates dipy has adopted as a convention. We # should either fix this function in a backward compatible way or replace # and deprecate it. input_voxsz = asarray(input_voxsz) output_voxsz = asarray(output_voxsz) shift = eye(4) shift[:3, 3] = -input_voxsz / 2 affine = dot(mat_affine, shift) affine[:3, 3] += output_voxsz / 2 return affine def affine_for_trackvis(voxel_size, voxel_order=None, dim=None, ref_img_voxel_order=None): """Returns an affine which maps points for voxel indices to trackvis space. Parameters ---------- voxel_size : array (3,) The sizes of the voxels in the reference image. Returns ------- affine : array (4, 4) Mapping from the voxel indices of the reference image to trackvis space. """ if ((voxel_order is not None or dim is not None or ref_img_voxel_order is not None)): raise NotImplemented # Create affine voxel_size = np.asarray(voxel_size) affine = np.eye(4) affine[[0, 1, 2], [0, 1, 2]] = voxel_size affine[:3, 3] = voxel_size / 2. return affine def length(streamlines, affine=None): """ Calculate the lengths of many streamlines in a bundle. Parameters ---------- streamlines : list Each item in the list is an array with 3D coordinates of a streamline. affine : 4 x 4 array An affine transformation to move the fibers by, before computing their lengths. Returns ------- Iterator object which then computes the length of each streamline in the bundle, upon iteration. """ if affine is not None: streamlines = move_streamlines(streamlines, affine) return map(metrics.length, streamlines) def unique_rows(in_array, dtype='f4'): """ This (quickly) finds the unique rows in an array Parameters ---------- in_array: ndarray The array for which the unique rows should be found dtype: str, optional This determines the intermediate representation used for the values. Should at least preserve the values of the input array. Returns ------- u_return: ndarray Array with the unique rows of the original array. """ # Sort input array order = np.lexsort(in_array.T) # Apply sort and compare neighbors x = in_array[order] diff_x = np.ones(len(x), dtype=bool) diff_x[1:] = (x[1:] != x[:-1]).any(-1) # Reverse sort and return unique rows un_order = order.argsort() diff_in_array = diff_x[un_order] return in_array[diff_in_array] @_with_initialize def move_streamlines(streamlines, output_space, input_space=None): """Applies a linear transformation, given by affine, to streamlines. 
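If ``input_space`` is given, the applied transform is ``output_space``
composed with the inverse of ``input_space``; each streamline point is
mapped through the resulting affine.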
Parameters ---------- streamlines : sequence A set of streamlines to be transformed. output_space : array (4, 4) An affine matrix describing the target space to which the streamlines will be transformed. input_space : array (4, 4), optional An affine matrix describing the current space of the streamlines, if no ``input_space`` is specified, it's assumed the streamlines are in the reference space. The reference space is the same as the space associated with the affine matrix ``np.eye(4)``. Returns ------- streamlines : generator A sequence of transformed streamlines. """ if input_space is None: affine = output_space else: inv = np.linalg.inv(input_space) affine = np.dot(output_space, inv) lin_T = affine[:3, :3].T.copy() offset = affine[:3, 3].copy() yield # End of initialization for sl in streamlines: yield np.dot(sl, lin_T) + offset def reduce_rois(rois, include): """Reduce multiple ROIs to one inclusion and one exclusion ROI Parameters ---------- rois : list or ndarray A list of 3D arrays, each with shape (x, y, z) corresponding to the shape of the brain volume, or a 4D array with shape (n_rois, x, y, z). Non-zeros in each volume are considered to be within the region. include : array or list A list or 1D array of boolean marking inclusion or exclusion criteria. Returns ------- include_roi : boolean 3D array An array marking the inclusion mask. exclude_roi : boolean 3D array An array marking the exclusion mask Note ---- The include_roi and exclude_roi can be used to perfom the operation: "(A or B or ...) and not (X or Y or ...)", where A, B are inclusion regions and X, Y are exclusion regions. """ include_roi = np.zeros(rois[0].shape, dtype=bool) exclude_roi = np.zeros(rois[0].shape, dtype=bool) for i in range(len(rois)): if include[i]: include_roi |= rois[i] else: exclude_roi |= rois[i] return include_roi, exclude_roi def flexi_tvis_affine(sl_vox_order, grid_affine, dim, voxel_size): """ Computes the mapping from voxel indices to streamline points, reconciling streamlines and grids with different voxel orders Parameters ---------- sl_vox_order : string of length 3 a string that describes the voxel order of the streamlines (ex: LPS) grid_affine : array (4, 4), An affine matrix describing the current space of the grid in relation to RAS+ scanner space dim : tuple of length 3 dimension of the grid voxel_size : array (3,0) voxel size of the grid Returns ------- flexi_tvis_aff : this affine maps between a grid and a trackvis space """ sl_ornt = orientation_from_string(str(sl_vox_order)) grid_ornt = nib.io_orientation(grid_affine) reorder_grid = reorder_voxels_affine( grid_ornt, sl_ornt, np.array(dim)-1, np.array([1,1,1])) tvis_aff = affine_for_trackvis(voxel_size) flexi_tvis_aff = np.dot(tvis_aff, reorder_grid) return flexi_tvis_aff def get_flexi_tvis_affine(tvis_hdr, nii_aff): """ Computes the mapping from voxel indices to streamline points, reconciling streamlines and grids with different voxel orders Parameters ---------- tvis_hdr : header from a trackvis file nii_aff : array (4, 4), An affine matrix describing the current space of the grid in relation to RAS+ scanner space nii_data : nd array 3D array, each with shape (x, y, z) corresponding to the shape of the brain volume. 
Returns ------- flexi_tvis_aff : array (4,4) this affine maps between a grid and a trackvis space """ sl_vox_order = tvis_hdr['voxel_order'] voxel_size = tvis_hdr['voxel_size'] dim = tvis_hdr['dim'] flexi_tvis_aff = flexi_tvis_affine(sl_vox_order, nii_aff, dim, voxel_size) return flexi_tvis_aff def _min_at(a, index, value): index = np.asarray(index) sort_keys = [value] + list(index) order = np.lexsort(sort_keys) index = index[:, order] value = value[order] uniq = np.ones(index.shape[1], dtype=bool) uniq[1:] = (index[:, 1:] != index[:, :-1]).any(axis=0) index = index[:, uniq] value = value[uniq] a[tuple(index)] = np.minimum(a[tuple(index)], value) try: minimum_at = np.minimum.at except AttributeError: minimum_at = _min_at def path_length(streamlines, aoi, affine, fill_value=-1): """ Computes the shortest path, along any streamline, between aoi and each voxel. Parameters ---------- streamlines : seq of (N, 3) arrays A sequence of streamlines, path length is given in mm along the curve of the streamline. aoi : array, 3d A mask (binary array) of voxels from which to start computing distance. affine : array (4, 4) The mapping from voxel indices to streamline points. fill_value : float The value of voxel in the path length map that are not connected to the aoi. Returns ------- plm : array Same shape as aoi. The minimum distance between every point and aoi along the path of a streamline. """ aoi = np.asarray(aoi, dtype=bool) # path length map plm = np.empty(aoi.shape, dtype=float) plm[:] = np.inf lin_T, offset = _mapping_to_voxel(affine, None) for sl in streamlines: seg_ind = _to_voxel_coordinates(sl, lin_T, offset) i, j, k = seg_ind.T # Get where streamlines passes through aoi breaks = aoi[i, j, k] # Where streamline passes aoi, dist is zero i, j, k = seg_ind[breaks].T plm[i, j, k] = 0 # If a streamline crosses aoi >1, re-start counting distance for each for seg in _as_segments(sl, breaks): i, j, k = _to_voxel_coordinates(seg[1:], lin_T, offset).T # Get the distance, in mm, between streamline points segment_length = np.sqrt(((seg[1:] - seg[:-1]) ** 2).sum(1)) dist = segment_length.cumsum() # Updates path length map with shorter distances minimum_at(plm, (i, j, k), dist) if fill_value != np.inf: plm = np.where(plm == np.inf, fill_value, plm) return plm def _part_segments(streamline, break_points): segments = np.split(streamline, break_points.nonzero()[0]) # Skip first segment, all points before first break # first segment is empty when break_points[0] == 0 segments = segments[1:] for each in segments: if len(each) > 1: yield each def _as_segments(streamline, break_points): for seg in _part_segments(streamline, break_points): yield seg for seg in _part_segments(streamline[::-1], break_points[::-1]): yield seg dipy-0.13.0/dipy/tracking/vox2track.pyx000066400000000000000000000414251317371701200200040ustar00rootroot00000000000000# A type of -*- python -*- file """This module contains the parts of dipy.tracking.utils that need to be implemented in cython. 
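In particular it provides ``streamline_mapping``, ``track_counts`` and the
mask-filtering helper used by ``dipy.tracking.utils.target_line_based``.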
""" import cython cdef extern from "dpy_math.h" nogil: double fmin(double x, double y) from libc.math cimport ceil, floor, fabs, sqrt import numpy as np cimport numpy as cnp from ._utils import _mapping_to_voxel, _to_voxel_coordinates from ..utils.six.moves import xrange @cython.boundscheck(False) @cython.wraparound(False) @cython.profile(False) def _voxel2streamline(sl, cnp.ndarray[cnp.npy_intp, ndim=2] unique_idx): """ Maps voxels to streamlines and streamlines to voxels, for setting up the LiFE equations matrix Parameters ---------- sl : list A collection of streamlines, each n by 3, with n being the number of nodes in the fiber. unique_idx : array. The unique indices in the streamlines Returns ------- v2f, v2fn : tuple of dicts The first dict in the tuple answers the question: Given a voxel (from the unique indices in this model), which fibers pass through it? The second answers the question: Given a streamline, for each voxel that this streamline passes through, which nodes of that streamline are in that voxel? """ # Define local counters: cdef int s_idx, node_idx, voxel_id, ii cdef dict vox_dict = {} for ii in range(len(unique_idx)): vox = unique_idx[ii] vox_dict[vox[0], vox[1], vox[2]] = ii # Outputs are these dicts: cdef dict v2f = {} cdef dict v2fn = {} # In each fiber: for s_idx in range(len(sl)): sl_as_idx = np.round(sl[s_idx]).astype(int) v2fn[s_idx] = {} # In each voxel present in there: for node_idx in range(len(sl_as_idx)): node = sl_as_idx[node_idx] # What serial number is this voxel in the unique voxel indices: voxel_id = vox_dict[node[0], node[1], node[2]] # Add that combination to the dict: if voxel_id in v2f: if s_idx not in v2f[voxel_id]: v2f[voxel_id].append(s_idx) else: v2f[voxel_id] = [s_idx] # All the nodes going through this voxel get its number: if voxel_id in v2fn[s_idx]: v2fn[s_idx][voxel_id].append(node_idx) else: v2fn[s_idx][voxel_id] = [node_idx] return v2f ,v2fn def streamline_mapping(streamlines, voxel_size=None, affine=None, mapping_as_streamlines=False): """Creates a mapping from voxel indices to streamlines. Returns a dictionary where each key is a 3d voxel index and the associated value is a list of the streamlines that pass through that voxel. Parameters ---------- streamlines : sequence A sequence of streamlines. voxel_size : array_like (3,), optional The size of the voxels in the image volume. This is ignored if affine is set. affine : array_like (4, 4), optional The mapping from voxel coordinates to streamline coordinates. If neither `affine` or `voxel_size` is set, the streamline values are assumed to be in voxel coordinates. IE ``[0, 0, 0]`` is the center of the first voxel and the voxel size is ``[1, 1, 1]``. mapping_as_streamlines : bool, optional, False by default If True voxel indices map to lists of streamline objects. Otherwise voxel indices map to lists of integers. Returns ------- mapping : defaultdict(list) A mapping from voxel indices to the streamlines that pass through that voxel. Examples -------- >>> streamlines = [np.array([[0., 0., 0.], ... [1., 1., 1.], ... [2., 3., 4.]]), ... np.array([[0., 0., 0.], ... [1., 2., 3.]])] >>> mapping = streamline_mapping(streamlines, (1, 1, 1)) >>> mapping[0, 0, 0] [0, 1] >>> mapping[1, 1, 1] [0] >>> mapping[1, 2, 3] [1] >>> mapping.get((3, 2, 1), 'no streamlines') 'no streamlines' >>> mapping = streamline_mapping(streamlines, (1, 1, 1), ... 
mapping_as_streamlines=True) >>> mapping[1, 2, 3][0] is streamlines[1] True """ cdef: cnp.ndarray[cnp.int_t, ndim=2, mode='strided'] voxel_indices lin, offset = _mapping_to_voxel(affine, voxel_size) if mapping_as_streamlines: streamlines = list(streamlines) mapping = {} for i, sl in enumerate(streamlines): voxel_indices = _to_voxel_coordinates(sl, lin, offset) # Get the unique voxels every streamline passes through uniq_points = set() for j in range(voxel_indices.shape[0]): point = (voxel_indices[j, 0], voxel_indices[j, 1], voxel_indices[j, 2]) uniq_points.add(point) # Add the index of this streamline for each uniq voxel for point in uniq_points: if point in mapping: mapping[point].append(i) else: mapping[point] = [i] # If mapping_as_streamlines replace ids with streamlines if mapping_as_streamlines: for key in mapping: mapping[key] = [streamlines[i] for i in mapping[key]] return mapping @cython.boundscheck(False) @cython.wraparound(False) cdef inline cnp.double_t norm(cnp.double_t x, cnp.double_t y, cnp.double_t z) nogil: cdef cnp.double_t val = sqrt(x*x + y*y + z*z) return val # Changing this to a memview was slower. @cython.boundscheck(False) @cython.wraparound(False) cdef inline void c_get_closest_edge(cnp.double_t* p, cnp.double_t* direction, cnp.double_t* edge, double eps=1.) nogil: edge[0] = floor(p[0] + eps) if direction[0] >= 0.0 else ceil(p[0] - eps) edge[1] = floor(p[1] + eps) if direction[1] >= 0.0 else ceil(p[1] - eps) edge[2] = floor(p[2] + eps) if direction[2] >= 0.0 else ceil(p[2] - eps) @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) def _streamlines_in_mask(list streamlines, cnp.uint8_t[:,:,:] mask, lin_T, offset): """Filters streamlines based on whether or not they pass through a ROI, using a line-based algorithm for compressed streamlines. This function is private because it's supposed to be called only by tracking.utils.target_line_based. Parameters ---------- streamlines : sequence A sequence of streamlines. mask : array-like (uint8) A mask used as a target. Non-zero values are considered to be within the target region. lin_T : array (3, 3) Transpose of the linear part of the mapping to voxel space. Obtained with `_mapping_to_voxel`. offset : array or scalar Mapping to voxel space. Obtained with `_mapping_to_voxel`. Returns ------- in_mask : 1D array of bool (uint8), one for each streamline. 0 if passing through mask, 1 otherwise (2 for single-point streamline) """ cdef cnp.double_t[:,:] voxel_indices cdef cnp.npy_intp nb_streamlines = len(streamlines) cdef cnp.uint8_t[:] in_mask = np.zeros(nb_streamlines, dtype=np.uint8) cdef cnp.npy_intp streamline_idx for streamline_idx in range(nb_streamlines): # Can't call _to_voxel_coordinates because it casts to int voxel_indices = np.dot(streamlines[streamline_idx], lin_T) + offset in_mask[streamline_idx] = _streamline_in_mask(voxel_indices, mask) return np.asarray(in_mask) @cython.boundscheck(False) @cython.wraparound(False) @cython.cdivision(True) cdef cnp.npy_intp _streamline_in_mask( cnp.double_t[:,:] streamline, cnp.uint8_t[:,:,:] mask) nogil: """ Check if a single streamline is passing through a mask. This ia an utility function to make streamlines_in_mask() more readable. 
""" cdef cnp.double_t *current_pt = [0.0, 0.0, 0.0] cdef cnp.double_t *next_pt = [0.0, 0.0, 0.0] cdef cnp.double_t *direction = [0.0, 0.0, 0.0] cdef cnp.double_t *current_edge = [0.0, 0.0, 0.0] cdef cnp.double_t direction_norm, remaining_distance cdef cnp.double_t length_ratio, half_ratio cdef cnp.npy_intp point_idx, dim_idx cdef cnp.npy_intp x, y, z if streamline.shape[0] <= 1: return 2 # Single-point streamline # This loop is time-critical # Changed to -1 because we get the next point in the loop for point_idx in range(streamline.shape[0] - 1): # Assign current and next point, find vector between both, # and use the current point as nearest edge for testing. for dim_idx in range(3): current_pt[dim_idx] = streamline[point_idx, dim_idx] next_pt[dim_idx] = streamline[point_idx + 1, dim_idx] direction[dim_idx] = next_pt[dim_idx] - current_pt[dim_idx] current_edge[dim_idx] = current_pt[dim_idx] # Set the "remaining_distance" var to compute remaining length of # vector to process direction_norm = norm(direction[0], direction[1], direction[2]) remaining_distance = direction_norm # Check if it's already a real edge. If not, find the closest edge. if floor(current_edge[0]) != current_edge[0] and \ floor(current_edge[1]) != current_edge[1] and \ floor(current_edge[2]) != current_edge[2]: # All coordinates are not "integers", and therefore, not on the # edge. Fetch the closest edge. c_get_closest_edge(current_pt, direction, current_edge) while True: # Compute the smallest ratio of direction's length to get to an # edge. This effectively means we find the first edge # encountered # Set large value for length_ratio length_ratio = 10000 for dim_idx in range(3): if direction[dim_idx] != 0: length_ratio = fmin( fabs((current_edge[dim_idx] - current_pt[dim_idx]) / direction[dim_idx]), length_ratio) # Check if last point is already on an edge remaining_distance -= length_ratio * direction_norm if remaining_distance < 0 and not fabs(remaining_distance) < 1e-8: break # Find the coordinates of voxel containing current point, to # tag it in the map half_ratio = 0.5 * length_ratio x = (current_pt[0] + half_ratio * direction[0]) y = (current_pt[1] + half_ratio * direction[1]) z = (current_pt[2] + half_ratio * direction[2]) if mask[x, y, z]: return 1 # current_pt is moved to the closest edge for dim_idx in range(3): current_pt[dim_idx] = \ length_ratio * direction[dim_idx] + current_pt[dim_idx] # Snap really small values to 0. if fabs(current_pt[dim_idx]) <= 1e-16: current_pt[dim_idx] = 0.0 c_get_closest_edge(current_pt, direction, current_edge) # Check last point x = next_pt[0] y = next_pt[1] z = next_pt[2] return mask[x, y, z] @cython.boundscheck(False) @cython.wraparound(False) @cython.profile(False) def track_counts(tracks, vol_dims, vox_sizes=(1,1,1), return_elements=True): ''' Counts of points in `tracks` that pass through voxels in volume We find whether a point passed through a track by rounding the mm point values to voxels. For a track that passes through a voxel more than once, we only record counts and elements for the first point in the line that enters the voxel. Parameters ---------- tracks : sequence sequence of T tracks. One track is an ndarray of shape (N, 3), where N is the number of points in that track, and ``tracks[t][n]`` is the n-th point in the t-th track. Points are of form x, y, z in *voxel mm* coordinates. 
That is, if ``i, j, k`` is the possibly non-integer voxel coordinate of the track point, and `vox_sizes` are 3 floats giving voxel sizes of dimensions 0, 1, 2 respectively, then the voxel mm coordinates ``x, y, z`` are simply ``i * vox_sizes[0], j * vox_sizes[1], k * vox_sizes[2]``. This convention derives from trackviz. To pass in tracks as voxel coordinates, just pass ``vox_sizes=(1, 1, 1)`` (see below). vol_dims : sequence length 3 volume dimensions in voxels, x, y, z. vox_sizes : optional, sequence length 3 voxel sizes in mm. Default is (1,1,1) return_elements : {True, False}, optional If True, also return object array with one list per voxel giving track indices and point indices passing through the voxel (see below) Returns ------- tcs : ndarray shape `vol_dim` An array where entry ``tcs[x, y, z]`` is the number of tracks that passed through voxel at voxel coordinate x, y, z tes : ndarray dtype np.object, shape `vol_dim` If `return_elements` is True, we also return an object array with one object per voxel. The objects at each voxel are a list of integers, where the integers are the indices of the track that passed through the voxel. Examples -------- Imagine you have a volume (voxel) space of dimension ``(10,20,30)``. Imagine you had voxel coordinate tracks in ``vs``. To just fill an array with the counts of how many tracks pass through each voxel: >>> vox_track0 = np.array([[0, 0, 0], [1.1, 2.2, 3.3], [2.2, 4.4, 6.6]]) >>> vox_track1 = np.array([[0, 0, 0], [0, 0, 1], [0, 0, 2]]) >>> vs = (vox_track0, vox_track1) >>> vox_dim = (10, 20, 30) # original voxel array size >>> tcs = track_counts(vs, vox_dim, (1, 1, 1), False) >>> tcs.shape (10, 20, 30) >>> tcs[0, 0, 0:4] array([2, 1, 1, 0]) >>> tcs[1, 2, 3], tcs[2, 4, 7] (1, 1) You can also use the routine to count into larger-than-voxel boxes. 
To do this, increase the voxel size and decrease the ``vox_dim`` accordingly: >>> tcs=track_counts(vs, (10/2., 20/2., 30/2.), (2,2,2), False) >>> tcs.shape (5, 10, 15) >>> tcs[1,1,2], tcs[1,2,3] (1, 1) ''' vol_dims = np.asarray(vol_dims).astype(np.int) vox_sizes = np.asarray(vox_sizes).astype(np.double) n_voxels = np.prod(vol_dims) # output track counts array, flattened cdef cnp.ndarray[cnp.int_t, ndim=1] tcs = \ np.zeros((n_voxels,), dtype=np.int) # pointer to output track indices cdef cnp.npy_intp i if return_elements: el_inds = {} # cython numpy pointer to individual track array cdef cnp.ndarray[cnp.float_t, ndim=2] t # cython numpy pointer to point in track array cdef cnp.ndarray[cnp.float_t, ndim=1] in_pt # processed point cdef int out_pt[3] # various temporary loop and working variables cdef int tno, pno, cno cdef cnp.npy_intp el_no, v # fill native C arrays from inputs cdef int vd[3] cdef double vxs[3] for cno in range(3): vd[cno] = vol_dims[cno] vxs[cno] = vox_sizes[cno] # return_elements to C native cdef int ret_elf = return_elements # x slice size (C array ordering) cdef cnp.npy_intp yz = vd[1] * vd[2] for tno in range(len(tracks)): t = tracks[tno].astype(np.float) # set to find unique voxel points in track in_inds = set() # the loop below is time-critical for pno in range(t.shape[0]): in_pt = t[pno] # Round to voxel coordinates, and set coordinates outside # volume to volume edges for cno in range(3): v = floor(in_pt[cno] / vxs[cno] + 0.5) if v < 0: v = 0 elif v >= vd[cno]: v = vd[cno]-1 # last index for this dimension out_pt[cno] = v # calculate element number in flattened tcs array el_no = out_pt[0] * yz + out_pt[1] * vd[2] + out_pt[2] # discard duplicates if el_no in in_inds: continue in_inds.add(el_no) # set elements into object array if ret_elf: key = (out_pt[0], out_pt[1], out_pt[2]) val = tno if tcs[el_no]: el_inds[key].append(val) else: el_inds[key] = [val] # set value into counts tcs[el_no] += 1 if ret_elf: return tcs.reshape(vol_dims), el_inds return tcs.reshape(vol_dims) dipy-0.13.0/dipy/utils/000077500000000000000000000000001317371701200146475ustar00rootroot00000000000000dipy-0.13.0/dipy/utils/__init__.py000066400000000000000000000000421317371701200167540ustar00rootroot00000000000000# code support utilities for dipy dipy-0.13.0/dipy/utils/_importlib.py000066400000000000000000000024611317371701200173640ustar00rootroot00000000000000"""Backport of importlib.import_module from 3.x.""" # While not critical (and in no way guaranteed!), it would be nice to keep this # code compatible with Python 2.3. import sys def _resolve_name(name, package, level): """Return the absolute name of the module to be imported.""" if not hasattr(package, 'rindex'): raise ValueError("'package' not set to a string") dot = len(package) for x in xrange(level, 1, -1): try: dot = package.rindex('.', 0, dot) except ValueError: raise ValueError("attempted relative import beyond top-level " + "package") return "%s.%s" % (package[:dot], name) def import_module(name, package=None): """Import a module. The 'package' argument is required when performing a relative import. It specifies the package to use as the anchor point from which to resolve the relative import to an absolute import. 
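For example (an illustrative sketch), ``import_module('.utils',
package='dipy.tracking')`` imports and returns ``dipy.tracking.utils``.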
""" if name.startswith('.'): if not package: raise TypeError("relative imports require the 'package' argument") level = 0 for character in name: if character != '.': break level += 1 name = _resolve_name(name[level:], package, level) __import__(name) return sys.modules[name] dipy-0.13.0/dipy/utils/arrfuncs.py000066400000000000000000000073511317371701200170520ustar00rootroot00000000000000""" Utilities to manipulate numpy arrays """ import sys from distutils.version import LooseVersion import numpy as np from nibabel.volumeutils import endian_codes, native_code, swapped_code NUMPY_LESS_1_8 = LooseVersion(np.version.short_version) < '1.8' def as_native_array(arr): """ Return `arr` as native byteordered array If arr is already native byte ordered, return unchanged. If it is opposite endian, then make a native byte ordered copy and return that Parameters ---------- arr : ndarray Returns ------- native_arr : ndarray If `arr` was native order, this is just `arr`. Otherwise it's a new array such that ``np.all(native_arr == arr)``, with native byte ordering. """ if endian_codes[arr.dtype.byteorder] == native_code: return arr return arr.byteswap().newbyteorder() def pinv(a, rcond=1e-15): """Vectorized version of `numpy.linalg.pinv` If numpy version is less than 1.8, it falls back to iterating over `np.linalg.pinv` since there isn't a vectorized version of `np.linalg.svd` available. Parameters ---------- a : array_like (..., M, N) Matrix to be pseudo-inverted. rcond : float Cutoff for small singular values. Returns ------- B : ndarray (..., N, M) The pseudo-inverse of `a`. Raises ------ LinAlgError If the SVD computation does not converge. See Also -------- np.linalg.pinv """ a = np.asarray(a) if NUMPY_LESS_1_8: if a.ndim <= 2: # properly handle the case of a single 2D array return np.linalg.pinv(a, rcond) shape = a.shape[:-2] a = a.reshape(-1, a.shape[-2], a.shape[-1]) result = np.empty((a.shape[0], a.shape[2], a.shape[1])) for i, item in enumerate(a): result[i] = np.linalg.pinv(item, rcond) return result.reshape(shape + (a.shape[2], a.shape[1])) else: swap = np.arange(a.ndim) swap[[-2, -1]] = swap[[-1, -2]] u, s, v = np.linalg.svd(a, full_matrices=False) cutoff = np.maximum.reduce(s, axis=-1, keepdims=True) * rcond mask = s > cutoff s[mask] = 1. / s[mask] s[~mask] = 0 return np.einsum('...ij,...jk', np.transpose(v, swap) * s[..., None, :], np.transpose(u, swap)) def eigh(a, UPLO='L'): """Iterate over `np.linalg.eigh` if it doesn't support vectorized operation Parameters ---------- a : array_like (..., M, M) Hermitian/Symmetric matrices whose eigenvalues and eigenvectors are to be computed. UPLO : {'L', 'U'}, optional Specifies whether the calculation is done with the lower triangular part of `a` ('L', default) or the upper triangular part ('U'). Returns ------- w : ndarray (..., M) The eigenvalues in ascending order, each repeated according to its multiplicity. v : ndarray (..., M, M) The column ``v[..., :, i]`` is the normalized eigenvector corresponding to the eigenvalue ``w[..., i]``. Raises ------ LinAlgError If the eigenvalue computation does not converge. 
See Also -------- np.linalg.eigh """ a = np.asarray(a) if a.ndim > 2 and NUMPY_LESS_1_8: shape = a.shape[:-2] a = a.reshape(-1, a.shape[-2], a.shape[-1]) evals = np.empty((a.shape[0], a.shape[1])) evecs = np.empty((a.shape[0], a.shape[1], a.shape[1])) for i, item in enumerate(a): evals[i], evecs[i] = np.linalg.eigh(item, UPLO) return (evals.reshape(shape + (a.shape[1], )), evecs.reshape(shape + (a.shape[1], a.shape[1]))) return np.linalg.eigh(a, UPLO) dipy-0.13.0/dipy/utils/omp.pxd000066400000000000000000000001311317371701200161520ustar00rootroot00000000000000#!python cdef void set_num_threads(num_threads) cdef void restore_default_num_threads() dipy-0.13.0/dipy/utils/omp.pyx000066400000000000000000000041521317371701200162060ustar00rootroot00000000000000#!python import os cimport cython cimport safe_openmp as openmp have_openmp = openmp.have_openmp __all__ = ['have_openmp', 'default_threads', 'cpu_count', 'thread_count'] def cpu_count(): """Return number of cpus as determined by omp_get_num_procs.""" if have_openmp: return openmp.omp_get_num_procs() else: return 1 def thread_count(): """Return number of threads as determined by omp_get_max_threads.""" if have_openmp: return openmp.omp_get_max_threads() else: return 1 def _get_default_threads(): """Default number of threads for OpenMP. This function prioritizes the OMP_NUM_THREADS environment variable. """ if have_openmp: try: default_threads = int(os.environ.get('OMP_NUM_THREADS', None)) if default_threads < 1: raise ValueError("invalid number of threads") except (ValueError, TypeError): default_threads = openmp.omp_get_num_procs() return default_threads else: return 1 default_threads = _get_default_threads() cdef void set_num_threads(num_threads): """Set the number of threads to be used by OpenMP This function does nothing if OpenMP is not available. Parameters ---------- num_threads : int Desired number of threads for OpenMP accelerated code. """ cdef: int threads_to_use if num_threads is not None: threads_to_use = num_threads else: threads_to_use = default_threads if openmp.have_openmp: openmp.omp_set_dynamic(0) openmp.omp_set_num_threads(threads_to_use) cdef void restore_default_num_threads(): """Restore OpenMP to using the default number of threads. This function does nothing if OpenMP is not available """ if openmp.have_openmp: openmp.omp_set_num_threads( default_threads) def _set_omp_threads(num_threads): """Function for testing set_num_threads.""" set_num_threads(num_threads) def _restore_omp_threads(): """Function for testing restore_default_num_threads.""" restore_default_num_threads() dipy-0.13.0/dipy/utils/optpkg.py000066400000000000000000000052301317371701200165250ustar00rootroot00000000000000""" Routines to support optional packages """ try: import importlib except ImportError: import dipy.utils._importlib as importlib try: import nose except ImportError: have_nose = False else: have_nose = True from dipy.utils.tripwire import TripWire if have_nose: class OptionalImportError(ImportError, nose.SkipTest): pass else: class OptionalImportError(ImportError): pass def optional_package(name, trip_msg=None): """ Return package-like thing and module setup for package `name` Parameters ---------- name : str package name trip_msg : None or str message to give when someone tries to use the return package, but we could not import it, and have returned a TripWire object instead. Default message if None. Returns ------- pkg_like : module or ``TripWire`` instance If we can import the package, return it. 
Otherwise return an object raising an error when accessed have_pkg : bool True if import for package was successful, false otherwise module_setup : function callable usually set as ``setup_module`` in calling namespace, to allow skipping tests. Example ------- Typical use would be something like this at the top of a module using an optional package: >>> from dipy.utils.optpkg import optional_package >>> pkg, have_pkg, setup_module = optional_package('not_a_package') Of course in this case the package doesn't exist, and so, in the module: >>> have_pkg False and >>> pkg.some_function() #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ... TripWireError: We need package not_a_package for these functions, but ``import not_a_package`` raised an ImportError If the module does exist - we get the module >>> pkg, _, _ = optional_package('os') >>> hasattr(pkg, 'path') True Or a submodule if that's what we asked for >>> subpkg, _, _ = optional_package('os.path') >>> hasattr(subpkg, 'dirname') True """ try: pkg = importlib.import_module(name) except ImportError: pass else: # import worked # top level module return pkg, True, lambda: None if trip_msg is None: trip_msg = ('We need package %s for these functions, but ' '``import %s`` raised an ImportError' % (name, name)) pkg = TripWire(trip_msg) def setup_module(): if have_nose: raise nose.plugins.skip.SkipTest('No %s for these tests' % name) return pkg, False, setup_module dipy-0.13.0/dipy/utils/six.py000066400000000000000000000313331317371701200160270ustar00rootroot00000000000000"""Utilities for writing code that runs on Python 2 and 3""" # Copyright (c) 2010-2013 Benjamin Peterson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. import operator import sys import types __author__ = "Benjamin Peterson " __version__ = "1.3.0" # Useful for very coarse version differentiation. PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 if PY3: string_types = str, integer_types = int, class_types = type, text_type = str binary_type = bytes MAXSIZE = sys.maxsize else: string_types = basestring, integer_types = (int, long) class_types = (type, types.ClassType) text_type = unicode binary_type = str if sys.platform.startswith("java"): # Jython always uses 32 bits. MAXSIZE = int((1 << 31) - 1) else: # It's possible to have sizeof(long) != sizeof(Py_ssize_t). 
class X(object): def __len__(self): return 1 << 31 try: len(X()) except OverflowError: # 32-bit MAXSIZE = int((1 << 31) - 1) else: # 64-bit MAXSIZE = int((1 << 63) - 1) del X def _add_doc(func, doc): """Add documentation to a function.""" func.__doc__ = doc def _import_module(name): """Import module, returning the module after the last dot.""" __import__(name) return sys.modules[name] class _LazyDescr(object): def __init__(self, name): self.name = name def __get__(self, obj, tp): result = self._resolve() setattr(obj, self.name, result) # This is a bit ugly, but it avoids running this again. delattr(tp, self.name) return result class MovedModule(_LazyDescr): def __init__(self, name, old, new=None): super(MovedModule, self).__init__(name) if PY3: if new is None: new = name self.mod = new else: self.mod = old def _resolve(self): return _import_module(self.mod) class MovedAttribute(_LazyDescr): def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if PY3: if new_mod is None: new_mod = name self.mod = new_mod if new_attr is None: if old_attr is None: new_attr = name else: new_attr = old_attr self.attr = new_attr else: self.mod = old_mod if old_attr is None: old_attr = name self.attr = old_attr def _resolve(self): module = _import_module(self.mod) return getattr(module, self.attr) class _MovedItems(types.ModuleType): """Lazy loading of moved objects""" _moved_attributes = [ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), MovedAttribute("map", "itertools", "builtins", "imap", "map"), MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("reload_module", "__builtin__", "imp", "reload"), MovedAttribute("reduce", "__builtin__", "functools"), MovedAttribute("StringIO", "StringIO", "io"), MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), MovedModule("builtins", "__builtin__"), MovedModule("configparser", "ConfigParser"), MovedModule("copyreg", "copy_reg"), MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), MovedModule("http_cookies", "Cookie", "http.cookies"), MovedModule("html_entities", "htmlentitydefs", "html.entities"), MovedModule("html_parser", "HTMLParser", "html.parser"), MovedModule("http_client", "httplib", "http.client"), MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), MovedModule("cPickle", "cPickle", "pickle"), MovedModule("queue", "Queue"), MovedModule("reprlib", "repr"), MovedModule("socketserver", "SocketServer"), MovedModule("tkinter", "Tkinter"), MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), MovedModule("tkinter_tix", "Tix", "tkinter.tix"), MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), MovedModule("tkinter_dnd", "Tkdnd", 
"tkinter.dnd"), MovedModule("tkinter_colorchooser", "tkColorChooser", "tkinter.colorchooser"), MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"), MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), MovedModule("tkinter_font", "tkFont", "tkinter.font"), MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"), MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), MovedModule("winreg", "_winreg"), ] for attr in _moved_attributes: setattr(_MovedItems, attr.name, attr) del attr moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves") def add_move(move): """Add an item to six.moves.""" setattr(_MovedItems, move.name, move) def remove_move(name): """Remove item from six.moves.""" try: delattr(_MovedItems, name) except AttributeError: try: del moves.__dict__[name] except KeyError: raise AttributeError("no such move, %r" % (name,)) if PY3: _meth_func = "__func__" _meth_self = "__self__" _func_closure = "__closure__" _func_code = "__code__" _func_defaults = "__defaults__" _func_globals = "__globals__" _iterkeys = "keys" _itervalues = "values" _iteritems = "items" _iterlists = "lists" else: _meth_func = "im_func" _meth_self = "im_self" _func_closure = "func_closure" _func_code = "func_code" _func_defaults = "func_defaults" _func_globals = "func_globals" _iterkeys = "iterkeys" _itervalues = "itervalues" _iteritems = "iteritems" _iterlists = "iterlists" try: advance_iterator = next except NameError: def advance_iterator(it): return it.next() next = advance_iterator try: callable = callable except NameError: def callable(obj): return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) if PY3: def get_unbound_function(unbound): return unbound create_bound_method = types.MethodType Iterator = object else: def get_unbound_function(unbound): return unbound.im_func def create_bound_method(func, obj): return types.MethodType(func, obj, obj.__class__) class Iterator(object): def next(self): return type(self).__next__(self) callable = callable _add_doc(get_unbound_function, """Get the function out of a possibly unbound function""") get_method_function = operator.attrgetter(_meth_func) get_method_self = operator.attrgetter(_meth_self) get_function_closure = operator.attrgetter(_func_closure) get_function_code = operator.attrgetter(_func_code) get_function_defaults = operator.attrgetter(_func_defaults) get_function_globals = operator.attrgetter(_func_globals) def iterkeys(d, **kw): """Return an iterator over the keys of a dictionary.""" return iter(getattr(d, _iterkeys)(**kw)) def itervalues(d, **kw): """Return an iterator over the values of a dictionary.""" return iter(getattr(d, _itervalues)(**kw)) def iteritems(d, **kw): """Return an iterator over the (key, value) pairs of a dictionary.""" return iter(getattr(d, _iteritems)(**kw)) def iterlists(d, **kw): """Return an iterator over the (key, [values]) pairs of a dictionary.""" return iter(getattr(d, _iterlists)(**kw)) if PY3: def b(s): return s.encode("latin-1") def u(s): return s unichr = chr if sys.version_info[1] <= 1: def int2byte(i): return bytes((i,)) else: # This is about 2x faster than the implementation above on 3.2+ int2byte = operator.methodcaller("to_bytes", 1, "big") byte2int = operator.itemgetter(0) indexbytes = operator.getitem iterbytes = iter import io StringIO = io.StringIO BytesIO = io.BytesIO else: def b(s): return s def u(s): return unicode(s, 
"unicode_escape") unichr = unichr int2byte = chr def byte2int(bs): return ord(bs[0]) def indexbytes(buf, i): return ord(buf[i]) def iterbytes(buf): return (ord(byte) for byte in buf) import StringIO StringIO = BytesIO = StringIO.StringIO _add_doc(b, """Byte literal""") _add_doc(u, """Text literal""") if PY3: import builtins exec_ = getattr(builtins, "exec") def reraise(tp, value, tb=None): if value.__traceback__ is not tb: raise value.with_traceback(tb) raise value print_ = getattr(builtins, "print") del builtins else: def exec_(_code_, _globs_=None, _locs_=None): """Execute code in a namespace.""" if _globs_ is None: frame = sys._getframe(1) _globs_ = frame.f_globals if _locs_ is None: _locs_ = frame.f_locals del frame elif _locs_ is None: _locs_ = _globs_ exec("""exec _code_ in _globs_, _locs_""") exec_("""def reraise(tp, value, tb=None): raise tp, value, tb """) def print_(*args, **kwargs): """The new-style print function.""" fp = kwargs.pop("file", sys.stdout) if fp is None: return def write(data): if not isinstance(data, basestring): data = str(data) fp.write(data) want_unicode = False sep = kwargs.pop("sep", None) if sep is not None: if isinstance(sep, unicode): want_unicode = True elif not isinstance(sep, str): raise TypeError("sep must be None or a string") end = kwargs.pop("end", None) if end is not None: if isinstance(end, unicode): want_unicode = True elif not isinstance(end, str): raise TypeError("end must be None or a string") if kwargs: raise TypeError("invalid keyword arguments to print()") if not want_unicode: for arg in args: if isinstance(arg, unicode): want_unicode = True break if want_unicode: newline = unicode("\n") space = unicode(" ") else: newline = "\n" space = " " if sep is None: sep = space if end is None: end = newline for i, arg in enumerate(args): if i: write(sep) write(arg) write(end) _add_doc(reraise, """Reraise an exception.""") def with_metaclass(meta, *bases): """Create a base class with a metaclass.""" return meta("NewBase", bases, {}) dipy-0.13.0/dipy/utils/tests/000077500000000000000000000000001317371701200160115ustar00rootroot00000000000000dipy-0.13.0/dipy/utils/tests/__init__.py000066400000000000000000000000431317371701200201170ustar00rootroot00000000000000# Tests for utilities - as package dipy-0.13.0/dipy/utils/tests/test_arrfuncs.py000066400000000000000000000033441317371701200212510ustar00rootroot00000000000000""" Testing array utilities """ import sys import numpy as np from dipy.utils.arrfuncs import as_native_array, pinv, eigh from numpy.testing import (assert_array_almost_equal, assert_array_equal) from nose.tools import assert_true, assert_false, assert_equal, assert_raises NATIVE_ORDER = '<' if sys.byteorder == 'little' else '>' SWAPPED_ORDER = '>' if sys.byteorder == 'little' else '<' def test_as_native(): arr = np.arange(5) # native assert_equal(arr.dtype.byteorder, '=') narr = as_native_array(arr) assert_true(arr is narr) sdt = arr.dtype.newbyteorder('s') barr = arr.astype(sdt) assert_equal(barr.dtype.byteorder, SWAPPED_ORDER) narr = as_native_array(barr) assert_false(barr is narr) assert_array_equal(barr, narr) assert_equal(narr.dtype.byteorder, NATIVE_ORDER) def test_pinv(): arr = np.random.randn(4, 4, 4, 3, 7) _pinv = pinv(arr) for i in range(4): for j in range(4): for k in range(4): assert_array_almost_equal(_pinv[i, j, k], np.linalg.pinv(arr[i, j, k])) def test_eigh(): for i in range(10): arr = np.random.randn(7, 7) evals1, evecs1 = eigh(arr) evals2, evecs2 = np.linalg.eigh(arr) assert_array_almost_equal(evals1, evals2) 
assert_array_almost_equal(evecs1, evecs2) arr = np.random.randn(4, 4, 4, 7, 7) evals, evecs = eigh(arr) for i in range(4): for j in range(4): for k in range(4): evals_vox, evecs_vox = np.linalg.eigh(arr[i, j, k]) assert_array_almost_equal(evals[i, j, k], evals_vox) assert_array_almost_equal(evecs[i, j, k], evecs_vox) dipy-0.13.0/dipy/utils/tests/test_omp.py000066400000000000000000000024401317371701200202150ustar00rootroot00000000000000""" Testing OpenMP utilities """ import os from dipy.utils.omp import (cpu_count, thread_count, default_threads, _set_omp_threads, _restore_omp_threads, have_openmp) from nose.tools import assert_equal from numpy.testing import run_module_suite def test_set_omp_threads(): if have_openmp: # set threads to default _restore_omp_threads() assert_equal(thread_count(), default_threads) # change number of threads nthreads_new = default_threads + 1 _set_omp_threads(nthreads_new) assert_equal(thread_count(), nthreads_new) # restore back to default _restore_omp_threads() assert_equal(thread_count(), default_threads) else: assert_equal(thread_count(), 1) assert_equal(cpu_count(), 1) def test_default_threads(): if have_openmp: try: expected_threads = int(os.environ.get('OMP_NUM_THREADS', None)) if expected_threads < 1: raise ValueError("invalid number of threads") except (ValueError, TypeError): expected_threads = cpu_count() else: expected_threads = 1 assert_equal(default_threads, expected_threads) if __name__ == '__main__': run_module_suite() dipy-0.13.0/dipy/utils/tests/test_tripwire.py000066400000000000000000000016601317371701200212720ustar00rootroot00000000000000""" Testing tripwire module. """ from dipy.utils.tripwire import TripWire, is_tripwire, TripWireError from nose import SkipTest from nose.tools import (assert_true, assert_false, assert_raises, assert_equal, assert_not_equal) def test_is_tripwire(): assert_false(is_tripwire(object())) assert_true(is_tripwire(TripWire('some message'))) def test_tripwire(): # Test tripwire object silly_module_name = TripWire('We do not have silly_module_name') assert_raises(TripWireError, getattr, silly_module_name, 'do_silly_thing') assert_raises(TripWireError, silly_module_name) # Check AttributeError can be checked too try: silly_module_name.__wrapped__ except TripWireError as err: assert_true(isinstance(err, AttributeError)) else: raise RuntimeError("No error raised, but expected") dipy-0.13.0/dipy/utils/tripwire.py000066400000000000000000000025011317371701200170640ustar00rootroot00000000000000""" Class to raise error for missing modules or other misfortunes """ class TripWireError(AttributeError): """ Exception if trying to use TripWire object """ def is_tripwire(obj): """ Returns True if `obj` appears to be a TripWire object Examples -------- >>> is_tripwire(object()) False >>> is_tripwire(TripWire('some message')) True """ try: obj.any_attribute except TripWireError: return True except: pass return False class TripWire(object): """ Class raising error if used Standard use is to proxy modules that we could not import Examples -------- >>> try: ... import silly_module_name ... except ImportError: ... silly_module_name = TripWire('We do not have silly_module_name') >>> silly_module_name.do_silly_thing('with silly string') #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ... 
TripWireError: We do not have silly_module_name """ def __init__(self, msg): self._msg = msg def __getattr__(self, attr_name): ''' Raise informative error accessing attributes ''' raise TripWireError(self._msg) def __call__(self, *args, **kwargs): ''' Raise informative error while calling ''' raise TripWireError(self._msg) dipy-0.13.0/dipy/viz/000077500000000000000000000000001317371701200143175ustar00rootroot00000000000000dipy-0.13.0/dipy/viz/__init__.py000066400000000000000000000006511317371701200164320ustar00rootroot00000000000000# Init file for visualization package from __future__ import division, print_function, absolute_import # We make the visualization requirements optional imports: try: import matplotlib has_mpl = True except ImportError: e_s = "You do not have Matplotlib installed. Some visualization functions" e_s += " might not work for you." print(e_s) has_mpl = False if has_mpl: from . import projections dipy-0.13.0/dipy/viz/actor.py000066400000000000000000000733751317371701200160200ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import import numpy as np from nibabel.affines import apply_affine from dipy.viz.colormap import colormap_lookup_table, create_colormap from dipy.viz.utils import lines_to_vtk_polydata from dipy.viz.utils import set_input # Conditional import machinery for vtk from dipy.utils.optpkg import optional_package # Allow import, but disable doctests if we don't have vtk vtk, have_vtk, setup_module = optional_package('vtk') colors, have_vtk_colors, _ = optional_package('vtk.util.colors') numpy_support, have_ns, _ = optional_package('vtk.util.numpy_support') if have_vtk: version = vtk.vtkVersion.GetVTKSourceVersion().split(' ')[-1] major_version = vtk.vtkVersion.GetVTKMajorVersion() def slicer(data, affine=None, value_range=None, opacity=1., lookup_colormap=None, interpolation='linear', picking_tol=0.025): """ Cuts 3D scalar or rgb volumes into 2D images Parameters ---------- data : array, shape (X, Y, Z) or (X, Y, Z, 3) A grayscale or rgb 4D volume as a numpy array. affine : array, shape (4, 4) Grid to space (usually RAS 1mm) transformation matrix. Default is None. If None then the identity matrix is used. value_range : None or tuple (2,) If None then the values will be interpolated from (data.min(), data.max()) to (0, 255). Otherwise from (value_range[0], value_range[1]) to (0, 255). opacity : float Opacity of 0 means completely transparent and 1 completely visible. lookup_colormap : vtkLookupTable If None (default) then a grayscale map is created. interpolation : string If 'linear' (default) then linear interpolation is used on the final texture mapping. If 'nearest' then nearest neighbor interpolation is used on the final texture mapping. picking_tol : float The tolerance for the vtkCellPicker, specified as a fraction of rendering window size. Returns ------- image_actor : ImageActor An object that is capable of displaying different parts of the volume as slices. The key method of this object is ``display_extent`` where one can input grid coordinates and display the slice in space (or grid) coordinates as calculated by the affine parameter. 
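    Examples
    --------
    A minimal sketch, assuming a random volume rescaled to ``uint8`` and a
    working VTK installation:

    >>> import numpy as np
    >>> from dipy.viz import actor, window
    >>> ren = window.Renderer()
    >>> data = (255 * np.random.rand(30, 30, 30)).astype('uint8')
    >>> slice_actor = actor.slicer(data, affine=np.eye(4))
    >>> slice_actor.display(z=15)
    >>> ren.add(slice_actor)
    >>> #window.show(ren)

    A custom lookup table can be passed instead of the default grayscale
    one, for example one built with the helper from ``dipy.viz.colormap``:

    >>> from dipy.viz.colormap import colormap_lookup_table
    >>> lut = colormap_lookup_table(scale_range=(0, 255))
    >>> slice_actor2 = actor.slicer(data, lookup_colormap=lut)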
""" if data.ndim != 3: if data.ndim == 4: if data.shape[3] != 3: raise ValueError('Only RGB 3D arrays are currently supported.') else: nb_components = 3 else: raise ValueError('Only 3D arrays are currently supported.') else: nb_components = 1 if value_range is None: vol = np.interp(data, xp=[data.min(), data.max()], fp=[0, 255]) else: vol = np.interp(data, xp=[value_range[0], value_range[1]], fp=[0, 255]) vol = vol.astype('uint8') im = vtk.vtkImageData() if major_version <= 5: im.SetScalarTypeToUnsignedChar() I, J, K = vol.shape[:3] im.SetDimensions(I, J, K) voxsz = (1., 1., 1.) # im.SetOrigin(0,0,0) im.SetSpacing(voxsz[2], voxsz[0], voxsz[1]) if major_version <= 5: im.AllocateScalars() im.SetNumberOfScalarComponents(nb_components) else: im.AllocateScalars(vtk.VTK_UNSIGNED_CHAR, nb_components) # copy data # what I do below is the same as what is commented here but much faster # for index in ndindex(vol.shape): # i, j, k = index # im.SetScalarComponentFromFloat(i, j, k, 0, vol[i, j, k]) vol = np.swapaxes(vol, 0, 2) vol = np.ascontiguousarray(vol) if nb_components == 1: vol = vol.ravel() else: vol = np.reshape(vol, [np.prod(vol.shape[:3]), vol.shape[3]]) uchar_array = numpy_support.numpy_to_vtk(vol, deep=0) im.GetPointData().SetScalars(uchar_array) if affine is None: affine = np.eye(4) # Set the transform (identity if none given) transform = vtk.vtkTransform() transform_matrix = vtk.vtkMatrix4x4() transform_matrix.DeepCopy(( affine[0][0], affine[0][1], affine[0][2], affine[0][3], affine[1][0], affine[1][1], affine[1][2], affine[1][3], affine[2][0], affine[2][1], affine[2][2], affine[2][3], affine[3][0], affine[3][1], affine[3][2], affine[3][3])) transform.SetMatrix(transform_matrix) transform.Inverse() # Set the reslicing image_resliced = vtk.vtkImageReslice() set_input(image_resliced, im) image_resliced.SetResliceTransform(transform) image_resliced.AutoCropOutputOn() # Adding this will allow to support anisotropic voxels # and also gives the opportunity to slice per voxel coordinates RZS = affine[:3, :3] zooms = np.sqrt(np.sum(RZS * RZS, axis=0)) image_resliced.SetOutputSpacing(*zooms) image_resliced.SetInterpolationModeToLinear() image_resliced.Update() if nb_components == 1: if lookup_colormap is None: # Create a black/white lookup table. 
lut = colormap_lookup_table((0, 255), (0, 0), (0, 0), (0, 1)) else: lut = lookup_colormap x1, x2, y1, y2, z1, z2 = im.GetExtent() ex1, ex2, ey1, ey2, ez1, ez2 = image_resliced.GetOutput().GetExtent() class ImageActor(vtk.vtkImageActor): def __init__(self): self.picker = vtk.vtkCellPicker() def input_connection(self, output): if vtk.VTK_MAJOR_VERSION <= 5: self.SetInput(output.GetOutput()) else: self.GetMapper().SetInputConnection(output.GetOutputPort()) self.output = output self.shape = (ex2 + 1, ey2 + 1, ez2 + 1) def display_extent(self, x1, x2, y1, y2, z1, z2): self.SetDisplayExtent(x1, x2, y1, y2, z1, z2) if vtk.VTK_MAJOR_VERSION > 5: self.Update() def display(self, x=None, y=None, z=None): if x is None and y is None and z is None: self.display_extent(ex1, ex2, ey1, ey2, ez2//2, ez2//2) if x is not None: self.display_extent(x, x, ey1, ey2, ez1, ez2) if y is not None: self.display_extent(ex1, ex2, y, y, ez1, ez2) if z is not None: self.display_extent(ex1, ex2, ey1, ey2, z, z) def opacity(self, value): if vtk.VTK_MAJOR_VERSION <= 5: self.SetOpacity(value) else: self.GetProperty().SetOpacity(value) def tolerance(self, value): self.picker.SetTolerance(value) def copy(self): im_actor = ImageActor() im_actor.input_connection(self.output) im_actor.SetDisplayExtent(*self.GetDisplayExtent()) im_actor.opacity(opacity) im_actor.tolerance(picking_tol) if interpolation == 'nearest': im_actor.SetInterpolate(False) else: im_actor.SetInterpolate(True) if major_version >= 6: im_actor.GetMapper().BorderOn() return im_actor image_actor = ImageActor() if nb_components == 1: plane_colors = vtk.vtkImageMapToColors() plane_colors.SetLookupTable(lut) plane_colors.SetInputConnection(image_resliced.GetOutputPort()) plane_colors.Update() image_actor.input_connection(plane_colors) else: image_actor.input_connection(image_resliced) image_actor.display() image_actor.opacity(opacity) image_actor.tolerance(picking_tol) if interpolation == 'nearest': image_actor.SetInterpolate(False) else: image_actor.SetInterpolate(True) if major_version >= 6: image_actor.GetMapper().BorderOn() return image_actor def streamtube(lines, colors=None, opacity=1, linewidth=0.1, tube_sides=9, lod=True, lod_points=10 ** 4, lod_points_size=3, spline_subdiv=None, lookup_colormap=None): """ Uses streamtubes to visualize polylines Parameters ---------- lines : list list of N curves represented as 2D ndarrays colors : array (N, 3), list of arrays, tuple (3,), array (K,), None If None then a standard orientation colormap is used for every line. If one tuple of color is used. Then all streamlines will have the same colour. If an array (N, 3) is given, where N is equal to the number of lines. Then every line is coloured with a different RGB color. If a list of RGB arrays is given then every point of every line takes a different color. If an array (K, ) is given, where K is the number of points of all lines then these are considered as the values to be used by the colormap. If an array (L, ) is given, where L is the number of streamlines then these are considered as the values to be used by the colormap per streamline. If an array (X, Y, Z) or (X, Y, Z, 3) is given then the values for the colormap are interpolated automatically using trilinear interpolation. opacity : float Default is 1. linewidth : float Default is 0.01. tube_sides : int Default is 9. lod : bool Use vtkLODActor(level of detail) rather than vtkActor. Default is True. Level of detail actors do not render the full geometry when the frame rate is low. 
lod_points : int Number of points to be used when LOD is in effect. Default is 10000. lod_points_size : int Size of points when lod is in effect. Default is 3. spline_subdiv : int Number of splines subdivision to smooth streamtubes. Default is None. lookup_colormap : vtkLookupTable Add a default lookup table to the colormap. Default is None which calls :func:`dipy.viz.actor.colormap_lookup_table`. Examples -------- >>> import numpy as np >>> from dipy.viz import actor, window >>> ren = window.Renderer() >>> lines = [np.random.rand(10, 3), np.random.rand(20, 3)] >>> colors = np.random.rand(2, 3) >>> c = actor.streamtube(lines, colors) >>> ren.add(c) >>> #window.show(ren) Notes ----- Streamtubes can be heavy on GPU when loading many streamlines and therefore, you may experience slow rendering time depending on system GPU. A solution to this problem is to reduce the number of points in each streamline. In Dipy we provide an algorithm that will reduce the number of points on the straighter parts of the streamline but keep more points on the curvier parts. This can be used in the following way:: from dipy.tracking.distances import approx_polygon_track lines = [approx_polygon_track(line, 0.2) for line in lines] Alternatively we suggest using the ``line`` actor which is much more efficient. See Also -------- :func:`dipy.viz.actor.line` """ # Poly data with lines and colors poly_data, is_colormap = lines_to_vtk_polydata(lines, colors) next_input = poly_data # Set Normals poly_normals = set_input(vtk.vtkPolyDataNormals(), next_input) poly_normals.ComputeCellNormalsOn() poly_normals.ComputePointNormalsOn() poly_normals.ConsistencyOn() poly_normals.AutoOrientNormalsOn() poly_normals.Update() next_input = poly_normals.GetOutputPort() # Spline interpolation if (spline_subdiv is not None) and (spline_subdiv > 0): spline_filter = set_input(vtk.vtkSplineFilter(), next_input) spline_filter.SetSubdivideToSpecified() spline_filter.SetNumberOfSubdivisions(spline_subdiv) spline_filter.Update() next_input = spline_filter.GetOutputPort() # Add thickness to the resulting lines tube_filter = set_input(vtk.vtkTubeFilter(), next_input) tube_filter.SetNumberOfSides(tube_sides) tube_filter.SetRadius(linewidth) # TODO using the line above we will be able to visualize # streamtubes of varying radius # tube_filter.SetVaryRadiusToVaryRadiusByScalar() tube_filter.CappingOn() tube_filter.Update() next_input = tube_filter.GetOutputPort() # Poly mapper poly_mapper = set_input(vtk.vtkPolyDataMapper(), next_input) poly_mapper.ScalarVisibilityOn() poly_mapper.SetScalarModeToUsePointFieldData() poly_mapper.SelectColorArray("Colors") poly_mapper.GlobalImmediateModeRenderingOn() poly_mapper.Update() # Color Scale with a lookup table if is_colormap: if lookup_colormap is None: lookup_colormap = colormap_lookup_table() poly_mapper.SetLookupTable(lookup_colormap) poly_mapper.UseLookupTableScalarRangeOn() poly_mapper.Update() # Set Actor if lod: actor = vtk.vtkLODActor() actor.SetNumberOfCloudPoints(lod_points) actor.GetProperty().SetPointSize(lod_points_size) else: actor = vtk.vtkActor() actor.SetMapper(poly_mapper) actor.GetProperty().SetAmbient(0.1) actor.GetProperty().SetDiffuse(0.15) actor.GetProperty().SetSpecular(0.05) actor.GetProperty().SetSpecularPower(6) actor.GetProperty().SetInterpolationToPhong() actor.GetProperty().BackfaceCullingOn() actor.GetProperty().SetOpacity(opacity) return actor def line(lines, colors=None, opacity=1, linewidth=1, spline_subdiv=None, lod=True, lod_points=10 ** 4, lod_points_size=3, 
lookup_colormap=None): """ Create an actor for one or more lines. Parameters ------------ lines : list of arrays colors : array (N, 3), list of arrays, tuple (3,), array (K,), None If None then a standard orientation colormap is used for every line. If one tuple of color is used. Then all streamlines will have the same colour. If an array (N, 3) is given, where N is equal to the number of lines. Then every line is coloured with a different RGB color. If a list of RGB arrays is given then every point of every line takes a different color. If an array (K, ) is given, where K is the number of points of all lines then these are considered as the values to be used by the colormap. If an array (L, ) is given, where L is the number of streamlines then these are considered as the values to be used by the colormap per streamline. If an array (X, Y, Z) or (X, Y, Z, 3) is given then the values for the colormap are interpolated automatically using trilinear interpolation. opacity : float, optional Default is 1. linewidth : float, optional Line thickness. Default is 1. spline_subdiv : int, optional Number of splines subdivision to smooth streamtubes. Default is None which means no subdivision. lod : bool Use vtkLODActor(level of detail) rather than vtkActor. Default is True. Level of detail actors do not render the full geometry when the frame rate is low. lod_points : int Number of points to be used when LOD is in effect. Default is 10000. lod_points_size : int Size of points when lod is in effect. Default is 3. lookup_colormap : bool, optional Add a default lookup table to the colormap. Default is None which calls :func:`dipy.viz.actor.colormap_lookup_table`. Returns ---------- v : vtkActor or vtkLODActor object Line. Examples ---------- >>> from dipy.viz import actor, window >>> ren = window.Renderer() >>> lines = [np.random.rand(10, 3), np.random.rand(20, 3)] >>> colors = np.random.rand(2, 3) >>> c = actor.line(lines, colors) >>> ren.add(c) >>> #window.show(ren) """ # Poly data with lines and colors poly_data, is_colormap = lines_to_vtk_polydata(lines, colors) next_input = poly_data # use spline interpolation if (spline_subdiv is not None) and (spline_subdiv > 0): spline_filter = set_input(vtk.vtkSplineFilter(), next_input) spline_filter.SetSubdivideToSpecified() spline_filter.SetNumberOfSubdivisions(spline_subdiv) spline_filter.Update() next_input = spline_filter.GetOutputPort() poly_mapper = set_input(vtk.vtkPolyDataMapper(), next_input) poly_mapper.ScalarVisibilityOn() poly_mapper.SetScalarModeToUsePointFieldData() poly_mapper.SelectColorArray("Colors") poly_mapper.Update() # Color Scale with a lookup table if is_colormap: if lookup_colormap is None: lookup_colormap = colormap_lookup_table() poly_mapper.SetLookupTable(lookup_colormap) poly_mapper.UseLookupTableScalarRangeOn() poly_mapper.Update() # Set Actor if lod: actor = vtk.vtkLODActor() actor.SetNumberOfCloudPoints(lod_points) actor.GetProperty().SetPointSize(lod_points_size) else: actor = vtk.vtkActor() # actor = vtk.vtkActor() actor.SetMapper(poly_mapper) actor.GetProperty().SetLineWidth(linewidth) actor.GetProperty().SetOpacity(opacity) return actor def scalar_bar(lookup_table=None, title=" "): """ Default scalar bar actor for a given colormap (colorbar) Parameters ---------- lookup_table : vtkLookupTable or None If None then ``colormap_lookup_table`` is called with default options. 
title : str Returns ------- scalar_bar : vtkScalarBarActor See Also -------- :func:`dipy.viz.actor.colormap_lookup_table` """ lookup_table_copy = vtk.vtkLookupTable() if lookup_table is None: lookup_table = colormap_lookup_table() # Deepcopy the lookup_table because sometimes vtkPolyDataMapper deletes it lookup_table_copy.DeepCopy(lookup_table) scalar_bar = vtk.vtkScalarBarActor() scalar_bar.SetTitle(title) scalar_bar.SetLookupTable(lookup_table_copy) scalar_bar.SetNumberOfLabels(6) return scalar_bar def _arrow(pos=(0, 0, 0), color=(1, 0, 0), scale=(1, 1, 1), opacity=1): ''' Internal function for generating arrow actors. ''' arrow = vtk.vtkArrowSource() # arrow.SetTipLength(length) arrowm = vtk.vtkPolyDataMapper() if major_version <= 5: arrowm.SetInput(arrow.GetOutput()) else: arrowm.SetInputConnection(arrow.GetOutputPort()) arrowa = vtk.vtkActor() arrowa.SetMapper(arrowm) arrowa.GetProperty().SetColor(color) arrowa.GetProperty().SetOpacity(opacity) arrowa.SetScale(scale) return arrowa def axes(scale=(1, 1, 1), colorx=(1, 0, 0), colory=(0, 1, 0), colorz=(0, 0, 1), opacity=1): """ Create an actor with the coordinate's system axes where red = x, green = y, blue = z. Parameters ---------- scale : tuple (3,) Axes size e.g. (100, 100, 100). Default is (1, 1, 1). colorx : tuple (3,) x-axis color. Default red (1, 0, 0). colory : tuple (3,) y-axis color. Default green (0, 1, 0). colorz : tuple (3,) z-axis color. Default blue (0, 0, 1). Returns ------- vtkAssembly """ arrowx = _arrow(color=colorx, scale=scale, opacity=opacity) arrowy = _arrow(color=colory, scale=scale, opacity=opacity) arrowz = _arrow(color=colorz, scale=scale, opacity=opacity) arrowy.RotateZ(90) arrowz.RotateY(-90) ass = vtk.vtkAssembly() ass.AddPart(arrowx) ass.AddPart(arrowy) ass.AddPart(arrowz) return ass def odf_slicer(odfs, affine=None, mask=None, sphere=None, scale=2.2, norm=True, radial_scale=True, opacity=1., colormap='plasma', global_cm=False): """ Slice spherical fields in native or world coordinates Parameters ---------- odfs : ndarray 4D array of spherical functions affine : array 4x4 transformation array from native coordinates to world coordinates mask : ndarray 3D mask sphere : Sphere a sphere scale : float Distance between spheres. norm : bool Normalize `sphere_values`. radial_scale : bool Scale sphere points according to odf values. opacity : float Takes values from 0 (fully transparent) to 1 (opaque) colormap : None or str If None then white color is used. Otherwise the name of colormap is given. Matplotlib colormaps are supported (e.g., 'inferno'). global_cm : bool If True the colormap will be applied in all ODFs. If False it will be applied individually at each voxel (default False). 
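    Examples
    --------
    A minimal sketch, assuming random spherical function values sampled on
    the 724-vertex symmetric sphere and Matplotlib available for the
    default ``plasma`` colormap:

    >>> import numpy as np
    >>> from dipy.data import get_sphere
    >>> from dipy.viz import actor, window
    >>> ren = window.Renderer()
    >>> odfs = np.random.rand(5, 5, 1, 724)
    >>> sphere = get_sphere('symmetric724')
    >>> odf_actor = actor.odf_slicer(odfs, sphere=sphere, scale=0.5)
    >>> ren.add(odf_actor)
    >>> #window.show(ren)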
""" if mask is None: mask = np.ones(odfs.shape[:3], dtype=np.bool) else: mask = mask.astype(np.bool) szx, szy, szz = odfs.shape[:3] class OdfSlicerActor(vtk.vtkLODActor): def display_extent(self, x1, x2, y1, y2, z1, z2): tmp_mask = np.zeros(odfs.shape[:3], dtype=np.bool) tmp_mask[x1:x2 + 1, y1:y2 + 1, z1:z2 + 1] = True tmp_mask = np.bitwise_and(tmp_mask, mask) self.mapper = _odf_slicer_mapper(odfs=odfs, affine=affine, mask=tmp_mask, sphere=sphere, scale=scale, norm=norm, radial_scale=radial_scale, opacity=opacity, colormap=colormap, global_cm=global_cm) self.SetMapper(self.mapper) def display(self, x=None, y=None, z=None): if x is None and y is None and z is None: self.display_extent(0, szx - 1, 0, szy - 1, int(np.floor(szz/2)), int(np.floor(szz/2))) if x is not None: self.display_extent(x, x, 0, szy - 1, 0, szz - 1) if y is not None: self.display_extent(0, szx - 1, y, y, 0, szz - 1) if z is not None: self.display_extent(0, szx - 1, 0, szy - 1, z, z) odf_actor = OdfSlicerActor() odf_actor.display_extent(0, szx - 1, 0, szy - 1, int(np.floor(szz/2)), int(np.floor(szz/2))) return odf_actor def _odf_slicer_mapper(odfs, affine=None, mask=None, sphere=None, scale=2.2, norm=True, radial_scale=True, opacity=1., colormap='plasma', global_cm=False): """ Helper function for slicing spherical fields Parameters ---------- odfs : ndarray 4D array of spherical functions affine : array 4x4 transformation array from native coordinates to world coordinates mask : ndarray 3D mask sphere : Sphere a sphere scale : float Distance between spheres. norm : bool Normalize `sphere_values`. radial_scale : bool Scale sphere points according to odf values. opacity : float Takes values from 0 (fully transparent) to 1 (opaque) colormap : None or str If None then white color is used. Otherwise the name of colormap is given. Matplotlib colormaps are supported (e.g., 'inferno'). global_cm : bool If True the colormap will be applied in all ODFs. If False it will be applied individually at each voxel (default False). 
""" if mask is None: mask = np.ones(odfs.shape[:3]) ijk = np.ascontiguousarray(np.array(np.nonzero(mask)).T) if len(ijk) == 0: return None if affine is not None: ijk = np.ascontiguousarray(apply_affine(affine, ijk)) faces = np.asarray(sphere.faces, dtype=int) vertices = sphere.vertices all_xyz = [] all_faces = [] all_ms = [] for (k, center) in enumerate(ijk): m = odfs[tuple(center.astype(np.int))].copy() if norm: m /= np.abs(m).max() if radial_scale: xyz = vertices * m[:, None] else: xyz = vertices.copy() all_xyz.append(scale * xyz + center) all_faces.append(faces + k * xyz.shape[0]) all_ms.append(m) all_xyz = np.ascontiguousarray(np.concatenate(all_xyz)) all_xyz_vtk = numpy_support.numpy_to_vtk(all_xyz, deep=True) all_faces = np.concatenate(all_faces) all_faces = np.hstack((3 * np.ones((len(all_faces), 1)), all_faces)) ncells = len(all_faces) all_faces = np.ascontiguousarray(all_faces.ravel(), dtype='i8') all_faces_vtk = numpy_support.numpy_to_vtkIdTypeArray(all_faces, deep=True) if global_cm: all_ms = np.ascontiguousarray( np.concatenate(all_ms), dtype='f4') points = vtk.vtkPoints() points.SetData(all_xyz_vtk) cells = vtk.vtkCellArray() cells.SetCells(ncells, all_faces_vtk) if colormap is not None: if global_cm: cols = create_colormap(all_ms.ravel(), colormap) else: cols = np.zeros((ijk.shape[0],) + sphere.vertices.shape, dtype='f4') for k in range(ijk.shape[0]): tmp = create_colormap(all_ms[k].ravel(), colormap) cols[k] = tmp.copy() cols = np.ascontiguousarray( np.reshape(cols, (cols.shape[0] * cols.shape[1], cols.shape[2])), dtype='f4') vtk_colors = numpy_support.numpy_to_vtk( np.asarray(255 * cols), deep=True, array_type=vtk.VTK_UNSIGNED_CHAR) vtk_colors.SetName("Colors") polydata = vtk.vtkPolyData() polydata.SetPoints(points) polydata.SetPolys(cells) if colormap is not None: polydata.GetPointData().SetScalars(vtk_colors) mapper = vtk.vtkPolyDataMapper() if major_version <= 5: mapper.SetInput(polydata) else: mapper.SetInputData(polydata) return mapper def _makeNd(array, ndim): """Pads as many 1s at the beginning of array's shape as are need to give array ndim dimensions.""" new_shape = (1,) * (ndim - array.ndim) + array.shape return array.reshape(new_shape) def peak_slicer(peaks_dirs, peaks_values=None, mask=None, affine=None, colors=(1, 0, 0), opacity=1, linewidth=1, lod=False, lod_points=10 ** 4, lod_points_size=3): """ Visualize peak directions as given from ``peaks_from_model`` Parameters ---------- peaks_dirs : ndarray Peak directions. The shape of the array can be (M, 3) or (X, M, 3) or (X, Y, M, 3) or (X, Y, Z, M, 3) peaks_values : ndarray Peak values. The shape of the array can be (M, ) or (X, M) or (X, Y, M) or (X, Y, Z, M) colors : tuple or None Default red color. If None then every peak gets an orientation color in similarity to a DEC map. opacity : float, optional Default is 1. linewidth : float, optional Line thickness. Default is 1. lod : bool Use vtkLODActor(level of detail) rather than vtkActor. Default is False. Level of detail actors do not render the full geometry when the frame rate is low. lod_points : int Number of points to be used when LOD is in effect. Default is 10000. lod_points_size : int Size of points when lod is in effect. Default is 3. 
Returns ------- vtkActor See Also -------- dipy.viz.fvtk.sphere_funcs """ peaks_dirs = np.asarray(peaks_dirs) if peaks_dirs.ndim > 5: raise ValueError("Wrong shape") peaks_dirs = _makeNd(peaks_dirs, 5) if peaks_values is not None: peaks_values = _makeNd(peaks_values, 4) grid_shape = np.array(peaks_dirs.shape[:3]) if mask is None: mask = np.ones(grid_shape).astype(np.bool) class PeakSlicerActor(vtk.vtkLODActor): def display_extent(self, x1, x2, y1, y2, z1, z2): tmp_mask = np.zeros(grid_shape, dtype=np.bool) tmp_mask[x1:x2 + 1, y1:y2 + 1, z1:z2 + 1] = True tmp_mask = np.bitwise_and(tmp_mask, mask) ijk = np.ascontiguousarray(np.array(np.nonzero(tmp_mask)).T) if len(ijk) == 0: self.SetMapper(None) return if affine is not None: ijk_trans = np.ascontiguousarray(apply_affine(affine, ijk)) list_dirs = [] for index, center in enumerate(ijk): # center = tuple(center) if affine is None: xyz = center[:, None] else: xyz = ijk_trans[index][:, None] xyz = xyz.T for i in range(peaks_dirs[tuple(center)].shape[-2]): if peaks_values is not None: pv = peaks_values[tuple(center)][i] else: pv = 1. symm = np.vstack((-peaks_dirs[tuple(center)][i] * pv + xyz, peaks_dirs[tuple(center)][i] * pv + xyz)) list_dirs.append(symm) self.mapper = line(list_dirs, colors=colors, opacity=opacity, linewidth=linewidth, lod=lod, lod_points=lod_points, lod_points_size=lod_points_size).GetMapper() self.SetMapper(self.mapper) def display(self, x=None, y=None, z=None): if x is None and y is None and z is None: self.display_extent(0, szx - 1, 0, szy - 1, int(np.floor(szz/2)), int(np.floor(szz/2))) if x is not None: self.display_extent(x, x, 0, szy - 1, 0, szz - 1) if y is not None: self.display_extent(0, szx - 1, y, y, 0, szz - 1) if z is not None: self.display_extent(0, szx - 1, 0, szy - 1, z, z) peak_actor = PeakSlicerActor() szx, szy, szz = grid_shape peak_actor.display_extent(0, szx - 1, 0, szy - 1, int(np.floor(szz / 2)), int(np.floor(szz / 2))) return peak_actor dipy-0.13.0/dipy/viz/colormap.py000066400000000000000000000200441317371701200165050ustar00rootroot00000000000000import numpy as np # Conditional import machinery for vtk from dipy.utils.optpkg import optional_package # Allow import, but disable doctests if we don't have vtk vtk, have_vtk, setup_module = optional_package('vtk') cm, have_matplotlib, _ = optional_package('matplotlib.cm') if have_matplotlib: get_cmap = cm.get_cmap else: from dipy.data import get_cmap from warnings import warn def colormap_lookup_table(scale_range=(0, 1), hue_range=(0.8, 0), saturation_range=(1, 1), value_range=(0.8, 0.8)): """ Lookup table for the colormap Parameters ---------- scale_range : tuple It can be anything e.g. (0, 1) or (0, 255). Usually it is the mininum and maximum value of your data. Default is (0, 1). hue_range : tuple of floats HSV values (min 0 and max 1). Default is (0.8, 0). saturation_range : tuple of floats HSV values (min 0 and max 1). Default is (1, 1). value_range : tuple of floats HSV value (min 0 and max 1). Default is (0.8, 0.8). 
Returns ------- lookup_table : vtkLookupTable """ lookup_table = vtk.vtkLookupTable() lookup_table.SetRange(scale_range) lookup_table.SetTableRange(scale_range) lookup_table.SetHueRange(hue_range) lookup_table.SetSaturationRange(saturation_range) lookup_table.SetValueRange(value_range) lookup_table.Build() return lookup_table def cc(na, nd): return (na * np.cos(nd * np.pi / 180.0)) def ss(na, nd): return na * np.sin(nd * np.pi / 180.0) def boys2rgb(v): """ boys 2 rgb cool colormap Maps a given field of undirected lines (line field) to rgb colors using Boy's Surface immersion of the real projective plane. Boy's Surface is one of the three possible surfaces obtained by gluing a Mobius strip to the edge of a disk. The other two are the crosscap and Roman surface, Steiner surfaces that are homeomorphic to the real projective plane (Pinkall 1986). The Boy's surface is the only 3D immersion of the projective plane without singularities. Visit http://www.cs.brown.edu/~cad/rp2coloring for further details. Cagatay Demiralp, 9/7/2008. Code was initially in matlab and was rewritten in Python for dipy by the Dipy Team. Thank you Cagatay for putting this online. Parameters ------------ v : array, shape (N, 3) of unit vectors (e.g., principal eigenvectors of tensor data) representing one of the two directions of the undirected lines in a line field. Returns --------- c : array, shape (N, 3) matrix of rgb colors corresponding to the vectors given in V. Examples ---------- >>> from dipy.viz import colormap >>> v = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]) >>> c = colormap.boys2rgb(v) """ if v.ndim == 1: x = v[0] y = v[1] z = v[2] if v.ndim == 2: x = v[:, 0] y = v[:, 1] z = v[:, 2] x2 = x ** 2 y2 = y ** 2 z2 = z ** 2 x3 = x * x2 y3 = y * y2 z3 = z * z2 z4 = z * z2 xy = x * y xz = x * z yz = y * z hh1 = .5 * (3 * z2 - 1) / 1.58 hh2 = 3 * xz / 2.745 hh3 = 3 * yz / 2.745 hh4 = 1.5 * (x2 - y2) / 2.745 hh5 = 6 * xy / 5.5 hh6 = (1 / 1.176) * .125 * (35 * z4 - 30 * z2 + 3) hh7 = 2.5 * x * (7 * z3 - 3 * z) / 3.737 hh8 = 2.5 * y * (7 * z3 - 3 * z) / 3.737 hh9 = ((x2 - y2) * 7.5 * (7 * z2 - 1)) / 15.85 hh10 = ((2 * xy) * (7.5 * (7 * z2 - 1))) / 15.85 hh11 = 105 * (4 * x3 * z - 3 * xz * (1 - z2)) / 59.32 hh12 = 105 * (-4 * y3 * z + 3 * yz * (1 - z2)) / 59.32 s0 = -23.0 s1 = 227.9 s2 = 251.0 s3 = 125.0 ss23 = ss(2.71, s0) cc23 = cc(2.71, s0) ss45 = ss(2.12, s1) cc45 = cc(2.12, s1) ss67 = ss(.972, s2) cc67 = cc(.972, s2) ss89 = ss(.868, s3) cc89 = cc(.868, s3) X = 0.0 X = X + hh2 * cc23 X = X + hh3 * ss23 X = X + hh5 * cc45 X = X + hh4 * ss45 X = X + hh7 * cc67 X = X + hh8 * ss67 X = X + hh10 * cc89 X = X + hh9 * ss89 Y = 0.0 Y = Y + hh2 * -ss23 Y = Y + hh3 * cc23 Y = Y + hh5 * -ss45 Y = Y + hh4 * cc45 Y = Y + hh7 * -ss67 Y = Y + hh8 * cc67 Y = Y + hh10 * -ss89 Y = Y + hh9 * cc89 Z = 0.0 Z = Z + hh1 * -2.8 Z = Z + hh6 * -0.5 Z = Z + hh11 * 0.3 Z = Z + hh12 * -2.5 # scale and normalize to fit # in the rgb space w_x = 4.1925 trl_x = -2.0425 w_y = 4.0217 trl_y = -1.8541 w_z = 4.0694 trl_z = -2.1899 if v.ndim == 2: N = len(x) C = np.zeros((N, 3)) C[:, 0] = 0.9 * np.abs(((X - trl_x) / w_x)) + 0.05 C[:, 1] = 0.9 * np.abs(((Y - trl_y) / w_y)) + 0.05 C[:, 2] = 0.9 * np.abs(((Z - trl_z) / w_z)) + 0.05 if v.ndim == 1: C = np.zeros((3,)) C[0] = 0.9 * np.abs(((X - trl_x) / w_x)) + 0.05 C[1] = 0.9 * np.abs(((Y - trl_y) / w_y)) + 0.05 C[2] = 0.9 * np.abs(((Z - trl_z) / w_z)) + 0.05 return C def orient2rgb(v): """ standard orientation 2 rgb colormap v : array, shape (N, 3) of vectors not necessarily normalized Returns ------- c : 
array, shape (N, 3) matrix of rgb colors corresponding to the vectors given in V. Examples -------- >>> from dipy.viz import colormap >>> v = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]) >>> c = colormap.orient2rgb(v) """ if v.ndim == 1: orient = v orient = np.abs(orient / np.linalg.norm(orient)) if v.ndim == 2: orientn = np.sqrt(v[:, 0] ** 2 + v[:, 1] ** 2 + v[:, 2] ** 2) orientn.shape = orientn.shape + (1,) orient = np.abs(v / orientn) return orient def line_colors(streamlines, cmap='rgb_standard'): """ Create colors for streamlines to be used in fvtk.line Parameters ---------- streamlines : sequence of ndarrays cmap : ('rgb_standard', 'boys_standard') Returns ------- colors : ndarray """ if cmap == 'rgb_standard': col_list = [orient2rgb(streamline[-1] - streamline[0]) for streamline in streamlines] if cmap == 'boys_standard': col_list = [boys2rgb(streamline[-1] - streamline[0]) for streamline in streamlines] return np.vstack(col_list) lowercase_cm_name = {'blues': 'Blues', 'accent': 'Accent'} def create_colormap(v, name='plasma', auto=True): """Create colors from a specific colormap and return it as an array of shape (N,3) where every row gives the corresponding r,g,b value. The colormaps we use are similar with those of matplotlib. Parameters ---------- v : (N,) array vector of values to be mapped in RGB colors according to colormap name : str. Name of the colormap. Currently implemented: 'jet', 'blues', 'accent', 'bone' and matplotlib colormaps if you have matplotlib installed. For example, we suggest using 'plasma', 'viridis' or 'inferno'. 'jet' is popular but can be often misleading and we will deprecate it the future. auto : bool, if auto is True then v is interpolated to [0, 10] from v.min() to v.max() Notes ----- Dipy supports a few colormaps for those who do not use Matplotlib, for more colormaps consider downloading Matplotlib (see matplotlib.org). """ if name == 'jet': msg = 'Jet is a popular colormap but can often be misleading' msg += 'Use instead plasma, viridis, hot or inferno.' warn(msg, DeprecationWarning) if v.ndim > 1: msg = 'This function works only with 1d arrays. Use ravel()' raise ValueError(msg) if auto: v = np.interp(v, [v.min(), v.max()], [0, 1]) else: v = np.clip(v, 0, 1) # For backwards compatibility with lowercase names newname = lowercase_cm_name.get(name) or name colormap = get_cmap(newname) if colormap is None: e_s = "Colormap {} is not yet implemented ".format(name) raise ValueError(e_s) rgba = colormap(v) rgb = rgba[:, :3].copy() return rgb dipy-0.13.0/dipy/viz/fvtk.py000066400000000000000000000752251317371701200156560ustar00rootroot00000000000000""" Fvtk module implements simple visualization functions using VTK. The main idea is the following: A window can have one or more renderers. A renderer can have none, one or more actors. Examples of actors are a sphere, line, point etc. You basically add actors in a renderer and in that way you can visualize the forementioned objects e.g. sphere, line ... 
Examples --------- >>> from dipy.viz import fvtk >>> r=fvtk.ren() >>> a=fvtk.axes() >>> fvtk.add(r,a) >>> #fvtk.show(r) For more information on VTK there many neat examples in http://www.vtk.org/Wiki/VTK/Tutorials/External_Tutorials """ from __future__ import division, print_function, absolute_import from warnings import warn from dipy.utils.six.moves import xrange import numpy as np from dipy.core.ndindex import ndindex # Conditional import machinery for vtk from dipy.utils.optpkg import optional_package # Allow import, but disable doctests if we don't have vtk vtk, have_vtk, setup_module = optional_package('vtk') colors, have_vtk_colors, _ = optional_package('vtk.util.colors') cm, have_matplotlib, _ = optional_package('matplotlib.cm') if have_matplotlib: get_cmap = cm.get_cmap else: from dipy.data import get_cmap from dipy.viz.colormap import create_colormap # a track buffer used only with picking tracks track_buffer = [] # indices buffer for the tracks ind_buffer = [] # tempory renderer used only with picking tracks tmp_ren = None if have_vtk: major_version = vtk.vtkVersion.GetVTKMajorVersion() # Create a text mapper and actor to display the results of picking. textMapper = vtk.vtkTextMapper() tprop = textMapper.GetTextProperty() tprop.SetFontFamilyToArial() tprop.SetFontSize(10) # tprop.BoldOn() # tprop.ShadowOn() tprop.SetColor(1, 0, 0) textActor = vtk.vtkActor2D() textActor.VisibilityOff() textActor.SetMapper(textMapper) # Create a cell picker. picker = vtk.vtkCellPicker() from dipy.viz.window import (ren, renderer, add, clear, rm, rm_all, show, record, snapshot) from dipy.viz.actor import line, streamtube, slicer, axes try: from vtk import vtkVolumeTextureMapper2D have_vtk_texture_mapper2D = True except: have_vtk_texture_mapper2D = False else: ren, have_ren, _ = optional_package('dipy.viz.window.ren', 'Python VTK is not installed') def dots(points, color=(1, 0, 0), opacity=1, dot_size=5): """ Create one or more 3d points Parameters ---------- points : ndarray, (N, 3) color : tuple (3,) opacity : float dot_size : int Returns -------- vtkActor See Also --------- dipy.viz.fvtk.point """ if points.ndim == 2: points_no = points.shape[0] else: points_no = 1 polyVertexPoints = vtk.vtkPoints() polyVertexPoints.SetNumberOfPoints(points_no) aPolyVertex = vtk.vtkPolyVertex() aPolyVertex.GetPointIds().SetNumberOfIds(points_no) cnt = 0 if points.ndim > 1: for point in points: polyVertexPoints.InsertPoint(cnt, point[0], point[1], point[2]) aPolyVertex.GetPointIds().SetId(cnt, cnt) cnt += 1 else: polyVertexPoints.InsertPoint(cnt, points[0], points[1], points[2]) aPolyVertex.GetPointIds().SetId(cnt, cnt) cnt += 1 aPolyVertexGrid = vtk.vtkUnstructuredGrid() aPolyVertexGrid.Allocate(1, 1) aPolyVertexGrid.InsertNextCell(aPolyVertex.GetCellType(), aPolyVertex.GetPointIds()) aPolyVertexGrid.SetPoints(polyVertexPoints) aPolyVertexMapper = vtk.vtkDataSetMapper() if major_version <= 5: aPolyVertexMapper.SetInput(aPolyVertexGrid) else: aPolyVertexMapper.SetInputData(aPolyVertexGrid) aPolyVertexActor = vtk.vtkActor() aPolyVertexActor.SetMapper(aPolyVertexMapper) aPolyVertexActor.GetProperty().SetColor(color) aPolyVertexActor.GetProperty().SetOpacity(opacity) aPolyVertexActor.GetProperty().SetPointSize(dot_size) return aPolyVertexActor def point(points, colors, opacity=1, point_radius=0.1, theta=8, phi=8): """ Visualize points as sphere glyphs Parameters ---------- points : ndarray, shape (N, 3) colors : ndarray (N,3) or tuple (3,) point_radius : float theta : int phi : int Returns ------- vtkActor Examples 
-------- >>> from dipy.viz import fvtk >>> ren = fvtk.ren() >>> pts = np.random.rand(5, 3) >>> point_actor = fvtk.point(pts, fvtk.colors.coral) >>> fvtk.add(ren, point_actor) >>> #fvtk.show(ren) """ if np.array(colors).ndim == 1: # return dots(points,colors,opacity) colors = np.tile(colors, (len(points), 1)) scalars = vtk.vtkUnsignedCharArray() scalars.SetNumberOfComponents(3) pts = vtk.vtkPoints() cnt_colors = 0 for p in points: pts.InsertNextPoint(p[0], p[1], p[2]) scalars.InsertNextTuple3( round(255 * colors[cnt_colors][0]), round(255 * colors[cnt_colors][1]), round(255 * colors[cnt_colors][2])) cnt_colors += 1 src = vtk.vtkSphereSource() src.SetRadius(point_radius) src.SetThetaResolution(theta) src.SetPhiResolution(phi) polyData = vtk.vtkPolyData() polyData.SetPoints(pts) polyData.GetPointData().SetScalars(scalars) glyph = vtk.vtkGlyph3D() glyph.SetSourceConnection(src.GetOutputPort()) if major_version <= 5: glyph.SetInput(polyData) else: glyph.SetInputData(polyData) glyph.SetColorModeToColorByScalar() glyph.SetScaleModeToDataScalingOff() glyph.Update() mapper = vtk.vtkPolyDataMapper() if major_version <= 5: mapper.SetInput(glyph.GetOutput()) else: mapper.SetInputData(glyph.GetOutput()) actor = vtk.vtkActor() actor.SetMapper(mapper) actor.GetProperty().SetOpacity(opacity) return actor def label(ren, text='Origin', pos=(0, 0, 0), scale=(0.2, 0.2, 0.2), color=(1, 1, 1)): ''' Create a label actor. This actor will always face the camera Parameters ---------- ren : vtkRenderer() object Renderer as returned by ``ren()``. text : str Text for the label. pos : (3,) array_like, optional Left down position of the label. scale : (3,) array_like Changes the size of the label. color : (3,) array_like Label color as ``(r,g,b)`` tuple. Returns ------- l : vtkActor object Label. Examples -------- >>> from dipy.viz import fvtk >>> r=fvtk.ren() >>> l=fvtk.label(r) >>> fvtk.add(r,l) >>> #fvtk.show(r) ''' atext = vtk.vtkVectorText() atext.SetText(text) textm = vtk.vtkPolyDataMapper() if major_version <= 5: textm.SetInput(atext.GetOutput()) else: textm.SetInputData(atext.GetOutput()) texta = vtk.vtkFollower() texta.SetMapper(textm) texta.SetScale(scale) texta.GetProperty().SetColor(color) texta.SetPosition(pos) ren.AddActor(texta) texta.SetCamera(ren.GetActiveCamera()) return texta def volume(vol, voxsz=(1.0, 1.0, 1.0), affine=None, center_origin=1, info=0, maptype=0, trilinear=1, iso=0, iso_thr=100, opacitymap=None, colormap=None): ''' Create a volume and return a volumetric actor using volumetric rendering. This function has many different interesting capabilities. The maptype, opacitymap and colormap are the most crucial parameters here. Parameters ---------- vol : array, shape (N, M, K), dtype uint8 An array representing the volumetric dataset that we want to visualize using volumetric rendering. voxsz : (3,) array_like Voxel size. affine : (4, 4) ndarray As given by volumeimages. center_origin : int {0,1} It considers that the center of the volume is the point ``(-vol.shape[0]/2.0+0.5,-vol.shape[1]/2.0+0.5, -vol.shape[2]/2.0+0.5)``. info : int {0,1} If 1 it prints out some info about the volume, the method and the dataset. trilinear : int {0,1} Use trilinear interpolation, default 1, gives smoother rendering. If you want faster interpolation use 0 (Nearest). maptype : int {0,1} The maptype is a very important parameter which affects the raycasting algorithm in use for the rendering. The options are: If 0 then vtkVolumeTextureMapper2D is used. If 1 then vtkVolumeRayCastFunction is used. 
iso : int {0,1} If iso is 1 and maptype is 1 then we use ``vtkVolumeRayCastIsosurfaceFunction`` which generates an isosurface at the predefined iso_thr value. If iso is 0 and maptype is 1 ``vtkVolumeRayCastCompositeFunction`` is used. iso_thr : int If iso is 1 then then this threshold in the volume defines the value which will be used to create the isosurface. opacitymap : (2, 2) ndarray The opacity map assigns a transparency coefficient to every point in the volume. The default value uses the histogram of the volume to calculate the opacitymap. colormap : (4, 4) ndarray The color map assigns a color value to every point in the volume. When None from the histogram it uses a red-blue colormap. Returns ------- v : vtkVolume Volume. Notes -------- What is the difference between TextureMapper2D and RayCastFunction? Coming soon... See VTK user's guide [book] & The Visualization Toolkit [book] and VTK's online documentation & online docs. What is the difference between RayCastIsosurfaceFunction and RayCastCompositeFunction? Coming soon... See VTK user's guide [book] & The Visualization Toolkit [book] and VTK's online documentation & online docs. What about trilinear interpolation? Coming soon... well when time permits really ... :-) Examples -------- First example random points. >>> from dipy.viz import fvtk >>> import numpy as np >>> vol=100*np.random.rand(100,100,100) >>> vol=vol.astype('uint8') >>> vol.min(), vol.max() (0, 99) >>> r = fvtk.ren() >>> v = fvtk.volume(vol) >>> fvtk.add(r,v) >>> #fvtk.show(r) Second example with a more complicated function >>> from dipy.viz import fvtk >>> import numpy as np >>> x, y, z = np.ogrid[-10:10:20j, -10:10:20j, -10:10:20j] >>> s = np.sin(x*y*z)/(x*y*z) >>> r = fvtk.ren() >>> v = fvtk.volume(s) >>> fvtk.add(r,v) >>> #fvtk.show(r) If you find this function too complicated you can always use mayavi. Please do not forget to use the -wthread switch in ipython if you are running mayavi. 
from enthought.mayavi import mlab import numpy as np x, y, z = np.ogrid[-10:10:20j, -10:10:20j, -10:10:20j] s = np.sin(x*y*z)/(x*y*z) mlab.pipeline.volume(mlab.pipeline.scalar_field(s)) mlab.show() More mayavi demos are available here: http://code.enthought.com/projects/mayavi/docs/development/html/mayavi/mlab.html ''' if vol.ndim != 3: raise ValueError('3d numpy arrays only please') if info: print('Datatype', vol.dtype, 'converted to uint8') vol = np.interp(vol, [vol.min(), vol.max()], [0, 255]) vol = vol.astype('uint8') if opacitymap is None: bin, res = np.histogram(vol.ravel()) res2 = np.interp(res, [vol.min(), vol.max()], [0, 1]) opacitymap = np.vstack((res, res2)).T opacitymap = opacitymap.astype('float32') ''' opacitymap=np.array([[ 0.0, 0.0], [50.0, 0.9]]) ''' if info: print('opacitymap', opacitymap) if colormap is None: bin, res = np.histogram(vol.ravel()) res2 = np.interp(res, [vol.min(), vol.max()], [0, 1]) zer = np.zeros(res2.shape) colormap = np.vstack((res, res2, zer, res2[::-1])).T colormap = colormap.astype('float32') ''' colormap=np.array([[0.0, 0.5, 0.0, 0.0], [64.0, 1.0, 0.5, 0.5], [128.0, 0.9, 0.2, 0.3], [196.0, 0.81, 0.27, 0.1], [255.0, 0.5, 0.5, 0.5]]) ''' if info: print('colormap', colormap) im = vtk.vtkImageData() if major_version <= 5: im.SetScalarTypeToUnsignedChar() im.SetDimensions(vol.shape[0], vol.shape[1], vol.shape[2]) # im.SetOrigin(0,0,0) # im.SetSpacing(voxsz[2],voxsz[0],voxsz[1]) if major_version <= 5: im.AllocateScalars() else: im.AllocateScalars(vtk.VTK_UNSIGNED_CHAR, 3) for i in range(vol.shape[0]): for j in range(vol.shape[1]): for k in range(vol.shape[2]): im.SetScalarComponentFromFloat(i, j, k, 0, vol[i, j, k]) if affine is not None: aff = vtk.vtkMatrix4x4() aff.DeepCopy((affine[0, 0], affine[0, 1], affine[0, 2], affine[0, 3], affine[1, 0], affine[1, 1], affine[1, 2], affine[1, 3], affine[2, 0], affine[2, 1], affine[2, 2], affine[2, 3], affine[3, 0], affine[3, 1], affine[3, 2], affine[3, 3])) # aff.DeepCopy((affine[0,0],affine[0,1],affine[0,2],0,affine[1,0],affine[1,1],affine[1,2],0,affine[2,0],affine[2,1],affine[2,2],0,affine[3,0],affine[3,1],affine[3,2],1)) # aff.DeepCopy((affine[0,0],affine[0,1],affine[0,2],127.5,affine[1,0],affine[1,1],affine[1,2],-127.5,affine[2,0],affine[2,1],affine[2,2],-127.5,affine[3,0],affine[3,1],affine[3,2],1)) reslice = vtk.vtkImageReslice() if major_version <= 5: reslice.SetInput(im) else: reslice.SetInputData(im) # reslice.SetOutputDimensionality(2) # reslice.SetOutputOrigin(127,-145,147) reslice.SetResliceAxes(aff) # reslice.SetOutputOrigin(-127,-127,-127) # reslice.SetOutputExtent(-127,128,-127,128,-127,128) # reslice.SetResliceAxesOrigin(0,0,0) # print 'Get Reslice Axes Origin ', reslice.GetResliceAxesOrigin() # reslice.SetOutputSpacing(1.0,1.0,1.0) reslice.SetInterpolationModeToLinear() # reslice.UpdateWholeExtent() # print 'reslice GetOutputOrigin', reslice.GetOutputOrigin() # print 'reslice GetOutputExtent',reslice.GetOutputExtent() # print 'reslice GetOutputSpacing',reslice.GetOutputSpacing() changeFilter = vtk.vtkImageChangeInformation() if major_version <= 5: changeFilter.SetInput(reslice.GetOutput()) else: changeFilter.SetInputData(reslice.GetOutput()) # changeFilter.SetInput(im) if center_origin: changeFilter.SetOutputOrigin( -vol.shape[0] / 2.0 + 0.5, -vol.shape[1] / 2.0 + 0.5, -vol.shape[2] / 2.0 + 0.5) print('ChangeFilter ', changeFilter.GetOutputOrigin()) opacity = vtk.vtkPiecewiseFunction() for i in range(opacitymap.shape[0]): opacity.AddPoint(opacitymap[i, 0], opacitymap[i, 1]) color = 
vtk.vtkColorTransferFunction() for i in range(colormap.shape[0]): color.AddRGBPoint( colormap[i, 0], colormap[i, 1], colormap[i, 2], colormap[i, 3]) if(maptype == 0): if not have_vtk_texture_mapper2D: raise ValueError("VolumeTextureMapper2D is not available in your " "version of VTK") property = vtk.vtkVolumeProperty() property.SetColor(color) property.SetScalarOpacity(opacity) if trilinear: property.SetInterpolationTypeToLinear() else: property.SetInterpolationTypeToNearest() if info: print('mapper VolumeTextureMapper2D') mapper = vtk.vtkVolumeTextureMapper2D() if affine is None: if major_version <= 5: mapper.SetInput(im) else: mapper.SetInputData(im) else: if major_version <= 5: mapper.SetInput(changeFilter.GetOutput()) else: mapper.SetInputData(changeFilter.GetOutput()) if (maptype == 1): property = vtk.vtkVolumeProperty() property.SetColor(color) property.SetScalarOpacity(opacity) property.ShadeOn() if trilinear: property.SetInterpolationTypeToLinear() else: property.SetInterpolationTypeToNearest() if iso: isofunc = vtk.vtkVolumeRayCastIsosurfaceFunction() isofunc.SetIsoValue(iso_thr) else: compositeFunction = vtk.vtkVolumeRayCastCompositeFunction() if info: print('mapper VolumeRayCastMapper') mapper = vtk.vtkVolumeRayCastMapper() if iso: mapper.SetVolumeRayCastFunction(isofunc) if info: print('Isosurface') else: mapper.SetVolumeRayCastFunction(compositeFunction) # mapper.SetMinimumImageSampleDistance(0.2) if info: print('Composite') if affine is None: if major_version <= 5: mapper.SetInput(im) else: mapper.SetInputData(im) else: # mapper.SetInput(reslice.GetOutput()) if major_version <= 5: mapper.SetInput(changeFilter.GetOutput()) else: mapper.SetInputData(changeFilter.GetOutput()) # Return mid position in world space # im2=reslice.GetOutput() # index=im2.FindPoint(vol.shape[0]/2.0,vol.shape[1]/2.0,vol.shape[2]/2.0) # print 'Image Getpoint ' , im2.GetPoint(index) volum = vtk.vtkVolume() volum.SetMapper(mapper) volum.SetProperty(property) if info: print('Origin', volum.GetOrigin()) print('Orientation', volum.GetOrientation()) print('OrientationW', volum.GetOrientationWXYZ()) print('Position', volum.GetPosition()) print('Center', volum.GetCenter()) print('Get XRange', volum.GetXRange()) print('Get YRange', volum.GetYRange()) print('Get ZRange', volum.GetZRange()) print('Volume data type', vol.dtype) return volum def contour(vol, voxsz=(1.0, 1.0, 1.0), affine=None, levels=[50], colors=[np.array([1.0, 0.0, 0.0])], opacities=[0.5]): """ Take a volume and draw surface contours for any any number of thresholds (levels) where every contour has its own color and opacity Parameters ---------- vol : (N, M, K) ndarray An array representing the volumetric dataset for which we will draw some beautiful contours . voxsz : (3,) array_like Voxel size. affine : None Not used. levels : array_like Sequence of thresholds for the contours taken from image values needs to be same datatype as `vol`. colors : (N, 3) ndarray RGB values in [0,1]. opacities : array_like Opacities of contours. 
Returns ------- vtkAssembly Examples -------- >>> import numpy as np >>> from dipy.viz import fvtk >>> A=np.zeros((10,10,10)) >>> A[3:-3,3:-3,3:-3]=1 >>> r=fvtk.ren() >>> fvtk.add(r,fvtk.contour(A,levels=[1])) >>> #fvtk.show(r) """ im = vtk.vtkImageData() if major_version <= 5: im.SetScalarTypeToUnsignedChar() im.SetDimensions(vol.shape[0], vol.shape[1], vol.shape[2]) # im.SetOrigin(0,0,0) # im.SetSpacing(voxsz[2],voxsz[0],voxsz[1]) if major_version <= 5: im.AllocateScalars() else: im.AllocateScalars(vtk.VTK_UNSIGNED_CHAR, 3) for i in range(vol.shape[0]): for j in range(vol.shape[1]): for k in range(vol.shape[2]): im.SetScalarComponentFromFloat(i, j, k, 0, vol[i, j, k]) ass = vtk.vtkAssembly() # ass=[] for (i, l) in enumerate(levels): # print levels skinExtractor = vtk.vtkContourFilter() if major_version <= 5: skinExtractor.SetInput(im) else: skinExtractor.SetInputData(im) skinExtractor.SetValue(0, l) skinNormals = vtk.vtkPolyDataNormals() skinNormals.SetInputConnection(skinExtractor.GetOutputPort()) skinNormals.SetFeatureAngle(60.0) skinMapper = vtk.vtkPolyDataMapper() skinMapper.SetInputConnection(skinNormals.GetOutputPort()) skinMapper.ScalarVisibilityOff() skin = vtk.vtkActor() skin.SetMapper(skinMapper) skin.GetProperty().SetOpacity(opacities[i]) # print colors[i] skin.GetProperty().SetColor(colors[i][0], colors[i][1], colors[i][2]) # skin.Update() ass.AddPart(skin) del skin del skinMapper del skinExtractor return ass def _makeNd(array, ndim): """Pads as many 1s at the beginning of array's shape as are need to give array ndim dimensions.""" new_shape = (1,) * (ndim - array.ndim) + array.shape return array.reshape(new_shape) def sphere_funcs(sphere_values, sphere, image=None, colormap='jet', scale=2.2, norm=True, radial_scale=True): """Plot many morphed spherical functions simultaneously. Parameters ---------- sphere_values : (M,) or (X, M) or (X, Y, M) or (X, Y, Z, M) ndarray Values on the sphere. sphere : Sphere image : None, Not yet supported. colormap : None or 'jet' If None then no color is used. scale : float, Distance between spheres. norm : bool, Normalize `sphere_values`. radial_scale : bool, Scale sphere points according to odf values. Returns ------- actor : vtkActor Spheres. Examples -------- >>> from dipy.viz import fvtk >>> r = fvtk.ren() >>> odfs = np.ones((5, 5, 724)) >>> odfs[..., 0] = 2. >>> from dipy.data import get_sphere >>> sphere = get_sphere('symmetric724') >>> fvtk.add(r, fvtk.sphere_funcs(odfs, sphere)) >>> #fvtk.show(r) """ sphere_values = np.asarray(sphere_values) if sphere_values.ndim > 4: raise ValueError("Wrong shape") sphere_values = _makeNd(sphere_values, 4) grid_shape = np.array(sphere_values.shape[:3]) faces = np.asarray(sphere.faces, dtype=int) vertices = sphere.vertices if sphere_values.shape[-1] != sphere.vertices.shape[0]: msg = 'Sphere.vertices.shape[0] should be the same as the ' msg += 'last dimensions of sphere_values i.e. 
sphere_values.shape[-1]' raise ValueError(msg) list_sq = [] list_cols = [] for ijk in np.ndindex(*grid_shape): m = sphere_values[ijk].copy() if norm: m /= abs(m).max() if radial_scale: xyz = vertices.T * m else: xyz = vertices.T.copy() xyz += scale * (ijk - grid_shape / 2.)[:, None] xyz = xyz.T list_sq.append(xyz) if colormap is not None: cols = create_colormap(m, colormap) cols = np.interp(cols, [0, 1], [0, 255]).astype('ubyte') list_cols.append(cols) points = vtk.vtkPoints() triangles = vtk.vtkCellArray() if colormap is not None: colors = vtk.vtkUnsignedCharArray() colors.SetNumberOfComponents(3) colors.SetName("Colors") for k in xrange(len(list_sq)): xyz = list_sq[k] if colormap is not None: cols = list_cols[k] for i in xrange(xyz.shape[0]): points.InsertNextPoint(*xyz[i]) if colormap is not None: colors.InsertNextTuple3(*cols[i]) for j in xrange(faces.shape[0]): triangle = vtk.vtkTriangle() triangle.GetPointIds().SetId(0, faces[j, 0] + k * xyz.shape[0]) triangle.GetPointIds().SetId(1, faces[j, 1] + k * xyz.shape[0]) triangle.GetPointIds().SetId(2, faces[j, 2] + k * xyz.shape[0]) triangles.InsertNextCell(triangle) del triangle polydata = vtk.vtkPolyData() polydata.SetPoints(points) polydata.SetPolys(triangles) if colormap is not None: polydata.GetPointData().SetScalars(colors) polydata.Modified() mapper = vtk.vtkPolyDataMapper() if major_version <= 5: mapper.SetInput(polydata) else: mapper.SetInputData(polydata) actor = vtk.vtkActor() actor.SetMapper(mapper) return actor def peaks(peaks_dirs, peaks_values=None, scale=2.2, colors=(1, 0, 0)): """ Visualize peak directions as given from ``peaks_from_model`` Parameters ---------- peaks_dirs : ndarray Peak directions. The shape of the array can be (M, 3) or (X, M, 3) or (X, Y, M, 3) or (X, Y, Z, M, 3) peaks_values : ndarray Peak values. The shape of the array can be (M, ) or (X, M) or (X, Y, M) or (X, Y, Z, M) scale : float Distance between spheres colors : ndarray or tuple Peak colors Returns ------- vtkActor See Also -------- dipy.viz.fvtk.sphere_funcs """ peaks_dirs = np.asarray(peaks_dirs) if peaks_dirs.ndim > 5: raise ValueError("Wrong shape") peaks_dirs = _makeNd(peaks_dirs, 5) if peaks_values is not None: peaks_values = _makeNd(peaks_values, 4) grid_shape = np.array(peaks_dirs.shape[:3]) list_dirs = [] for ijk in np.ndindex(*grid_shape): xyz = scale * (ijk - grid_shape / 2.)[:, None] xyz = xyz.T for i in range(peaks_dirs.shape[-2]): if peaks_values is not None: pv = peaks_values[ijk][i] else: pv = 1. symm = np.vstack((-peaks_dirs[ijk][i] * pv + xyz, peaks_dirs[ijk][i] * pv + xyz)) list_dirs.append(symm) return line(list_dirs, colors) def tensor(evals, evecs, scalar_colors=None, sphere=None, scale=2.2, norm=True): """Plot many tensors as ellipsoids simultaneously. Parameters ---------- evals : (3,) or (X, 3) or (X, Y, 3) or (X, Y, Z, 3) ndarray eigenvalues evecs : (3, 3) or (X, 3, 3) or (X, Y, 3, 3) or (X, Y, Z, 3, 3) ndarray eigenvectors scalar_colors : (3,) or (X, 3) or (X, Y, 3) or (X, Y, Z, 3) ndarray RGB colors used to show the tensors Default None, color the ellipsoids using ``color_fa`` sphere : Sphere, this sphere will be transformed to the tensor ellipsoid Default is None which uses a symmetric sphere with 724 points. scale : float, distance between ellipsoids. norm : boolean, Normalize `evals`. 
Returns ------- actor : vtkActor Ellipsoids Examples -------- >>> from dipy.viz import fvtk >>> r = fvtk.ren() >>> evals = np.array([1.4, .35, .35]) * 10 ** (-3) >>> evecs = np.eye(3) >>> from dipy.data import get_sphere >>> sphere = get_sphere('symmetric724') >>> fvtk.add(r, fvtk.tensor(evals, evecs, sphere=sphere)) >>> #fvtk.show(r) """ evals = np.asarray(evals) if evals.ndim > 4: raise ValueError("Wrong shape") evals = _makeNd(evals, 4) evecs = _makeNd(evecs, 5) grid_shape = np.array(evals.shape[:3]) if sphere is None: from dipy.data import get_sphere sphere = get_sphere('symmetric724') faces = np.asarray(sphere.faces, dtype=int) vertices = sphere.vertices colors = vtk.vtkUnsignedCharArray() colors.SetNumberOfComponents(3) colors.SetName("Colors") if scalar_colors is None: from dipy.reconst.dti import color_fa, fractional_anisotropy cfa = color_fa(fractional_anisotropy(evals), evecs) else: cfa = _makeNd(scalar_colors, 4) list_sq = [] list_cols = [] for ijk in ndindex(grid_shape): ea = evals[ijk] if norm: ea /= ea.max() ea = np.diag(ea.copy()) ev = evecs[ijk].copy() xyz = np.dot(ev, np.dot(ea, vertices.T)) xyz += scale * (ijk - grid_shape / 2.)[:, None] xyz = xyz.T list_sq.append(xyz) acolor = np.zeros(xyz.shape) acolor[:, :] = np.interp(cfa[ijk], [0, 1], [0, 255]) list_cols.append(acolor.astype('ubyte')) points = vtk.vtkPoints() triangles = vtk.vtkCellArray() for k in xrange(len(list_sq)): xyz = list_sq[k] cols = list_cols[k] for i in xrange(xyz.shape[0]): points.InsertNextPoint(*xyz[i]) colors.InsertNextTuple3(*cols[i]) for j in xrange(faces.shape[0]): triangle = vtk.vtkTriangle() triangle.GetPointIds().SetId(0, faces[j, 0] + k * xyz.shape[0]) triangle.GetPointIds().SetId(1, faces[j, 1] + k * xyz.shape[0]) triangle.GetPointIds().SetId(2, faces[j, 2] + k * xyz.shape[0]) triangles.InsertNextCell(triangle) del triangle polydata = vtk.vtkPolyData() polydata.SetPoints(points) polydata.SetPolys(triangles) polydata.GetPointData().SetScalars(colors) polydata.Modified() mapper = vtk.vtkPolyDataMapper() if major_version <= 5: mapper.SetInput(polydata) else: mapper.SetInputData(polydata) actor = vtk.vtkActor() actor.SetMapper(mapper) return actor def camera(ren, pos=None, focal=None, viewup=None, verbose=True): """ Change the active camera Parameters ---------- ren : vtkRenderer pos : tuple (x, y, z) position of the camera focal : tuple (x, y, z) focal point viewup : tuple (x, y, z) viewup vector verbose : bool show information about the camera Returns ------- vtkCamera """ msg = "This function is deprecated." msg += "Please use the window.Renderer class to get/set the active camera." 
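# The deprecation message above points users at window.Renderer. A minimal,
# illustrative sketch of that replacement (not a verbatim dipy API quote):
# because Renderer subclasses vtkRenderer, its active vtkCamera can be
# configured directly with the same VTK calls this function uses below:
#
#     ren = window.Renderer()
#     cam = ren.GetActiveCamera()
#     cam.SetPosition(0, 0, 10)
#     cam.SetFocalPoint(0, 0, 0)
#     cam.SetViewUp(0, 1, 0)
#
# Renderer also exposes convenience helpers for this (e.g. set_camera); their
# exact signatures are assumed rather than quoted here, so check
# dipy.viz.window before relying on them.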
warn(DeprecationWarning(msg)) cam = ren.GetActiveCamera() if verbose: print('Camera Position (%.2f,%.2f,%.2f)' % cam.GetPosition()) print('Camera Focal Point (%.2f,%.2f,%.2f)' % cam.GetFocalPoint()) print('Camera View Up (%.2f,%.2f,%.2f)' % cam.GetViewUp()) if pos is not None: cam = ren.GetActiveCamera().SetPosition(*pos) if focal is not None: ren.GetActiveCamera().SetFocalPoint(*focal) if viewup is not None: ren.GetActiveCamera().SetViewUp(*viewup) cam = ren.GetActiveCamera() if pos is not None or focal is not None or viewup is not None: if verbose: print('-------------------------------------') print('Camera New Position (%.2f,%.2f,%.2f)' % cam.GetPosition()) print('Camera New Focal Point (%.2f,%.2f,%.2f)' % cam.GetFocalPoint()) print('Camera New View Up (%.2f,%.2f,%.2f)' % cam.GetViewUp()) return cam if __name__ == "__main__": pass dipy-0.13.0/dipy/viz/interactor.py000066400000000000000000000267701317371701200170570ustar00rootroot00000000000000import numpy as np # Conditional import machinery for vtk from dipy.utils.optpkg import optional_package # Allow import, but disable doctests if we don't have vtk vtk, have_vtk, setup_module = optional_package('vtk') if have_vtk: vtkInteractorStyleUser = vtk.vtkInteractorStyleUser # version = vtk.vtkVersion.GetVTKSourceVersion().split(' ')[-1] # major_version = vtk.vtkVersion.GetVTKMajorVersion() else: vtkInteractorStyleUser = object class Event(object): def __init__(self): self.position = None self.name = None self.key = None self._abort_flag = None @property def abort_flag(self): return self._abort_flag def update(self, event_name, interactor): """ Updates current event information. """ self.name = event_name self.position = np.asarray(interactor.GetEventPosition()) self.key = interactor.GetKeySym() self._abort_flag = False # Reset abort flag def abort(self): """ Aborts the event i.e. do not propagate it any further. """ self._abort_flag = True def reset(self): """ Done with the current event. Reset the attributes. """ self.position = None self.name = None self.key = None self._abort_flag = False class CustomInteractorStyle(vtkInteractorStyleUser): """ Manipulate the camera and interact with objects in the scene. This interactor style allows the user to interactively manipulate (pan, rotate and zoom) the camera. It also allows the user to interact (click, scroll, etc.) with objects in the scene. Several events handling methods from :class:`vtkInteractorStyleUser` have been overloaded to allow the propagation of the events to the objects the user is interacting with. In summary, while interacting with the scene, the mouse events are as follows: - Left mouse button: rotates the camera - Right mouse button: dollys the camera - Mouse wheel: dollys the camera - Middle mouse button: pans the camera """ def __init__(self): # Default interactor is responsible for moving the camera. self.default_interactor = vtk.vtkInteractorStyleTrackballCamera() # The picker allows us to know which object/actor is under the mouse. self.picker = vtk.vtkPropPicker() self.chosen_element = None self.event = Event() # Define some interaction states self.left_button_down = False self.right_button_down = False self.middle_button_down = False self.active_props = set() self.selected_props = {"left_button": set(), "right_button": set(), "middle_button": set()} def add_active_prop(self, prop): self.active_props.add(prop) def remove_active_prop(self, prop): self.active_props.remove(prop) def get_prop_at_event_position(self): """ Returns the prop that lays at the event position. 
""" # TODO: return a list of items (i.e. each level of the assembly path). event_pos = self.GetInteractor().GetEventPosition() self.picker.Pick(event_pos[0], event_pos[1], 0, self.GetCurrentRenderer()) path = self.picker.GetPath() if path is None: return None node = path.GetLastNode() prop = node.GetViewProp() return prop def propagate_event(self, evt, *props): for prop in props: # Propagate event to the prop. prop.InvokeEvent(evt) if self.event.abort_flag: return def on_left_button_down(self, obj, evt): self.left_button_down = True prop = self.get_prop_at_event_position() if prop is not None: self.selected_props["left_button"].add(prop) self.propagate_event(evt, prop) if not self.event.abort_flag: self.default_interactor.OnLeftButtonDown() def on_left_button_up(self, obj, evt): self.left_button_down = False self.propagate_event(evt, *self.selected_props["left_button"]) self.selected_props["left_button"].clear() self.default_interactor.OnLeftButtonUp() def on_right_button_down(self, obj, evt): self.right_button_down = True prop = self.get_prop_at_event_position() if prop is not None: self.selected_props["right_button"].add(prop) self.propagate_event(evt, prop) if not self.event.abort_flag: self.default_interactor.OnRightButtonDown() def on_right_button_up(self, obj, evt): self.right_button_down = False self.propagate_event(evt, *self.selected_props["right_button"]) self.selected_props["right_button"].clear() self.default_interactor.OnRightButtonUp() def on_middle_button_down(self, obj, evt): self.middle_button_down = True prop = self.get_prop_at_event_position() if prop is not None: self.selected_props["middle_button"].add(prop) self.propagate_event(evt, prop) if not self.event.abort_flag: self.default_interactor.OnMiddleButtonDown() def on_middle_button_up(self, obj, evt): self.middle_button_down = False self.propagate_event(evt, *self.selected_props["middle_button"]) self.selected_props["middle_button"].clear() self.default_interactor.OnMiddleButtonUp() def on_mouse_move(self, obj, evt): # Only propagate events to active or selected props. self.propagate_event(evt, *(self.active_props | self.selected_props["left_button"] | self.selected_props["right_button"] | self.selected_props["middle_button"])) self.default_interactor.OnMouseMove() def on_mouse_wheel_forward(self, obj, evt): # First, propagate mouse wheel event to underneath prop. prop = self.get_prop_at_event_position() if prop is not None: self.propagate_event(evt, prop) # Then, to the active props. if not self.event.abort_flag: self.propagate_event(evt, *self.active_props) # Finally, to the default interactor. if not self.event.abort_flag: self.default_interactor.OnMouseWheelForward() self.event.reset() def on_mouse_wheel_backward(self, obj, evt): # First, propagate mouse wheel event to underneath prop. prop = self.get_prop_at_event_position() if prop is not None: self.propagate_event(evt, prop) # Then, to the active props. if not self.event.abort_flag: self.propagate_event(evt, *self.active_props) # Finally, to the default interactor. if not self.event.abort_flag: self.default_interactor.OnMouseWheelBackward() self.event.reset() def on_char(self, obj, evt): self.propagate_event(evt, *self.active_props) def on_key_press(self, obj, evt): self.propagate_event(evt, *self.active_props) def on_key_release(self, obj, evt): self.propagate_event(evt, *self.active_props) def SetInteractor(self, interactor): # Internally, `InteractorStyle` objects need a handle to a # `vtkWindowInteractor` object and this is done via `SetInteractor`. 
# However, this has the side effect of adding directly all their # observers to the `interactor`! self.default_interactor.SetInteractor(interactor) # Remove all observers *most likely* (cannot guarantee that the # interactor didn't already have these observers) added by # `vtkInteractorStyleTrackballCamera`, i.e. our `default_interactor`. # # Note: Be sure that no observer has been manually added to the # `interactor` before setting the InteractorStyle. interactor.RemoveObservers("TimerEvent") interactor.RemoveObservers("EnterEvent") interactor.RemoveObservers("LeaveEvent") interactor.RemoveObservers("ExposeEvent") interactor.RemoveObservers("ConfigureEvent") interactor.RemoveObservers("CharEvent") interactor.RemoveObservers("KeyPressEvent") interactor.RemoveObservers("KeyReleaseEvent") interactor.RemoveObservers("MouseMoveEvent") interactor.RemoveObservers("LeftButtonPressEvent") interactor.RemoveObservers("RightButtonPressEvent") interactor.RemoveObservers("MiddleButtonPressEvent") interactor.RemoveObservers("LeftButtonReleaseEvent") interactor.RemoveObservers("RightButtonReleaseEvent") interactor.RemoveObservers("MiddleButtonReleaseEvent") interactor.RemoveObservers("MouseWheelForwardEvent") interactor.RemoveObservers("MouseWheelBackwardEvent") # This class is a `vtkClass` (instead of `object`), so `super()` # cannot be used. Also the method `SetInteractor` is not overridden in # `vtkInteractorStyleUser` so we have to call directly the one from # `vtkInteractorStyle`. In addition to setting the interactor, the # following line adds the necessary hooks to listen to this instance's # observers. vtk.vtkInteractorStyle.SetInteractor(self, interactor) # Keyboard events. self.AddObserver("CharEvent", self.on_char) self.AddObserver("KeyPressEvent", self.on_key_press) self.AddObserver("KeyReleaseEvent", self.on_key_release) # Mouse events. self.AddObserver("MouseMoveEvent", self.on_mouse_move) self.AddObserver("LeftButtonPressEvent", self.on_left_button_down) self.AddObserver("LeftButtonReleaseEvent", self.on_left_button_up) self.AddObserver("RightButtonPressEvent", self.on_right_button_down) self.AddObserver("RightButtonReleaseEvent", self.on_right_button_up) self.AddObserver("MiddleButtonPressEvent", self.on_middle_button_down) self.AddObserver("MiddleButtonReleaseEvent", self.on_middle_button_up) # Windows and special events. # TODO: we ever find them useful we could support them. # self.AddObserver("TimerEvent", self.on_timer) # self.AddObserver("EnterEvent", self.on_enter) # self.AddObserver("LeaveEvent", self.on_leave) # self.AddObserver("ExposeEvent", self.on_expose) # self.AddObserver("ConfigureEvent", self.on_configure) # These observers need to be added directly to the interactor because # `vtkInteractorStyleUser` does not support wheel events prior 7.1. See # https://github.com/Kitware/VTK/commit/373258ed21f0915c425eddb996ce6ac13404be28 interactor.AddObserver("MouseWheelForwardEvent", self.on_mouse_wheel_forward) interactor.AddObserver("MouseWheelBackwardEvent", self.on_mouse_wheel_backward) def force_render(self): """ Causes the renderer to refresh. """ self.GetInteractor().GetRenderWindow().Render() def add_callback(self, prop, event_type, callback, priority=0, args=[]): """ Adds a callback associated to a specific event for a VTK prop. Parameters ---------- prop : vtkProp event_type : event code callback : function priority : int """ def _callback(obj, event_name): # Update event information. 
self.event.update(event_name, self.GetInteractor()) callback(self, prop, *args) prop.AddObserver(event_type, _callback, priority) dipy-0.13.0/dipy/viz/projections.py000066400000000000000000000075001317371701200172320ustar00rootroot00000000000000""" Visualization tools for 2D projections of 3D functions on the sphere, such as ODFs. """ import numpy as np import scipy.interpolate as interp from dipy.utils.optpkg import optional_package import dipy.core.geometry as geo from dipy.testing import doctest_skip_parser matplotlib, has_mpl, setup_module = optional_package("matplotlib") plt, _, _ = optional_package("matplotlib.pyplot") tri, _, _ = optional_package("matplotlib.tri") bm, has_basemap, _ = optional_package("mpl_toolkits.basemap") @doctest_skip_parser def sph_project(vertices, val, ax=None, vmin=None, vmax=None, cmap=None, cbar=True, tri=False, boundary=False, **basemap_args): """Draw a signal on a 2D projection of the sphere. Parameters ---------- vertices : (N,3) ndarray unit vector points of the sphere val : (N,) ndarray Function values. ax : mpl axis, optional If specified, draw onto this existing axis instead. vmin, vmax : floats, optional Lower and upper bounds of the color scale. If None, the minimum and maximum of `val` are used. cmap : mpl colormap, optional Colormap to use; defaults to `matplotlib.cm.hot`. cbar : bool Whether to add the color-bar to the figure. tri : bool Whether to display the plot triangulated as a pseudo-color plot. boundary : bool Whether to draw the boundary around the projection as a black line. Returns ------- ax : axis Matplotlib figure axis Examples -------- >>> from dipy.data import get_sphere >>> verts = get_sphere('symmetric724').vertices >>> ax = sph_project(verts.T, np.random.rand(len(verts.T))) # skip if not has_basemap """ if ax is None: fig, ax = plt.subplots(1) if cmap is None: cmap = matplotlib.cm.hot basemap_args.setdefault('projection', 'ortho') basemap_args.setdefault('lat_0', 0) basemap_args.setdefault('lon_0', 0) basemap_args.setdefault('resolution', 'c') from mpl_toolkits.basemap import Basemap m = Basemap(**basemap_args) if boundary: m.drawmapboundary() # Rotate the coordinate system so that you are looking from the north pole: verts_rot = np.array( np.dot(np.matrix([[0, 0, -1], [0, 1, 0], [1, 0, 0]]), vertices)) # To get the orthographic projection, when the first coordinate is # positive: neg_idx = np.where(verts_rot[0] > 0) # rotate the entire bvector around to point in the other direction: verts_rot[:, neg_idx] *= -1 _, theta, phi = geo.cart2sphere(verts_rot[0], verts_rot[1], verts_rot[2]) lat, lon = geo.sph2latlon(theta, phi) x, y = m(lon, lat) my_min = np.nanmin(val) if vmin is not None: my_min = vmin my_max = np.nanmax(val) if vmax is not None: my_max = vmax if tri: m.pcolor(x, y, val, vmin=my_min, vmax=my_max, tri=True, cmap=cmap) else: cmap_data = cmap._segmentdata red_interp, blue_interp, green_interp = ( interp.interp1d(np.array(cmap_data[gun])[:, 0], np.array(cmap_data[gun])[:, 1]) for gun in ['red', 'blue', 'green']) r = (val - my_min) / float(my_max - my_min) # Enforce the maximum and minimum boundaries, if there are values # outside those boundaries: r[r < 0] = 0 r[r > 1] = 1 for this_x, this_y, this_r in zip(x, y, r): red = red_interp(this_r) blue = blue_interp(this_r) green = green_interp(this_r) m.plot(this_x, this_y, 'o', c=[red.item(), green.item(), blue.item()]) if cbar: mappable = matplotlib.cm.ScalarMappable(cmap=cmap) mappable.set_array([my_min, my_max]) # setup colorbar axes instance. 
pos = ax.get_position() l, b, w, h = pos.bounds # setup colorbar axes cax = fig.add_axes([l + w + 0.075, b, 0.05, h], frameon=False) fig.colorbar(mappable, cax=cax) # draw colorbar return ax dipy-0.13.0/dipy/viz/regtools.py000066400000000000000000000376371317371701200165470ustar00rootroot00000000000000import numpy as np from dipy.utils.optpkg import optional_package matplotlib, has_mpl, setup_module = optional_package("matplotlib") plt, _, _ = optional_package("matplotlib.pyplot") def _tile_plot(imgs, titles, **kwargs): """ Helper function """ # Create a new figure and plot the three images fig, ax = plt.subplots(1, len(imgs)) for ii, a in enumerate(ax): a.set_axis_off() a.imshow(imgs[ii], **kwargs) a.set_title(titles[ii]) return fig def overlay_images(img0, img1, title0='', title_mid='', title1='', fname=None): r""" Plot two images one on top of the other using red and green channels. Creates a figure containing three images: the first image to the left plotted on the red channel of a color image, the second to the right plotted on the green channel of a color image and the two given images on top of each other using the red channel for the first image and the green channel for the second one. It is assumed that both images have the same shape. The intended use of this function is to visually assess the quality of a registration result. Parameters ---------- img0 : array, shape(R, C) the image to be plotted on the red channel, to the left of the figure img1 : array, shape(R, C) the image to be plotted on the green channel, to the right of the figure title0 : string (optional) the title to be written on top of the image to the left. By default, no title is displayed. title_mid : string (optional) the title to be written on top of the middle image. By default, no title is displayed. title1 : string (optional) the title to be written on top of the image to the right. By default, no title is displayed. fname : string (optional) the file name to write the resulting figure. If None (default), the image is not saved. """ # Normalize the input images to [0,255] img0 = 255 * ((img0 - img0.min()) / (img0.max() - img0.min())) img1 = 255 * ((img1 - img1.min()) / (img1.max() - img1.min())) # Create the color images img0_red = np.zeros(shape=(img0.shape) + (3,), dtype=np.uint8) img1_green = np.zeros(shape=(img0.shape) + (3,), dtype=np.uint8) overlay = np.zeros(shape=(img0.shape) + (3,), dtype=np.uint8) # Copy the normalized intensities into the appropriate channels of the # color images img0_red[..., 0] = img0 img1_green[..., 1] = img1 overlay[..., 0] = img0 overlay[..., 1] = img1 fig = _tile_plot([img0_red, overlay, img1_green], [title0, title_mid, title1]) # If a file name was given, save the figure if fname is not None: fig.savefig(fname, bbox_inches='tight') return fig def draw_lattice_2d(nrows, ncols, delta): r"""Create a regular lattice of nrows x ncols squares. Creates an image (2D array) of a regular lattice of nrows x ncols squares. The size of each square is delta x delta pixels (not counting the separation lines). The lines are one pixel width. Parameters ---------- nrows : int the number of squares to be drawn vertically ncols : int the number of squares to be drawn horizontally delta : int the size of each square of the grid. Each square is delta x delta pixels Returns ------- lattice : array, shape (R, C) the image (2D array) of the segular lattice. 
The shape (R, C) of the array is given by R = 1 + (delta + 1) * nrows C = 1 + (delta + 1) * ncols """ lattice = np.ndarray((1 + (delta + 1) * nrows, 1 + (delta + 1) * ncols), dtype=np.float64) # Fill the lattice with "white" lattice[...] = 127 # Draw the horizontal lines in "black" for i in range(nrows + 1): lattice[i * (delta + 1), :] = 0 # Draw the vertical lines in "black" for j in range(ncols + 1): lattice[:, j * (delta + 1)] = 0 return lattice def plot_2d_diffeomorphic_map(mapping, delta=10, fname=None, direct_grid_shape=None, direct_grid2world=-1, inverse_grid_shape=None, inverse_grid2world=-1, show_figure=True): r"""Draw the effect of warping a regular lattice by a diffeomorphic map. Draws a diffeomorphic map by showing the effect of the deformation on a regular grid. The resulting figure contains two images: the direct transformation is plotted to the left, and the inverse transformation is plotted to the right. Parameters ---------- mapping : DiffeomorphicMap object the diffeomorphic map to be drawn delta : int, optional the size (in pixels) of the squares of the regular lattice to be used to plot the warping effects. Each square will be delta x delta pixels. By default, the size will be 10 pixels. fname : string, optional the name of the file the figure will be written to. If None (default), the figure will not be saved to disk. direct_grid_shape : tuple, shape (2,), optional the shape of the grid image after being deformed by the direct transformation. By default, the shape of the deformed grid is the same as the grid of the displacement field, which is by default equal to the shape of the fixed image. In other words, the resulting deformed grid (deformed by the direct transformation) will normally have the same shape as the fixed image. direct_grid2world : array, shape (3, 3), optional the affine transformation mapping the direct grid's coordinates to physical space. By default, this transformation will correspond to the image-to-world transformation corresponding to the default direct_grid_shape (in general, if users specify a direct_grid_shape, they should also specify direct_grid2world). inverse_grid_shape : tuple, shape (2,), optional the shape of the grid image after being deformed by the inverse transformation. By default, the shape of the deformed grid under the inverse transform is the same as the image used as "moving" when the diffeomorphic map was generated by a registration algorithm (so it corresponds to the effect of warping the static image towards the moving). inverse_grid2world : array, shape (3, 3), optional the affine transformation mapping inverse grid's coordinates to physical space. By default, this transformation will correspond to the image-to-world transformation corresponding to the default inverse_grid_shape (in general, if users specify an inverse_grid_shape, they should also specify inverse_grid2world). show_figure : bool, optional if True (default), the deformed grids will be ploted using matplotlib, else the grids are just returned Returns ------- warped_forward : array Image with grid showing the effect of transforming the moving image to the static image. Shape will be `direct_grid_shape` if specified, otherwise the shape of the static image. warped_backward : array Image with grid showing the effect of transforming the static image to the moving image. Shape will be `inverse_grid_shape` if specified, otherwise the shape of the moving image. 
Note ---- The default value for the affine transformation is "-1" to handle the case in which the user provides "None" as input meaning "identity". If we used None as default, we wouldn't know if the user specifically wants to use the identity (specifically passing None) or if it was left unspecified, meaning to use the apropriate default matrix. """ if mapping.is_inverse: # By default, direct_grid_shape is the codomain grid if direct_grid_shape is None: direct_grid_shape = mapping.codomain_shape if direct_grid2world is -1: direct_grid2world = mapping.codomain_grid2world # By default, the inverse grid is the domain grid if inverse_grid_shape is None: inverse_grid_shape = mapping.domain_shape if inverse_grid2world is -1: inverse_grid2world = mapping.domain_grid2world else: # Now by default, direct_grid_shape is the mapping's input grid if direct_grid_shape is None: direct_grid_shape = mapping.domain_shape if direct_grid2world is -1: direct_grid2world = mapping.domain_grid2world # By default, the output grid is the mapping's domain grid if inverse_grid_shape is None: inverse_grid_shape = mapping.codomain_shape if inverse_grid2world is -1: inverse_grid2world = mapping.codomain_grid2world # The world-to-image (image = drawn lattice on the output grid) # transformation is the inverse of the output affine world_to_image = None if inverse_grid2world is not None: world_to_image = np.linalg.inv(inverse_grid2world) # Draw the squares on the output grid lattice_out = draw_lattice_2d( (inverse_grid_shape[0] + delta) // (delta + 1), (inverse_grid_shape[1] + delta) // (delta + 1), delta) lattice_out = lattice_out[0:inverse_grid_shape[0], 0:inverse_grid_shape[1]] # Warp in the forward direction (sampling it on the input grid) warped_forward = mapping.transform(lattice_out, 'linear', world_to_image, direct_grid_shape, direct_grid2world) # Now, the world-to-image (image = drawn lattice on the input grid) # transformation is the inverse of the input affine world_to_image = None if direct_grid2world is not None: world_to_image = np.linalg.inv(direct_grid2world) # Draw the squares on the input grid lattice_in = draw_lattice_2d((direct_grid_shape[0] + delta) // (delta + 1), (direct_grid_shape[1] + delta) // (delta + 1), delta) lattice_in = lattice_in[0:direct_grid_shape[0], 0:direct_grid_shape[1]] # Warp in the backward direction (sampling it on the output grid) warped_backward = mapping.transform_inverse( lattice_in, 'linear', world_to_image, inverse_grid_shape, inverse_grid2world) # Now plot the grids if show_figure: plt.figure() plt.subplot(1, 2, 1).set_axis_off() plt.imshow(warped_forward, cmap=plt.cm.gray) plt.title('Direct transform') plt.subplot(1, 2, 2).set_axis_off() plt.imshow(warped_backward, cmap=plt.cm.gray) plt.title('Inverse transform') # Finally, save the figure to disk if fname is not None: plt.savefig(fname, bbox_inches='tight') # Return the deformed grids return warped_forward, warped_backward def plot_slices(V, slice_indices=None, fname=None): r"""Plot 3 slices from the given volume: 1 sagital, 1 coronal and 1 axial Creates a figure showing the axial, coronal and sagital slices at the requested positions of the given volume. The requested slices are specified by slice_indices. Parameters ---------- V : array, shape (S, R, C) the 3D volume to extract the slices from slice_indices : array, shape (3,) (optional) the indices of the sagital (slice_indices[0]), coronal (slice_indices[1]) and axial (slice_indices[2]) slices to be displayed. 
If None, the middle slices along each direction are displayed. fname : string (optional) the name of the file to save the figure to. If None (default), the figure is not saved to disk. """ if slice_indices is None: slice_indices = np.array(V.shape) // 2 # Normalize the intensities to [0, 255] sh = V.shape V = np.asarray(V, dtype=np.float64) V = 255 * (V - V.min()) / (V.max() - V.min()) # Extract the middle slices axial = np.asarray(V[:, :, slice_indices[2]]).astype(np.uint8).T coronal = np.asarray(V[:, slice_indices[1], :]).astype(np.uint8).T sagittal = np.asarray(V[slice_indices[0], :, :]).astype(np.uint8).T fig = _tile_plot([axial, coronal, sagittal], ['Axial', 'Coronal', 'Sagittal'], cmap=plt.cm.gray, origin='lower') # Save the figure if requested if fname is not None: fig.savefig(fname, bbox_inches='tight') return fig def overlay_slices(L, R, slice_index=None, slice_type=1, ltitle='Left', rtitle='Right', fname=None): r"""Plot three overlaid slices from the given volumes. Creates a figure containing three images: the gray scale k-th slice of the first volume (L) to the left, where k=slice_index, the k-th slice of the second volume (R) to the right and the k-th slices of the two given images on top of each other using the red channel for the first volume and the green channel for the second one. It is assumed that both volumes have the same shape. The intended use of this function is to visually assess the quality of a registration result. Parameters ---------- L : array, shape (S, R, C) the first volume to extract the slice from, plotted to the left R : array, shape (S, R, C) the second volume to extract the slice from, plotted to the right slice_index : int (optional) the index of the slices (along the axis given by slice_type) to be overlaid. If None, the middle slice along the specified axis is used slice_type : int (optional) the type of slice to be extracted: 0=sagittal, 1=coronal (default), 2=axial. ltitle : string (optional) the string to be written as title of the left image. Defaults to 'Left'. rtitle : string (optional) the string to be written as title of the right image. Defaults to 'Right'. fname : string (optional) the name of the file to write the image to. If None (default), the figure is not saved to disk. 
""" # Normalize the intensities to [0,255] sh = L.shape L = np.asarray(L, dtype=np.float64) R = np.asarray(R, dtype=np.float64) L = 255 * (L - L.min()) / (L.max() - L.min()) R = 255 * (R - R.min()) / (R.max() - R.min()) # Create the color image to draw the overlapped slices into, and extract # the slices (note the transpositions) if slice_type is 0: if slice_index is None: slice_index = sh[0] // 2 colorImage = np.zeros(shape=(sh[2], sh[1], 3), dtype=np.uint8) ll = np.asarray(L[slice_index, :, :]).astype(np.uint8).T rr = np.asarray(R[slice_index, :, :]).astype(np.uint8).T elif slice_type is 1: if slice_index is None: slice_index = sh[1] // 2 colorImage = np.zeros(shape=(sh[2], sh[0], 3), dtype=np.uint8) ll = np.asarray(L[:, slice_index, :]).astype(np.uint8).T rr = np.asarray(R[:, slice_index, :]).astype(np.uint8).T elif slice_type is 2: if slice_index is None: slice_index = sh[2] // 2 colorImage = np.zeros(shape=(sh[1], sh[0], 3), dtype=np.uint8) ll = np.asarray(L[:, :, slice_index]).astype(np.uint8).T rr = np.asarray(R[:, :, slice_index]).astype(np.uint8).T else: print("Slice type must be 0, 1 or 2.") return # Draw the intensity images to the appropriate channels of the color image # The "(ll > ll[0, 0])" condition is just an attempt to eliminate the # background when its intensity is not exactly zero (the [0,0] corner is # usually background) colorImage[..., 0] = ll * (ll > ll[0, 0]) colorImage[..., 1] = rr * (rr > rr[0, 0]) fig = _tile_plot([ll, colorImage, rr], [ltitle, 'Overlay', rtitle], cmap=plt.cm.gray, origin='lower') # Save the figure to disk, if requested if fname is not None: fig.savefig(fname, bbox_inches='tight') return fig dipy-0.13.0/dipy/viz/tests/000077500000000000000000000000001317371701200154615ustar00rootroot00000000000000dipy-0.13.0/dipy/viz/tests/__init__.py000066400000000000000000000001651317371701200175740ustar00rootroot00000000000000# init to make tests into a package # Test callable from numpy.testing import Tester test = Tester().test del Tester dipy-0.13.0/dipy/viz/tests/test_actors.py000066400000000000000000000321111317371701200203630ustar00rootroot00000000000000import os import numpy as np from dipy.viz import actor, window import numpy.testing as npt from nibabel.tmpdirs import TemporaryDirectory from dipy.tracking.streamline import center_streamlines, transform_streamlines from dipy.align.tests.test_streamlinear import fornix_streamlines from dipy.testing.decorators import xvfb_it from dipy.data import get_sphere from tempfile import mkstemp use_xvfb = os.environ.get('TEST_WITH_XVFB', False) if use_xvfb == 'skip': skip_it = True else: skip_it = False run_test = (actor.have_vtk and actor.have_vtk_colors and window.have_imread and not skip_it) if actor.have_vtk: if actor.major_version == 5 and use_xvfb: skip_slicer = True else: skip_slicer = False else: skip_slicer = False @npt.dec.skipif(skip_slicer) @npt.dec.skipif(not run_test) @xvfb_it def test_slicer(): renderer = window.renderer() data = (255 * np.random.rand(50, 50, 50)) affine = np.eye(4) slicer = actor.slicer(data, affine) slicer.display(None, None, 25) renderer.add(slicer) renderer.reset_camera() renderer.reset_clipping_range() # window.show(renderer) # copy pixels in numpy array directly arr = window.snapshot(renderer, 'test_slicer.png', offscreen=False) import scipy print(scipy.__version__) print(scipy.__file__) print(arr.sum()) print(np.sum(arr == 0)) print(np.sum(arr > 0)) print(arr.shape) print(arr.dtype) report = window.analyze_snapshot(arr, find_objects=True) print(report) 
npt.assert_equal(report.objects, 1) # print(arr[..., 0]) # The slicer can cut directly a smaller part of the image slicer.display_extent(10, 30, 10, 30, 35, 35) renderer.ResetCamera() renderer.add(slicer) # save pixels in png file not a numpy array with TemporaryDirectory() as tmpdir: fname = os.path.join(tmpdir, 'slice.png') # window.show(renderer) arr = window.snapshot(renderer, fname, offscreen=False) report = window.analyze_snapshot(fname, find_objects=True) npt.assert_equal(report.objects, 1) npt.assert_raises(ValueError, actor.slicer, np.ones(10)) renderer.clear() rgb = np.zeros((30, 30, 30, 3)) rgb[..., 0] = 1. rgb_actor = actor.slicer(rgb) renderer.add(rgb_actor) renderer.reset_camera() renderer.reset_clipping_range() arr = window.snapshot(renderer, offscreen=False) report = window.analyze_snapshot(arr, colors=[(255, 0, 0)]) npt.assert_equal(report.objects, 1) npt.assert_equal(report.colors_found, [True]) lut = actor.colormap_lookup_table(scale_range=(0, 255), hue_range=(0.4, 1.), saturation_range=(1, 1.), value_range=(0., 1.)) renderer.clear() slicer_lut = actor.slicer(data, lookup_colormap=lut) slicer_lut.display(10, None, None) slicer_lut.display(None, 10, None) slicer_lut.display(None, None, 10) slicer_lut2 = slicer_lut.copy() slicer_lut2.display(None, None, 10) renderer.add(slicer_lut2) renderer.reset_clipping_range() arr = window.snapshot(renderer, offscreen=False) report = window.analyze_snapshot(arr, find_objects=True) npt.assert_equal(report.objects, 1) renderer.clear() data = (255 * np.random.rand(50, 50, 50)) affine = np.diag([1, 3, 2, 1]) slicer = actor.slicer(data, affine, interpolation='nearest') slicer.display(None, None, 25) renderer.add(slicer) renderer.reset_camera() renderer.reset_clipping_range() arr = window.snapshot(renderer, offscreen=False) report = window.analyze_snapshot(arr, find_objects=True) npt.assert_equal(report.objects, 1) npt.assert_equal(data.shape, slicer.shape) renderer.clear() data = (255 * np.random.rand(50, 50, 50)) affine = np.diag([1, 3, 2, 1]) from dipy.align.reslice import reslice data2, affine2 = reslice(data, affine, zooms=(1, 3, 2), new_zooms=(1, 1, 1)) slicer = actor.slicer(data2, affine2, interpolation='linear') slicer.display(None, None, 25) renderer.add(slicer) renderer.reset_camera() renderer.reset_clipping_range() # window.show(renderer, reset_camera=False) arr = window.snapshot(renderer, offscreen=False) report = window.analyze_snapshot(arr, find_objects=True) npt.assert_equal(report.objects, 1) npt.assert_array_equal([1, 3, 2] * np.array(data.shape), np.array(slicer.shape)) @npt.dec.skipif(not run_test) @xvfb_it def test_streamtube_and_line_actors(): renderer = window.renderer() line1 = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2.]]) line2 = line1 + np.array([0.5, 0., 0.]) lines = [line1, line2] colors = np.array([[1, 0, 0], [0, 0, 1.]]) c = actor.line(lines, colors, linewidth=3) window.add(renderer, c) c = actor.line(lines, colors, spline_subdiv=5, linewidth=3) window.add(renderer, c) # create streamtubes of the same lines and shift them a bit c2 = actor.streamtube(lines, colors, linewidth=.1) c2.SetPosition(2, 0, 0) window.add(renderer, c2) arr = window.snapshot(renderer) report = window.analyze_snapshot(arr, colors=[(255, 0, 0), (0, 0, 255)], find_objects=True) npt.assert_equal(report.objects, 4) npt.assert_equal(report.colors_found, [True, True]) # as before with splines c2 = actor.streamtube(lines, colors, spline_subdiv=5, linewidth=.1) c2.SetPosition(2, 0, 0) window.add(renderer, c2) arr = window.snapshot(renderer) report = 
window.analyze_snapshot(arr, colors=[(255, 0, 0), (0, 0, 255)], find_objects=True) npt.assert_equal(report.objects, 4) npt.assert_equal(report.colors_found, [True, True]) @npt.dec.skipif(not run_test) @xvfb_it def test_bundle_maps(): renderer = window.renderer() bundle = fornix_streamlines() bundle, shift = center_streamlines(bundle) mat = np.array([[1, 0, 0, 100], [0, 1, 0, 100], [0, 0, 1, 100], [0, 0, 0, 1.]]) bundle = transform_streamlines(bundle, mat) # metric = np.random.rand(*(200, 200, 200)) metric = 100 * np.ones((200, 200, 200)) # add lower values metric[100, :, :] = 100 * 0.5 # create a nice orange-red colormap lut = actor.colormap_lookup_table(scale_range=(0., 100.), hue_range=(0., 0.1), saturation_range=(1, 1), value_range=(1., 1)) line = actor.line(bundle, metric, linewidth=0.1, lookup_colormap=lut) window.add(renderer, line) window.add(renderer, actor.scalar_bar(lut, ' ')) report = window.analyze_renderer(renderer) npt.assert_almost_equal(report.actors, 1) # window.show(renderer) renderer.clear() nb_points = np.sum([len(b) for b in bundle]) values = 100 * np.random.rand(nb_points) # values[:nb_points/2] = 0 line = actor.streamtube(bundle, values, linewidth=0.1, lookup_colormap=lut) renderer.add(line) # window.show(renderer) report = window.analyze_renderer(renderer) npt.assert_equal(report.actors_classnames[0], 'vtkLODActor') renderer.clear() colors = np.random.rand(nb_points, 3) # values[:nb_points/2] = 0 line = actor.line(bundle, colors, linewidth=2) renderer.add(line) # window.show(renderer) report = window.analyze_renderer(renderer) npt.assert_equal(report.actors_classnames[0], 'vtkLODActor') # window.show(renderer) arr = window.snapshot(renderer) report2 = window.analyze_snapshot(arr) npt.assert_equal(report2.objects, 1) # try other input options for colors renderer.clear() actor.line(bundle, (1., 0.5, 0)) actor.line(bundle, np.arange(len(bundle))) actor.line(bundle) colors = [np.random.rand(*b.shape) for b in bundle] actor.line(bundle, colors=colors) @npt.dec.skipif(not run_test) @xvfb_it def test_odf_slicer(interactive=False): sphere = get_sphere('symmetric362') shape = (11, 11, 11, sphere.vertices.shape[0]) fid, fname = mkstemp(suffix='_odf_slicer.mmap') print(fid) print(fname) odfs = np.memmap(fname, dtype=np.float64, mode='w+', shape=shape) odfs[:] = 1 affine = np.eye(4) renderer = window.Renderer() mask = np.ones(odfs.shape[:3]) mask[:4, :4, :4] = 0 odfs[..., 0] = 1 odf_actor = actor.odf_slicer(odfs, affine, mask=mask, sphere=sphere, scale=.25, colormap='jet') fa = 0. * np.zeros(odfs.shape[:3]) fa[:, 0, :] = 1. fa[:, -1, :] = 1. fa[0, :, :] = 1. fa[-1, :, :] = 1. 
fa[5, 5, 5] = 1 k = 5 I, J, K = odfs.shape[:3] fa_actor = actor.slicer(fa, affine) fa_actor.display_extent(0, I, 0, J, k, k) renderer.add(odf_actor) renderer.reset_camera() renderer.reset_clipping_range() odf_actor.display_extent(0, I, 0, J, k, k) odf_actor.GetProperty().SetOpacity(1.0) if interactive: window.show(renderer, reset_camera=False) arr = window.snapshot(renderer) report = window.analyze_snapshot(arr, find_objects=True) npt.assert_equal(report.objects, 11 * 11) renderer.clear() renderer.add(fa_actor) renderer.reset_camera() renderer.reset_clipping_range() if interactive: window.show(renderer) mask[:] = 0 mask[5, 5, 5] = 1 fa[5, 5, 5] = 0 fa_actor = actor.slicer(fa, None) fa_actor.display(None, None, 5) odf_actor = actor.odf_slicer(odfs, None, mask=mask, sphere=sphere, scale=.25, colormap='jet', norm=False, global_cm=True) renderer.clear() renderer.add(fa_actor) renderer.add(odf_actor) renderer.reset_camera() renderer.reset_clipping_range() if interactive: window.show(renderer) renderer.clear() renderer.add(odf_actor) renderer.add(fa_actor) odfs[:, :, :] = 1 mask = np.ones(odfs.shape[:3]) odf_actor = actor.odf_slicer(odfs, None, mask=mask, sphere=sphere, scale=.25, colormap='jet', norm=False, global_cm=True) renderer.clear() renderer.add(odf_actor) renderer.add(fa_actor) renderer.add(actor.axes((11, 11, 11))) for i in range(11): odf_actor.display(i, None, None) fa_actor.display(i, None, None) if interactive: window.show(renderer) for j in range(11): odf_actor.display(None, j, None) fa_actor.display(None, j, None) if interactive: window.show(renderer) # with mask equal to zero everything should be black mask = np.zeros(odfs.shape[:3]) odf_actor = actor.odf_slicer(odfs, None, mask=mask, sphere=sphere, scale=.25, colormap='plasma', norm=False, global_cm=True) renderer.clear() renderer.add(odf_actor) renderer.reset_camera() renderer.reset_clipping_range() if interactive: window.show(renderer) report = window.analyze_renderer(renderer) npt.assert_equal(report.actors, 1) npt.assert_equal(report.actors_classnames[0], 'vtkLODActor') del odf_actor odfs._mmap.close() del odfs os.close(fid) os.remove(fname) @npt.dec.skipif(not run_test) @xvfb_it def test_peak_slicer(interactive=False): _peak_dirs = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]], dtype='f4') # peak_dirs.shape = (1, 1, 1) + peak_dirs.shape peak_dirs = np.zeros((11, 11, 11, 3, 3)) peak_values = np.random.rand(11, 11, 11, 3) peak_dirs[:, :, :] = _peak_dirs renderer = window.Renderer() peak_actor = actor.peak_slicer(peak_dirs) renderer.add(peak_actor) renderer.add(actor.axes((11, 11, 11))) if interactive: window.show(renderer) renderer.clear() renderer.add(peak_actor) renderer.add(actor.axes((11, 11, 11))) for k in range(11): peak_actor.display_extent(0, 10, 0, 10, k, k) for j in range(11): peak_actor.display_extent(0, 10, j, j, 0, 10) for i in range(11): peak_actor.display(i, None, None) renderer.rm_all() peak_actor = actor.peak_slicer( peak_dirs, peak_values, mask=None, affine=np.diag([3, 2, 1, 1]), colors=None, opacity=1, linewidth=3, lod=True, lod_points=10 ** 4, lod_points_size=3) renderer.add(peak_actor) renderer.add(actor.axes((11, 11, 11))) if interactive: window.show(renderer) report = window.analyze_renderer(renderer) ex = ['vtkLODActor', 'vtkOpenGLActor', 'vtkOpenGLActor', 'vtkOpenGLActor'] npt.assert_equal(report.actors_classnames, ex) if __name__ == "__main__": npt.run_module_suite() dipy-0.13.0/dipy/viz/tests/test_fvtk.py000066400000000000000000000076121317371701200200520ustar00rootroot00000000000000"""Testing 
visualization with fvtk.""" import os import warnings import numpy as np from distutils.version import LooseVersion from dipy.viz import fvtk from dipy import data import numpy.testing as npt from dipy.testing.decorators import xvfb_it from dipy.utils.optpkg import optional_package use_xvfb = os.environ.get('TEST_WITH_XVFB', False) if use_xvfb == 'skip': skip_it = True else: skip_it = False cm, have_matplotlib, _ = optional_package('matplotlib.cm') if have_matplotlib: import matplotlib mpl_version = LooseVersion(matplotlib.__version__) @npt.dec.skipif(not fvtk.have_vtk or not fvtk.have_vtk_colors or skip_it) @xvfb_it def test_fvtk_functions(): # This tests will fail if any of the given actors changed inputs or do # not exist # Create a renderer r = fvtk.ren() # Create 2 lines with 2 different colors lines = [np.random.rand(10, 3), np.random.rand(20, 3)] colors = np.random.rand(2, 3) c = fvtk.line(lines, colors) fvtk.add(r, c) # create streamtubes of the same lines and shift them a bit c2 = fvtk.streamtube(lines, colors) c2.SetPosition(2, 0, 0) fvtk.add(r, c2) # Create a volume and return a volumetric actor using volumetric rendering vol = 100 * np.random.rand(100, 100, 100) vol = vol.astype('uint8') r = fvtk.ren() v = fvtk.volume(vol) fvtk.add(r, v) # Remove all objects fvtk.rm_all(r) # Put some text l = fvtk.label(r, text='Yes Men') fvtk.add(r, l) # Slice the volume slicer = fvtk.slicer(vol) slicer.display(50, None, None) fvtk.add(r, slicer) # Change the position of the active camera fvtk.camera(r, pos=(0.6, 0, 0), verbose=False) fvtk.clear(r) # Peak directions p = fvtk.peaks(np.random.rand(3, 3, 3, 5, 3)) fvtk.add(r, p) p2 = fvtk.peaks(np.random.rand(3, 3, 3, 5, 3), np.random.rand(3, 3, 3, 5), colors=(0, 1, 0)) fvtk.add(r, p2) @npt.dec.skipif(not fvtk.have_vtk or not fvtk.have_vtk_colors or skip_it) @xvfb_it def test_fvtk_ellipsoid(): evals = np.array([1.4, .35, .35]) * 10 ** (-3) evecs = np.eye(3) mevals = np.zeros((3, 2, 4, 3)) mevecs = np.zeros((3, 2, 4, 3, 3)) mevals[..., :] = evals mevecs[..., :, :] = evecs from dipy.data import get_sphere sphere = get_sphere('symmetric724') ren = fvtk.ren() fvtk.add(ren, fvtk.tensor(mevals, mevecs, sphere=sphere)) fvtk.add(ren, fvtk.tensor(mevals, mevecs, np.ones(mevals.shape), sphere=sphere)) npt.assert_equal(ren.GetActors().GetNumberOfItems(), 2) def test_colormap(): v = np.linspace(0., .5) map1 = fvtk.create_colormap(v, 'bone', auto=True) map2 = fvtk.create_colormap(v, 'bone', auto=False) npt.assert_(not np.allclose(map1, map2)) npt.assert_raises(ValueError, fvtk.create_colormap, np.ones((2, 3))) npt.assert_raises(ValueError, fvtk.create_colormap, v, 'no such map') @npt.dec.skipif(not fvtk.have_matplotlib) def test_colormaps_matplotlib(): v = np.random.random(1000) # The "Accent" colormap is deprecated as of 0.12: with warnings.catch_warnings(record=True) as w: accent_cm = data.get_cmap("Accent") # Test that the deprecation warning was raised: npt.assert_(len(w) > 0) names = ['jet', 'Blues', 'bone'] if have_matplotlib and mpl_version < "2": names.append('Accent') for name in names: with warnings.catch_warnings(record=True) as w: # Matplotlib version of get_cmap rgba1 = fvtk.get_cmap(name)(v) # Dipy version of get_cmap rgba2 = data.get_cmap(name)(v) # dipy's colormaps are close to matplotlibs colormaps, but not # perfect: npt.assert_array_almost_equal(rgba1, rgba2, 1) npt.assert_(len(w) == (1 if name == 'Accent' else 0)) if __name__ == "__main__": npt.run_module_suite() 
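# Minimal usage sketch of the fvtk API exercised by the tests above, kept as a
# comment so nothing executes on import; the output path 'peaks.png' is an
# arbitrary example and record()'s keyword names follow the dipy 0.13
# window.record signature (assumed, verify locally):
#
#     import numpy as np
#     from dipy.viz import fvtk
#     ren = fvtk.ren()
#     peak_dirs = np.tile(np.eye(3), (3, 3, 3, 1, 1))  # 3 unit peaks per voxel
#     fvtk.add(ren, fvtk.peaks(peak_dirs, colors=(0, 1, 0)))
#     fvtk.record(ren, out_path='peaks.png', size=(300, 300))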
dipy-0.13.0/dipy/viz/tests/test_interactor.py000066400000000000000000000126321317371701200212500ustar00rootroot00000000000000import os import numpy as np from os.path import join as pjoin from collections import defaultdict from dipy.viz import actor, window, interactor from dipy.viz import utils as vtk_utils from dipy.data import DATA_DIR import numpy.testing as npt from dipy.testing.decorators import xvfb_it # Conditional import machinery for vtk from dipy.utils.optpkg import optional_package # Allow import, but disable doctests if we don't have vtk vtk, have_vtk, setup_module = optional_package('vtk') use_xvfb = os.environ.get('TEST_WITH_XVFB', False) if use_xvfb == 'skip': skip_it = True else: skip_it = False @npt.dec.skipif(not have_vtk or not actor.have_vtk_colors or skip_it) @xvfb_it def test_custom_interactor_style_events(recording=False): print("Using VTK {}".format(vtk.vtkVersion.GetVTKVersion())) filename = "test_custom_interactor_style_events.log.gz" recording_filename = pjoin(DATA_DIR, filename) renderer = window.Renderer() # the show manager allows to break the rendering process # in steps so that the widgets can be added properly interactor_style = interactor.CustomInteractorStyle() show_manager = window.ShowManager(renderer, size=(800, 800), reset_camera=False, interactor_style=interactor_style) # Create a cursor, a circle that will follow the mouse. polygon_source = vtk.vtkRegularPolygonSource() polygon_source.GeneratePolygonOff() # Only the outline of the circle. polygon_source.SetNumberOfSides(50) polygon_source.SetRadius(10) polygon_source.SetRadius polygon_source.SetCenter(0, 0, 0) mapper = vtk.vtkPolyDataMapper2D() vtk_utils.set_input(mapper, polygon_source.GetOutputPort()) cursor = vtk.vtkActor2D() cursor.SetMapper(mapper) cursor.GetProperty().SetColor(1, 0.5, 0) renderer.add(cursor) def follow_mouse(iren, obj): obj.SetPosition(*iren.event.position) iren.force_render() interactor_style.add_active_prop(cursor) interactor_style.add_callback(cursor, "MouseMoveEvent", follow_mouse) # create some minimalistic streamlines lines = [np.array([[-1, 0, 0.], [1, 0, 0.]]), np.array([[-1, 1, 0.], [1, 1, 0.]])] colors = np.array([[1., 0., 0.], [0.3, 0.7, 0.]]) tube1 = actor.streamtube([lines[0]], colors[0]) tube2 = actor.streamtube([lines[1]], colors[1]) renderer.add(tube1) renderer.add(tube2) # Define some counter callback. states = defaultdict(lambda: 0) def counter(iren, obj): states[iren.event.name] += 1 # Assign the counter callback to every possible event. for event in ["CharEvent", "MouseMoveEvent", "KeyPressEvent", "KeyReleaseEvent", "LeftButtonPressEvent", "LeftButtonReleaseEvent", "RightButtonPressEvent", "RightButtonReleaseEvent", "MiddleButtonPressEvent", "MiddleButtonReleaseEvent"]: interactor_style.add_callback(tube1, event, counter) # Add callback to scale up/down tube1. def scale_up_obj(iren, obj): counter(iren, obj) scale = np.asarray(obj.GetScale()) + 0.1 obj.SetScale(*scale) iren.force_render() iren.event.abort() # Stop propagating the event. def scale_down_obj(iren, obj): counter(iren, obj) scale = np.array(obj.GetScale()) - 0.1 obj.SetScale(*scale) iren.force_render() iren.event.abort() # Stop propagating the event. interactor_style.add_callback(tube2, "MouseWheelForwardEvent", scale_up_obj) interactor_style.add_callback(tube2, "MouseWheelBackwardEvent", scale_down_obj) # Add callback to hide/show tube1. 
def toggle_visibility(iren, obj): key = iren.event.key if key.lower() == "v": obj.SetVisibility(not obj.GetVisibility()) iren.force_render() interactor_style.add_active_prop(tube1) interactor_style.add_active_prop(tube2) interactor_style.remove_active_prop(tube2) interactor_style.add_callback(tube1, "CharEvent", toggle_visibility) if recording: show_manager.record_events_to_file(recording_filename) print(list(states.items())) else: show_manager.play_events_from_file(recording_filename) msg = ("Wrong count for '{}'.") expected = [('CharEvent', 6), ('KeyPressEvent', 6), ('KeyReleaseEvent', 6), ('MouseMoveEvent', 1652), ('LeftButtonPressEvent', 1), ('RightButtonPressEvent', 1), ('MiddleButtonPressEvent', 2), ('LeftButtonReleaseEvent', 1), ('MouseWheelForwardEvent', 3), ('MouseWheelBackwardEvent', 1), ('MiddleButtonReleaseEvent', 2), ('RightButtonReleaseEvent', 1)] # Useful loop for debugging. for event, count in expected: if states[event] != count: print("{}: {} vs. {} (expected)".format(event, states[event], count)) for event, count in expected: npt.assert_equal(states[event], count, err_msg=msg.format(event)) if __name__ == '__main__': test_custom_interactor_style_events(recording=True) dipy-0.13.0/dipy/viz/tests/test_regtools.py000066400000000000000000000027011317371701200207300ustar00rootroot00000000000000import numpy as np from dipy.viz import regtools import numpy.testing as npt from dipy.align.metrics import SSDMetric from dipy.align.imwarp import SymmetricDiffeomorphicRegistration # Conditional import machinery for matplotlib from dipy.utils.optpkg import optional_package _, have_matplotlib, _ = optional_package('matplotlib') @npt.dec.skipif(not have_matplotlib) def test_plot_2d_diffeomorphic_map(): # Test the regtools plotting interface (lightly). 
mv_shape = (11, 12) moving = np.random.rand(*mv_shape) st_shape = (13, 14) static = np.random.rand(*st_shape) dim = static.ndim metric = SSDMetric(dim) level_iters = [200, 100, 50, 25] sdr = SymmetricDiffeomorphicRegistration(metric, level_iters, inv_iter=50) mapping = sdr.optimize(static, moving) # Smoke testing of plots ff = regtools.plot_2d_diffeomorphic_map(mapping, 10) # Default shapes are the static shape and the moving shape npt.assert_equal(ff[0].shape, st_shape) npt.assert_equal(ff[1].shape, mv_shape) # Can specify shape ff = regtools.plot_2d_diffeomorphic_map(mapping, delta=10, direct_grid_shape=(7, 8), inverse_grid_shape=(9, 10)) npt.assert_equal(ff[0].shape, (7, 8)) npt.assert_equal(ff[1].shape, (9, 10)) dipy-0.13.0/dipy/viz/tests/test_ui.py000066400000000000000000000333241317371701200175140ustar00rootroot00000000000000import os import sys import pickle from os.path import join as pjoin import numpy.testing as npt from dipy.data import read_viz_icons, fetch_viz_icons from dipy.viz import ui from dipy.viz import window from dipy.data import DATA_DIR from nibabel.tmpdirs import InTemporaryDirectory from dipy.viz.ui import UI from dipy.testing.decorators import xvfb_it # Conditional import machinery for vtk from dipy.utils.optpkg import optional_package # Allow import, but disable doctests if we don't have vtk vtk, have_vtk, setup_module = optional_package('vtk') use_xvfb = os.environ.get('TEST_WITH_XVFB', False) if use_xvfb == 'skip': skip_it = True else: skip_it = False if have_vtk: print("Using VTK {}".format(vtk.vtkVersion.GetVTKVersion())) class EventCounter(object): def __init__(self, events_names=["CharEvent", "MouseMoveEvent", "KeyPressEvent", "KeyReleaseEvent", "LeftButtonPressEvent", "LeftButtonReleaseEvent", "RightButtonPressEvent", "RightButtonReleaseEvent", "MiddleButtonPressEvent", "MiddleButtonReleaseEvent"]): # Events to count self.events_counts = {name: 0 for name in events_names} def count(self, i_ren, obj, element): """ Simple callback that counts event occurrences. """ self.events_counts[i_ren.event.name] += 1 def monitor(self, ui_component): for event in self.events_counts: for actor in ui_component.get_actors(): ui_component.add_callback(actor, event, self.count) def save(self, filename): with open(filename, 'wb') as f: pickle.dump(self.events_counts, f, protocol=-1) @classmethod def load(cls, filename): event_counter = cls() with open(filename, 'rb') as f: event_counter.events_counts = pickle.load(f) return event_counter def check_counts(self, expected): npt.assert_equal(len(self.events_counts), len(expected.events_counts)) # Useful loop for debugging. msg = "{}: {} vs. {} (expected)" for event, count in expected.events_counts.items(): if self.events_counts[event] != count: print(msg.format(event, self.events_counts[event], count)) msg = "Wrong count for '{}'."
for event, count in expected.events_counts.items(): npt.assert_equal(self.events_counts[event], count, err_msg=msg.format(event)) @npt.dec.skipif(not have_vtk or skip_it) @xvfb_it def test_broken_ui_component(): class BrokenUI(UI): def __init__(self): self.actor = vtk.vtkActor() super(BrokenUI, self).__init__() broken_ui = BrokenUI() npt.assert_raises(NotImplementedError, broken_ui.get_actors) npt.assert_raises(NotImplementedError, broken_ui.set_center, (1, 2)) @npt.dec.skipif(not have_vtk or skip_it) @xvfb_it def test_wrong_interactor_style(): panel = ui.Panel2D(center=(440, 90), size=(300, 150)) dummy_renderer = window.Renderer() dummy_show_manager = window.ShowManager(dummy_renderer, interactor_style='trackball') npt.assert_raises(TypeError, panel.add_to_renderer, dummy_renderer) @npt.dec.skipif(not have_vtk or skip_it) @xvfb_it def test_ui_button_panel(recording=False): filename = "test_ui_button_panel" recording_filename = pjoin(DATA_DIR, filename + ".log.gz") expected_events_counts_filename = pjoin(DATA_DIR, filename + ".pkl") # Rectangle rectangle_test = ui.Rectangle2D(size=(10, 10)) rectangle_test.get_actors() another_rectangle_test = ui.Rectangle2D(size=(1, 1)) # /Rectangle # Button fetch_viz_icons() icon_files = dict() icon_files['stop'] = read_viz_icons(fname='stop2.png') icon_files['play'] = read_viz_icons(fname='play3.png') button_test = ui.Button2D(icon_fnames=icon_files) button_test.set_center((20, 20)) def make_invisible(i_ren, obj, button): # i_ren: CustomInteractorStyle # obj: vtkActor picked # button: Button2D button.set_visibility(False) i_ren.force_render() i_ren.event.abort() def modify_button_callback(i_ren, obj, button): # i_ren: CustomInteractorStyle # obj: vtkActor picked # button: Button2D button.next_icon() i_ren.force_render() button_test.on_right_mouse_button_pressed = make_invisible button_test.on_left_mouse_button_pressed = modify_button_callback button_test.scale((2, 2)) button_color = button_test.color button_test.color = button_color # /Button # TextBlock text_block_test = ui.TextBlock2D() text_block_test.message = 'TextBlock' text_block_test.color = (0, 0, 0) # Panel panel = ui.Panel2D(center=(440, 90), size=(300, 150), color=(1, 1, 1), align="right") panel.add_element(rectangle_test, 'absolute', (580, 150)) panel.add_element(button_test, 'relative', (0.2, 0.2)) panel.add_element(text_block_test, 'relative', (0.7, 0.7)) npt.assert_raises(ValueError, panel.add_element, another_rectangle_test, 'error_string', (1, 2)) # /Panel # Assign the counter callback to every possible event. 
event_counter = EventCounter() event_counter.monitor(button_test) event_counter.monitor(panel) current_size = (600, 600) show_manager = window.ShowManager(size=current_size, title="DIPY Button") show_manager.ren.add(panel) if recording: show_manager.record_events_to_file(recording_filename) print(list(event_counter.events_counts.items())) event_counter.save(expected_events_counts_filename) else: show_manager.play_events_from_file(recording_filename) expected = EventCounter.load(expected_events_counts_filename) event_counter.check_counts(expected) @npt.dec.skipif(not have_vtk or skip_it) @xvfb_it def test_ui_textbox(recording=False): filename = "test_ui_textbox" recording_filename = pjoin(DATA_DIR, filename + ".log.gz") expected_events_counts_filename = pjoin(DATA_DIR, filename + ".pkl") # TextBox textbox_test = ui.TextBox2D(height=3, width=10, text="Text") another_textbox_test = ui.TextBox2D(height=3, width=10, text="Enter Text") another_textbox_test.set_message("Enter Text") another_textbox_test.set_center((10, 100)) # /TextBox # Assign the counter callback to every possible event. event_counter = EventCounter() event_counter.monitor(textbox_test) current_size = (600, 600) show_manager = window.ShowManager(size=current_size, title="DIPY TextBox") show_manager.ren.add(textbox_test) if recording: show_manager.record_events_to_file(recording_filename) print(list(event_counter.events_counts.items())) event_counter.save(expected_events_counts_filename) else: show_manager.play_events_from_file(recording_filename) expected = EventCounter.load(expected_events_counts_filename) event_counter.check_counts(expected) @npt.dec.skipif(not have_vtk or skip_it) @xvfb_it def test_text_block_2d(): # TextBlock2D text_block = ui.TextBlock2D() text_block.message = "Hello World!" npt.assert_equal("Hello World!", text_block.message) text_block.font_size = 18 npt.assert_equal("18", str(text_block.font_size)) text_block.font_family = "Arial" npt.assert_equal("Arial", text_block.font_family) with npt.assert_raises(ValueError): text_block.font_family = "Verdana" text_block.justification = "left" text_block.justification = "right" text_block.justification = "center" npt.assert_equal("Centered", text_block.justification) with npt.assert_raises(ValueError): text_block.justification = "bottom" text_block.bold = True text_block.bold = False npt.assert_equal(False, text_block.bold) text_block.italic = True text_block.italic = False npt.assert_equal(False, text_block.italic) text_block.shadow = True text_block.shadow = False npt.assert_equal(False, text_block.shadow) text_block.color = (1, 0, 0) npt.assert_equal((1, 0, 0), text_block.color) text_block.position = (2, 3) npt.assert_equal((2, 3), text_block.position) @npt.dec.skipif(not have_vtk or skip_it) @xvfb_it def test_ui_line_slider_2d(recording=False): filename = "test_ui_line_slider_2d" recording_filename = pjoin(DATA_DIR, filename + ".log.gz") expected_events_counts_filename = pjoin(DATA_DIR, filename + ".pkl") line_slider_2d_test = ui.LineSlider2D(initial_value=-2, min_value=-5, max_value=5) line_slider_2d_test.set_center((300, 300)) # Assign the counter callback to every possible event. 
event_counter = EventCounter() event_counter.monitor(line_slider_2d_test) current_size = (600, 600) show_manager = window.ShowManager(size=current_size, title="DIPY Line Slider") show_manager.ren.add(line_slider_2d_test) if recording: show_manager.record_events_to_file(recording_filename) print(list(event_counter.events_counts.items())) event_counter.save(expected_events_counts_filename) else: show_manager.play_events_from_file(recording_filename) expected = EventCounter.load(expected_events_counts_filename) event_counter.check_counts(expected) @npt.dec.skipif(not have_vtk or skip_it) @xvfb_it def test_ui_disk_slider_2d(recording=False): filename = "test_ui_disk_slider_2d" recording_filename = pjoin(DATA_DIR, filename + ".log.gz") expected_events_counts_filename = pjoin(DATA_DIR, filename + ".pkl") disk_slider_2d_test = ui.DiskSlider2D() disk_slider_2d_test.set_center((300, 300)) disk_slider_2d_test.value = 90 # Assign the counter callback to every possible event. event_counter = EventCounter() event_counter.monitor(disk_slider_2d_test) current_size = (600, 600) show_manager = window.ShowManager(size=current_size, title="DIPY Disk Slider") show_manager.ren.add(disk_slider_2d_test) if recording: show_manager.record_events_to_file(recording_filename) print(list(event_counter.events_counts.items())) event_counter.save(expected_events_counts_filename) else: show_manager.play_events_from_file(recording_filename) expected = EventCounter.load(expected_events_counts_filename) event_counter.check_counts(expected) @npt.dec.skipif(not have_vtk or skip_it) @xvfb_it def test_ui_file_select_menu_2d(recording=False): filename = "test_ui_file_select_menu_2d" recording_filename = pjoin(DATA_DIR, filename + ".log.gz") expected_events_counts_filename = pjoin(DATA_DIR, filename + ".pkl") with InTemporaryDirectory() as tmpdir: for i in range(10): _ = open("test" + str(i) + ".txt", 'wt').write('some text') file_select_menu = ui.FileSelectMenu2D(size=(500, 500), position=(300, 300), font_size=16, extensions=["txt"], directory_path=os.getcwd(), parent=None) file_select_menu.set_center((300, 300)) npt.assert_equal(file_select_menu.text_item_list[1].file_name[:4], "test") npt.assert_equal(file_select_menu.text_item_list[5].file_name[:4], "test") event_counter = EventCounter() for event in event_counter.events_counts: file_select_menu.add_callback(file_select_menu.buttons["up"].actor, event, event_counter.count) file_select_menu.add_callback(file_select_menu.buttons["down"].actor, event, event_counter.count) file_select_menu.menu.add_callback(file_select_menu.menu.panel.actor, event, event_counter.count) for text_ui in file_select_menu.text_item_list: file_select_menu.add_callback(text_ui.text_actor.get_actors()[0], event, event_counter.count) current_size = (600, 600) show_manager = window.ShowManager(size=current_size, title="DIPY File Select Menu") show_manager.ren.add(file_select_menu) if recording: show_manager.record_events_to_file(recording_filename) print(list(event_counter.events_counts.items())) event_counter.save(expected_events_counts_filename) else: show_manager.play_events_from_file(recording_filename) expected = EventCounter.load(expected_events_counts_filename) event_counter.check_counts(expected) if __name__ == "__main__": if len(sys.argv) <= 1 or sys.argv[1] == "test_ui_button_panel": test_ui_button_panel(recording=True) if len(sys.argv) <= 1 or sys.argv[1] == "test_ui_textbox": test_ui_textbox(recording=True) if len(sys.argv) <= 1 or sys.argv[1] == "test_ui_line_slider_2d": 
test_ui_line_slider_2d(recording=True) if len(sys.argv) <= 1 or sys.argv[1] == "test_ui_disk_slider_2d": test_ui_disk_slider_2d(recording=True) if len(sys.argv) <= 1 or sys.argv[1] == "test_ui_file_select_menu_2d": test_ui_file_select_menu_2d(recording=True) dipy-0.13.0/dipy/viz/tests/test_utils.py000066400000000000000000000041751317371701200202410ustar00rootroot00000000000000import numpy as np import numpy.testing as npt from dipy.viz.utils import map_coordinates_3d_4d def trilinear_interp_numpy(input_array, indices): """ Evaluate the input_array data at the given indices """ if input_array.ndim <= 2 or input_array.ndim >= 5: raise ValueError("Input array can only be 3d or 4d") x_indices = indices[:, 0] y_indices = indices[:, 1] z_indices = indices[:, 2] x0 = x_indices.astype(np.integer) y0 = y_indices.astype(np.integer) z0 = z_indices.astype(np.integer) x1 = x0 + 1 y1 = y0 + 1 z1 = z0 + 1 # Check if xyz1 is beyond array boundary: x1[np.where(x1 == input_array.shape[0])] = x0.max() y1[np.where(y1 == input_array.shape[1])] = y0.max() z1[np.where(z1 == input_array.shape[2])] = z0.max() if input_array.ndim == 3: x = x_indices - x0 y = y_indices - y0 z = z_indices - z0 elif input_array.ndim == 4: x = np.expand_dims(x_indices - x0, axis=1) y = np.expand_dims(y_indices - y0, axis=1) z = np.expand_dims(z_indices - z0, axis=1) output = (input_array[x0, y0, z0] * (1 - x) * (1 - y) * (1 - z) + input_array[x1, y0, z0] * x * (1 - y) * (1 - z) + input_array[x0, y1, z0] * (1 - x) * y * (1-z) + input_array[x0, y0, z1] * (1 - x) * (1 - y) * z + input_array[x1, y0, z1] * x * (1 - y) * z + input_array[x0, y1, z1] * (1 - x) * y * z + input_array[x1, y1, z0] * x * y * (1 - z) + input_array[x1, y1, z1] * x * y * z) return output def test_trilinear_interp(): A = np.zeros((5, 5, 5)) A[2, 2, 2] = 1 indices = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2], [1.5, 1.5, 1.5]]) values = trilinear_interp_numpy(A, indices) values2 = map_coordinates_3d_4d(A, indices) npt.assert_almost_equal(values, values2) B = np.zeros((5, 5, 5, 3)) B[2, 2, 2] = np.array([1, 1, 1]) values = trilinear_interp_numpy(B, indices) values_4d = map_coordinates_3d_4d(B, indices) npt.assert_almost_equal(values, values_4d) if __name__ == '__main__': npt.run_module_suite() dipy-0.13.0/dipy/viz/tests/test_widgets.py000066400000000000000000000152071317371701200205450ustar00rootroot00000000000000import os import numpy as np from os.path import join as pjoin from dipy.viz import actor, window, widget, fvtk from dipy.data import DATA_DIR from dipy.data import fetch_viz_icons, read_viz_icons import numpy.testing as npt from dipy.testing.decorators import xvfb_it use_xvfb = os.environ.get('TEST_WITH_XVFB', False) if use_xvfb == 'skip': skip_it = True else: skip_it = False @npt.dec.skipif(not actor.have_vtk or not actor.have_vtk_colors or skip_it) @xvfb_it def test_button_and_slider_widgets(): recording = False filename = "test_button_and_slider_widgets.log.gz" recording_filename = pjoin(DATA_DIR, filename) renderer = window.Renderer() # create some minimalistic streamlines lines = [np.array([[-1, 0, 0.], [1, 0, 0.]]), np.array([[-1, 1, 0.], [1, 1, 0.]])] colors = np.array([[1., 0., 0.], [0.3, 0.7, 0.]]) stream_actor = actor.streamtube(lines, colors) states = {'camera_button_count': 0, 'plus_button_count': 0, 'minus_button_count': 0, 'slider_moved_count': 0, } renderer.add(stream_actor) # the show manager allows to break the rendering process # in steps so that the widgets can be added properly show_manager = window.ShowManager(renderer, size=(800, 800)) if 
recording: show_manager.initialize() show_manager.render() def button_callback(obj, event): print('Camera pressed') states['camera_button_count'] += 1 def button_plus_callback(obj, event): print('+ pressed') states['plus_button_count'] += 1 def button_minus_callback(obj, event): print('- pressed') states['minus_button_count'] += 1 fetch_viz_icons() button_png = read_viz_icons(fname='camera.png') button = widget.button(show_manager.iren, show_manager.ren, button_callback, button_png, (.98, 1.), (80, 50)) button_png_plus = read_viz_icons(fname='plus.png') button_plus = widget.button(show_manager.iren, show_manager.ren, button_plus_callback, button_png_plus, (.98, .9), (120, 50)) button_png_minus = read_viz_icons(fname='minus.png') button_minus = widget.button(show_manager.iren, show_manager.ren, button_minus_callback, button_png_minus, (.98, .9), (50, 50)) def print_status(obj, event): rep = obj.GetRepresentation() stream_actor.SetPosition((rep.GetValue(), 0, 0)) states['slider_moved_count'] += 1 slider = widget.slider(show_manager.iren, show_manager.ren, callback=print_status, min_value=-1, max_value=1, value=0., label="X", right_normalized_pos=(.98, 0.6), size=(120, 0), label_format="%0.2lf") # This callback is used to update the buttons/sliders' position # so they can stay on the right side of the window when the window # is being resized. global size size = renderer.GetSize() def win_callback(obj, event): global size if size != obj.GetSize(): button.place(renderer) button_plus.place(renderer) button_minus.place(renderer) slider.place(renderer) size = obj.GetSize() if recording: # show_manager.add_window_callback(win_callback) # you can also register any callback in a vtk way like this # show_manager.window.AddObserver(vtk.vtkCommand.ModifiedEvent, # win_callback) show_manager.record_events_to_file(recording_filename) print(states) else: show_manager.play_events_from_file(recording_filename) npt.assert_equal(states["camera_button_count"], 7) npt.assert_equal(states["plus_button_count"], 3) npt.assert_equal(states["minus_button_count"], 4) npt.assert_equal(states["slider_moved_count"], 116) if not recording: button.Off() slider.Off() # Uncomment below to test the slider and button with analyze # button.place(renderer) # slider.place(renderer) arr = window.snapshot(renderer, size=(800, 800)) report = window.analyze_snapshot(arr) # import pylab as plt # plt.imshow(report.labels, origin='lower') # plt.show() npt.assert_equal(report.objects, 4) report = window.analyze_renderer(renderer) npt.assert_equal(report.actors, 1) @npt.dec.skipif(not actor.have_vtk or not actor.have_vtk_colors or skip_it) @xvfb_it def test_text_widget(): interactive = False renderer = window.Renderer() axes = fvtk.axes() window.add(renderer, axes) renderer.ResetCamera() show_manager = window.ShowManager(renderer, size=(900, 900)) if interactive: show_manager.initialize() show_manager.render() fetch_viz_icons() button_png = read_viz_icons(fname='home3.png') def button_callback(obj, event): print('Button Pressed') button = widget.button(show_manager.iren, show_manager.ren, button_callback, button_png, (.8, 1.2), (100, 100)) global rulez rulez = True def text_callback(obj, event): global rulez print('Text selected') if rulez: obj.GetTextActor().SetInput("Diffusion Imaging Rulez!!") rulez = False else: obj.GetTextActor().SetInput("Diffusion Imaging in Python") rulez = True show_manager.render() text = widget.text(show_manager.iren, show_manager.ren, text_callback, message="Diffusion Imaging in Python", left_down_pos=(0., 
0.), right_top_pos=(0.4, 0.05), opacity=1., border=False) if not interactive: button.Off() text.Off() pass if interactive: show_manager.render() show_manager.start() arr = window.snapshot(renderer, size=(900, 900)) report = window.analyze_snapshot(arr) npt.assert_equal(report.objects, 3) # If you want to see the segmented objects after the analysis is finished # you can use imshow(report.labels, origin='lower') if __name__ == '__main__': npt.run_module_suite() dipy-0.13.0/dipy/viz/tests/test_window.py000066400000000000000000000147641317371701200204150ustar00rootroot00000000000000import os import numpy as np from dipy.viz import actor, window import numpy.testing as npt from dipy.testing.decorators import xvfb_it use_xvfb = os.environ.get('TEST_WITH_XVFB', False) if use_xvfb == 'skip': skip_it = True else: skip_it = False @npt.dec.skipif(not actor.have_vtk or not actor.have_vtk_colors or skip_it) @xvfb_it def test_renderer(): ren = window.Renderer() # background color for renderer (1, 0.5, 0) # 0.001 added here to remove numerical errors when moving from float # to int values bg_float = (1, 0.501, 0) # that will come in the image in the 0-255 uint scale bg_color = tuple((np.round(255 * np.array(bg_float))).astype('uint8')) ren.background(bg_float) # window.show(ren) arr = window.snapshot(ren) report = window.analyze_snapshot(arr, bg_color=bg_color, colors=[bg_color, (0, 127, 0)]) npt.assert_equal(report.objects, 0) npt.assert_equal(report.colors_found, [True, False]) axes = actor.axes() ren.add(axes) # window.show(ren) arr = window.snapshot(ren) report = window.analyze_snapshot(arr, bg_color) npt.assert_equal(report.objects, 1) ren.rm(axes) arr = window.snapshot(ren) report = window.analyze_snapshot(arr, bg_color) npt.assert_equal(report.objects, 0) window.add(ren, axes) arr = window.snapshot(ren) report = window.analyze_snapshot(arr, bg_color) npt.assert_equal(report.objects, 1) ren.rm_all() arr = window.snapshot(ren) report = window.analyze_snapshot(arr, bg_color) npt.assert_equal(report.objects, 0) ren2 = window.renderer(bg_float) ren2.background((0, 0, 0.)) report = window.analyze_renderer(ren2) npt.assert_equal(report.bg_color, (0, 0, 0)) ren2.add(axes) report = window.analyze_renderer(ren2) npt.assert_equal(report.actors, 3) window.rm(ren2, axes) report = window.analyze_renderer(ren2) npt.assert_equal(report.actors, 0) @npt.dec.skipif(not actor.have_vtk or not actor.have_vtk_colors or skip_it) @xvfb_it def test_active_camera(): renderer = window.Renderer() renderer.add(actor.axes(scale=(1, 1, 1))) renderer.reset_camera() renderer.reset_clipping_range() direction = renderer.camera_direction() position, focal_point, view_up = renderer.get_camera() renderer.set_camera((0., 0., 1.), (0., 0., 0), view_up) position, focal_point, view_up = renderer.get_camera() npt.assert_almost_equal(np.dot(direction, position), -1) renderer.zoom(1.5) new_position, _, _ = renderer.get_camera() npt.assert_array_almost_equal(position, new_position) renderer.zoom(1) # rotate around focal point renderer.azimuth(90) position, _, _ = renderer.get_camera() npt.assert_almost_equal(position, (1.0, 0.0, 0)) arr = window.snapshot(renderer) report = window.analyze_snapshot(arr, colors=[(255, 0, 0)]) npt.assert_equal(report.colors_found, [True]) # rotate around camera's center renderer.yaw(90) arr = window.snapshot(renderer) report = window.analyze_snapshot(arr, colors=[(0, 0, 0)]) npt.assert_equal(report.colors_found, [True]) renderer.yaw(-90) renderer.elevation(90) arr = window.snapshot(renderer) report = 
window.analyze_snapshot(arr, colors=(0, 255, 0)) npt.assert_equal(report.colors_found, [True]) renderer.set_camera((0., 0., 1.), (0., 0., 0), view_up) # vertical rotation of the camera around the focal point renderer.pitch(10) renderer.pitch(-10) # rotate around the direction of projection renderer.roll(90) # inverted normalized distance from focal point along the direction # of the camera position, _, _ = renderer.get_camera() renderer.dolly(0.5) new_position, _, _ = renderer.get_camera() npt.assert_almost_equal(position[2], 0.5 * new_position[2]) @npt.dec.skipif(not actor.have_vtk or not actor.have_vtk_colors or skip_it) @xvfb_it def test_parallel_projection(): ren = window.Renderer() axes = actor.axes() axes2 = actor.axes() axes2.SetPosition((2, 0, 0)) # Add both axes. ren.add(axes, axes2) # Put the camera at an angle so that the # camera can show the difference between perspective # and parallel projection ren.set_camera((1.5, 1.5, 1.5)) ren.GetActiveCamera().Zoom(2) # window.show(ren, reset_camera=True) ren.reset_camera() arr = window.snapshot(ren) ren.projection('parallel') # window.show(ren, reset_camera=False) arr2 = window.snapshot(ren) # Because of the parallel projection the two axes # will have the same size and therefore occupy more # pixels than in perspective projection, where # the axes that are further away appear smaller. npt.assert_equal(np.sum(arr2 > 0) > np.sum(arr > 0), True) @npt.dec.skipif(not actor.have_vtk or not actor.have_vtk_colors or skip_it) @xvfb_it def test_order_transparent(): renderer = window.Renderer() lines = [np.array([[-1, 0, 0.], [1, 0, 0.]]), np.array([[-1, 1, 0.], [1, 1, 0.]])] colors = np.array([[1., 0., 0.], [0., .5, 0.]]) stream_actor = actor.streamtube(lines, colors, linewidth=0.3, opacity=0.5) renderer.add(stream_actor) renderer.reset_camera() # green in front renderer.elevation(90) renderer.camera().OrthogonalizeViewUp() renderer.reset_clipping_range() renderer.reset_camera() not_xvfb = os.environ.get("TEST_WITH_XVFB", False) if not_xvfb: arr = window.snapshot(renderer, fname='green_front.png', offscreen=True, order_transparent=False) else: arr = window.snapshot(renderer, fname='green_front.png', offscreen=False, order_transparent=False) # therefore the green component must have a higher value (in RGB terms) npt.assert_equal(arr[150, 150][1] > arr[150, 150][0], True) # red in front renderer.elevation(-180) renderer.camera().OrthogonalizeViewUp() renderer.reset_clipping_range() if not_xvfb: arr = window.snapshot(renderer, fname='red_front.png', offscreen=True, order_transparent=True) else: arr = window.snapshot(renderer, fname='red_front.png', offscreen=False, order_transparent=True) # therefore the red component must have a higher value (in RGB terms) npt.assert_equal(arr[150, 150][0] > arr[150, 150][1], True) if __name__ == '__main__': npt.run_module_suite() dipy-0.13.0/dipy/viz/ui.py000066400000000000000000002243241317371701200153150ustar00rootroot00000000000000from __future__ import division from _warnings import warn import os import glob import numpy as np from dipy.data import read_viz_icons from dipy.viz.interactor import CustomInteractorStyle from dipy.utils.optpkg import optional_package # Allow import, but disable doctests if we don't have vtk.
vtk, have_vtk, setup_module = optional_package('vtk') if have_vtk: version = vtk.vtkVersion.GetVTKSourceVersion().split(' ')[-1] major_version = vtk.vtkVersion.GetVTKMajorVersion() vtkTextActor = vtk.vtkTextActor else: vtkTextActor = object TWO_PI = 2 * np.pi class UI(object): """ An umbrella class for all UI elements. While adding UI elements to the renderer, we go over all the sub-elements that come with it and add those to the renderer automatically. Attributes ---------- ui_param : object This is an attribute that can be passed to the UI object by the interactor. ui_list : list of :class:`UI` This is used when there are more than one UI elements inside a UI element. They're all automatically added to the renderer at the same time as this one. parent_ui: UI Reference to the parent UI element. This is useful of there is a parent UI element and its reference needs to be passed down to the child. on_left_mouse_button_pressed: function Callback function for when the left mouse button is pressed. on_left_mouse_button_released: function Callback function for when the left mouse button is released. on_left_mouse_button_clicked: function Callback function for when clicked using the left mouse button (i.e. pressed -> released). on_left_mouse_button_dragged: function Callback function for when dragging using the left mouse button. on_right_mouse_button_pressed: function Callback function for when the right mouse button is pressed. on_right_mouse_button_released: function Callback function for when the right mouse button is released. on_right_mouse_button_clicked: function Callback function for when clicking using the right mouse button (i.e. pressed -> released). on_right_mouse_button_dragged: function Callback function for when dragging using the right mouse button. """ def __init__(self): self.ui_param = None self.ui_list = list() self.parent_ui = None self._callbacks = [] self.left_button_state = "released" self.right_button_state = "released" self.on_left_mouse_button_pressed = lambda i_ren, obj, element: None self.on_left_mouse_button_dragged = lambda i_ren, obj, element: None self.on_left_mouse_button_released = lambda i_ren, obj, element: None self.on_left_mouse_button_clicked = lambda i_ren, obj, element: None self.on_right_mouse_button_pressed = lambda i_ren, obj, element: None self.on_right_mouse_button_released = lambda i_ren, obj, element: None self.on_right_mouse_button_clicked = lambda i_ren, obj, element: None self.on_right_mouse_button_dragged = lambda i_ren, obj, element: None self.on_key_press = lambda i_ren, obj, element: None def get_actors(self): """ Returns the actors that compose this UI component. """ msg = "Subclasses of UI must implement `get_actors(self)`." raise NotImplementedError(msg) def add_to_renderer(self, ren): """ Allows UI objects to add their own props to the renderer. Parameters ---------- ren : renderer """ ren.add(*self.get_actors()) # Get a hold on the current interactor style. iren = ren.GetRenderWindow().GetInteractor().GetInteractorStyle() for callback in self._callbacks: if not isinstance(iren, CustomInteractorStyle): msg = ("The ShowManager requires `CustomInteractorStyle` in" " order to use callbacks.") raise TypeError(msg) iren.add_callback(*callback, args=[self]) def add_callback(self, prop, event_type, callback, priority=0): """ Adds a callback to a specific event for this UI component. Parameters ---------- prop : vtkProp The prop on which is callback is to be added. event_type : string The event code. callback : function The callback function. 
priority : int Higher number is higher priority. """ # Actually since we need an interactor style we will add the callback # only when this UI component is added to the renderer. self._callbacks.append((prop, event_type, callback, priority)) def set_center(self, position): """ Sets the center of the UI component Parameters ---------- position : (float, float) These are the x and y coordinates respectively, with the origin at the bottom left. """ msg = "Subclasses of UI must implement `set_center(self, position)`." raise NotImplementedError(msg) def set_visibility(self, visibility): """ Sets visibility of this UI component and all its sub-components. """ for actor in self.get_actors(): actor.SetVisibility(visibility) def handle_events(self, actor): self.add_callback(actor, "LeftButtonPressEvent", self.left_button_click_callback) self.add_callback(actor, "LeftButtonReleaseEvent", self.left_button_release_callback) self.add_callback(actor, "RightButtonPressEvent", self.right_button_click_callback) self.add_callback(actor, "RightButtonReleaseEvent", self.right_button_release_callback) self.add_callback(actor, "MouseMoveEvent", self.mouse_move_callback) self.add_callback(actor, "KeyPressEvent", self.key_press_callback) @staticmethod def left_button_click_callback(i_ren, obj, self): self.left_button_state = "pressing" self.on_left_mouse_button_pressed(i_ren, obj, self) i_ren.event.abort() @staticmethod def left_button_release_callback(i_ren, obj, self): if self.left_button_state == "pressing": self.on_left_mouse_button_clicked(i_ren, obj, self) self.left_button_state = "released" self.on_left_mouse_button_released(i_ren, obj, self) @staticmethod def right_button_click_callback(i_ren, obj, self): self.right_button_state = "pressing" self.on_right_mouse_button_pressed(i_ren, obj, self) i_ren.event.abort() @staticmethod def right_button_release_callback(i_ren, obj, self): if self.right_button_state == "pressing": self.on_right_mouse_button_clicked(i_ren, obj, self) self.right_button_state = "released" self.on_right_mouse_button_released(i_ren, obj, self) @staticmethod def mouse_move_callback(i_ren, obj, self): if self.left_button_state == "pressing" or self.left_button_state == "dragging": self.left_button_state = "dragging" self.on_left_mouse_button_dragged(i_ren, obj, self) elif self.right_button_state == "pressing" or self.right_button_state == "dragging": self.right_button_state = "dragging" self.on_right_mouse_button_dragged(i_ren, obj, self) else: pass @staticmethod def key_press_callback(i_ren, obj, self): self.on_key_press(i_ren, obj, self) class Button2D(UI): """ A 2D overlay button and is of type vtkTexturedActor2D. Currently supports: - Multiple icons. - Switching between icons. Attributes ---------- size: (float, float) Button size (width, height) in pixels. """ def __init__(self, icon_fnames, size=(30, 30)): """ Parameters ---------- size : 2-tuple of int, optional Button size. icon_fnames : dict {iconname : filename, iconname : filename, ...} """ super(Button2D, self).__init__() self.icon_extents = dict() self.icons = self.__build_icons(icon_fnames) self.icon_names = list(self.icons.keys()) self.current_icon_id = 0 self.current_icon_name = self.icon_names[self.current_icon_id] self.actor = self.build_actor(self.icons[self.current_icon_name]) self.size = size self.handle_events(self.actor) def __build_icons(self, icon_fnames): """ Converts file names to vtkImageDataGeometryFilters. A pre-processing step to prevent re-read of file names during every state change. 
Parameters ---------- icon_fnames : dict {iconname: filename, iconname: filename, ...} Returns ------- icons : dict A dictionary of corresponding vtkImageDataGeometryFilters. """ icons = {} for icon_name, icon_fname in icon_fnames.items(): if icon_fname.split(".")[-1] not in ["png", "PNG"]: error_msg = "A specified icon file is not in the PNG format. SKIPPING." warn(Warning(error_msg)) else: png = vtk.vtkPNGReader() png.SetFileName(icon_fname) png.Update() icons[icon_name] = png.GetOutput() return icons @property def size(self): """ Gets the button size. """ return self._size @size.setter def size(self, size): """ Sets the button size. Parameters ---------- size : (float, float) Button size (width, height) in pixels. """ self._size = np.asarray(size) # Update actor. self.texture_points.SetPoint(0, 0, 0, 0.0) self.texture_points.SetPoint(1, size[0], 0, 0.0) self.texture_points.SetPoint(2, size[0], size[1], 0.0) self.texture_points.SetPoint(3, 0, size[1], 0.0) self.texture_polydata.SetPoints(self.texture_points) @property def color(self): """ Gets the button's color. """ color = self.actor.GetProperty().GetColor() return np.asarray(color) @color.setter def color(self, color): """ Sets the button's color. Parameters ---------- color : (float, float, float) RGB. Must take values in [0, 1]. """ self.actor.GetProperty().SetColor(*color) def scale(self, size): """ Scales the button. Parameters ---------- size : (float, float) Scaling factor (width, height) in pixels. """ self.size *= size def build_actor(self, icon): """ Return an image as a 2D actor with a specific position. Parameters ---------- icon : :class:`vtkImageData` Returns ------- :class:`vtkTexturedActor2D` """ # This is highly inspired by # https://github.com/Kitware/VTK/blob/c3ec2495b183e3327820e927af7f8f90d34c3474\ # /Interaction/Widgets/vtkBalloonRepresentation.cxx#L47 self.texture_polydata = vtk.vtkPolyData() self.texture_points = vtk.vtkPoints() self.texture_points.SetNumberOfPoints(4) self.size = icon.GetExtent() polys = vtk.vtkCellArray() polys.InsertNextCell(4) polys.InsertCellPoint(0) polys.InsertCellPoint(1) polys.InsertCellPoint(2) polys.InsertCellPoint(3) self.texture_polydata.SetPolys(polys) tc = vtk.vtkFloatArray() tc.SetNumberOfComponents(2) tc.SetNumberOfTuples(4) tc.InsertComponent(0, 0, 0.0) tc.InsertComponent(0, 1, 0.0) tc.InsertComponent(1, 0, 1.0) tc.InsertComponent(1, 1, 0.0) tc.InsertComponent(2, 0, 1.0) tc.InsertComponent(2, 1, 1.0) tc.InsertComponent(3, 0, 0.0) tc.InsertComponent(3, 1, 1.0) self.texture_polydata.GetPointData().SetTCoords(tc) texture_mapper = vtk.vtkPolyDataMapper2D() if major_version <= 5: texture_mapper.SetInput(self.texture_polydata) else: texture_mapper.SetInputData(self.texture_polydata) button = vtk.vtkTexturedActor2D() button.SetMapper(texture_mapper) self.texture = vtk.vtkTexture() button.SetTexture(self.texture) button_property = vtk.vtkProperty2D() button_property.SetOpacity(1.0) button.SetProperty(button_property) self.set_icon(icon) return button def get_actors(self): """ Returns the actors that compose this UI component. """ return [self.actor] def set_icon(self, icon): """ Modifies the icon used by the vtkTexturedActor2D. Parameters ---------- icon : imageDataGeometryFilter """ if major_version <= 5: self.texture.SetInput(icon) else: self.texture.SetInputData(icon) def next_icon_name(self): """ Returns the next icon name while cycling through icons. 
""" self.current_icon_id += 1 if self.current_icon_id == len(self.icons): self.current_icon_id = 0 self.current_icon_name = self.icon_names[self.current_icon_id] def next_icon(self): """ Increments the state of the Button. Also changes the icon. """ self.next_icon_name() self.set_icon(self.icons[self.current_icon_name]) def set_center(self, position): """ Sets the icon center to position. Parameters ---------- position : (float, float) The new center of the button (x, y). """ new_position = np.asarray(position) - self.size / 2. self.actor.SetPosition(*new_position) class Rectangle2D(UI): """ A 2D rectangle sub-classed from UI. Uses vtkPolygon. Attributes ---------- size : (float, float) The size of the rectangle (height, width) in pixels. """ def __init__(self, size, center=(0, 0), color=(1, 1, 1), opacity=1.0): """ Initializes a rectangle. Parameters ---------- size : (float, float) The size of the rectangle (height, width) in pixels. center : (float, float) The center of the rectangle (x, y). color : (float, float, float) Must take values in [0, 1]. opacity : float Must take values in [0, 1]. """ super(Rectangle2D, self).__init__() self.size = size self.actor = self.build_actor(size=size, center=center, color=color, opacity=opacity) self.handle_events(self.actor) def get_actors(self): """ Returns the actors that compose this UI component. """ return [self.actor] def build_actor(self, size, center, color, opacity): """ Builds the text actor. Parameters ---------- size : (float, float) The size of the rectangle (height, width) in pixels. center : (float, float) The center of the rectangle (x, y). color : (float, float, float) Must take values in [0, 1]. opacity : float Must take values in [0, 1]. Returns ------- :class:`vtkActor2D` """ # Setup four points points = vtk.vtkPoints() points.InsertNextPoint(0, 0, 0) points.InsertNextPoint(size[0], 0, 0) points.InsertNextPoint(size[0], size[1], 0) points.InsertNextPoint(0, size[1], 0) # Create the polygon polygon = vtk.vtkPolygon() polygon.GetPointIds().SetNumberOfIds(4) # make a quad polygon.GetPointIds().SetId(0, 0) polygon.GetPointIds().SetId(1, 1) polygon.GetPointIds().SetId(2, 2) polygon.GetPointIds().SetId(3, 3) # Add the polygon to a list of polygons polygons = vtk.vtkCellArray() polygons.InsertNextCell(polygon) # Create a PolyData polygonPolyData = vtk.vtkPolyData() polygonPolyData.SetPoints(points) polygonPolyData.SetPolys(polygons) # Create a mapper and actor mapper = vtk.vtkPolyDataMapper2D() if vtk.VTK_MAJOR_VERSION <= 5: mapper.SetInput(polygonPolyData) else: mapper.SetInputData(polygonPolyData) actor = vtk.vtkActor2D() actor.SetMapper(mapper) actor.GetProperty().SetColor(color) actor.GetProperty().SetOpacity(opacity) actor.SetPosition(center[0] - self.size[0] / 2, center[1] - self.size[1] / 2) return actor def set_center(self, position): """ Sets the center to position. Parameters ---------- position : (float, float) The new center of the rectangle (x, y). """ self.actor.SetPosition(position[0] - self.size[0] / 2, position[1] - self.size[1] / 2) class Panel2D(UI): """ A 2D UI Panel. Can contain one or more UI elements. Attributes ---------- center : (float, float) The center of the panel (x, y). size : (float, float) The size of the panel (width, height) in pixels. alignment : [left, right] Alignment of the panel with respect to the overall screen. """ def __init__(self, center, size, color=(0.1, 0.1, 0.1), opacity=0.7, align="left"): """ Parameters ---------- center : (float, float) The center of the panel (x, y). 
size : (float, float) The size of the panel (width, height) in pixels. color : (float, float, float) Must take values in [0, 1]. opacity : float Must take values in [0, 1]. align : [left, right] Alignment of the panel with respect to the overall screen. """ super(Panel2D, self).__init__() self.center = center self.size = size self.lower_limits = (self.center[0] - self.size[0] / 2, self.center[1] - self.size[1] / 2) self.panel = Rectangle2D(size=size, center=center, color=color, opacity=opacity) self.element_positions = [] self.element_positions.append([self.panel, 'relative', 0.5, 0.5]) self.alignment = align self.handle_events(self.panel.actor) self.on_left_mouse_button_pressed = self.left_button_pressed self.on_left_mouse_button_dragged = self.left_button_dragged def add_to_renderer(self, ren): """ Allows UI objects to add their own props to the renderer. Here, we add only call add_to_renderer for the additional components. Parameters ---------- ren : renderer """ super(Panel2D, self).add_to_renderer(ren) for ui_item in self.ui_list: ui_item.add_to_renderer(ren) def get_actors(self): """ Returns the panel actor. """ return [self.panel.actor] def add_element(self, element, position_type, position): """ Adds an element to the panel. The center of the rectangular panel is its bottom lower position. Parameters ---------- element : UI The UI item to be added. position_type: string 'absolute' or 'relative' position : (float, float) Absolute for absolute and relative for relative """ self.ui_list.append(element) if position_type == 'relative': self.element_positions.append([element, position_type, position[0], position[1]]) element.set_center((self.lower_limits[0] + position[0] * self.size[0], self.lower_limits[1] + position[1] * self.size[1])) elif position_type == 'absolute': self.element_positions.append([element, position_type, position[0], position[1]]) element.set_center((position[0], position[1])) else: raise ValueError("Position can only be absolute or relative") def set_center(self, position): """ Sets the panel center to position. The center of the rectangular panel is its bottom lower position. Parameters ---------- position : (float, float) The new center of the panel (x, y). """ shift = [position[0] - self.center[0], position[1] - self.center[1]] self.center = position self.lower_limits = (position[0] - self.size[0] / 2, position[1] - self.size[1] / 2) for ui_element in self.element_positions: if ui_element[1] == 'relative': ui_element[0].set_center((self.lower_limits[0] + ui_element[2] * self.size[0], self.lower_limits[1] + ui_element[3] * self.size[1])) elif ui_element[1] == 'absolute': ui_element[2] += shift[0] ui_element[3] += shift[1] ui_element[0].set_center((ui_element[2], ui_element[3])) @staticmethod def left_button_pressed(i_ren, obj, panel2d_object): click_position = i_ren.event.position panel2d_object.ui_param = (click_position[0] - panel2d_object.panel.actor.GetPosition()[0] - panel2d_object.panel.size[0] / 2, click_position[1] - panel2d_object.panel.actor.GetPosition()[1] - panel2d_object.panel.size[1] / 2) i_ren.event.abort() # Stop propagating the event. @staticmethod def left_button_dragged(i_ren, obj, panel2d_object): click_position = i_ren.event.position if panel2d_object.ui_param is not None: panel2d_object.set_center((click_position[0] - panel2d_object.ui_param[0], click_position[1] - panel2d_object.ui_param[1])) i_ren.force_render() def re_align(self, window_size_change): """ Re-organises the elements in case the window size is changed. 
Parameters ---------- window_size_change : (int, int) New window size (width, height) in pixels. """ if self.alignment == "left": pass elif self.alignment == "right": self.set_center((self.center[0] + window_size_change[0], self.center[1] + window_size_change[1])) else: raise ValueError("You can only left-align or right-align objects in a panel.") class TextBlock2D(UI): """ Wraps over the default vtkTextActor and helps setting the text. Contains member functions for text formatting. Attributes ---------- actor : :class:`vtkTextActor` The text actor. message : str The initial text while building the actor. position : (float, float) (x, y) in pixels. color : (float, float, float) RGB: Values must be between 0-1. font_size : int Size of the text font. font_family : str Currently only supports Arial. justification : str left, right or center. bold : bool Makes text bold. italic : bool Makes text italicised. shadow : bool Adds text shadow. """ def __init__(self, text="Text Block", font_size=18, font_family='Arial', justification='left', bold=False, italic=False, shadow=False, color=(1, 1, 1), position=(0, 0)): """ Parameters ---------- text : str The initial text while building the actor. position : (float, float) (x, y) in pixels. color : (float, float, float) RGB: Values must be between 0-1. font_size : int Size of the text font. font_family : str Currently only supports Arial. justification : str left, right or center. bold : bool Makes text bold. italic : bool Makes text italicised. shadow : bool Adds text shadow. """ super(TextBlock2D, self).__init__() self.actor = vtkTextActor() self.message = text self.font_size = font_size self.font_family = font_family self.justification = justification self.bold = bold self.italic = italic self.shadow = shadow self.color = color self.position = position def get_actor(self): """ Returns the actor composing this element. Returns ------- :class:`vtkTextActor` The actor composing this class. """ return self.actor def get_actors(self): """ Returns the actors that compose this UI component. """ return [self.actor] @property def message(self): """ Gets message from the text. Returns ------- str The current text message. """ return self.actor.GetInput() @message.setter def message(self, text): """ Sets the text message. Parameters ---------- text : str The message to be set. """ self.actor.SetInput(text) @property def font_size(self): """ Gets text font size. Returns ---------- int Text font size. """ return self.actor.GetTextProperty().GetFontSize() @font_size.setter def font_size(self, size): """ Sets font size. Parameters ---------- size : int Text font size. """ self.actor.GetTextProperty().SetFontSize(size) @property def font_family(self): """ Gets font family. Returns ---------- str Text font family. """ return self.actor.GetTextProperty().GetFontFamilyAsString() @font_family.setter def font_family(self, family='Arial'): """ Sets font family. Currently defaults to Arial. # ToDo: Add other font families. Parameters ---------- family : str The font family. """ if family == 'Arial': self.actor.GetTextProperty().SetFontFamilyToArial() else: raise ValueError("Font not supported yet: {}.".format(family)) @property def justification(self): """ Gets text justification. Returns ------- str Text justification. """ return self.actor.GetTextProperty().GetJustificationAsString() @justification.setter def justification(self, justification): """ Justifies text. Parameters ---------- justification : str Possible values are left, right, center. 
""" text_property = self.actor.GetTextProperty() if justification == 'left': text_property.SetJustificationToLeft() elif justification == 'center': text_property.SetJustificationToCentered() elif justification == 'right': text_property.SetJustificationToRight() else: raise ValueError("Text can only be justified left, right and center.") @property def bold(self): """ Returns whether the text is bold. Returns ------- bool Text is bold if True. """ return self.actor.GetTextProperty().GetBold() @bold.setter def bold(self, flag): """ Bolds/un-bolds text. Parameters ---------- flag : bool Sets text bold if True. """ self.actor.GetTextProperty().SetBold(flag) @property def italic(self): """ Returns whether the text is italicised. Returns ------- bool Text is italicised if True. """ return self.actor.GetTextProperty().GetItalic() @italic.setter def italic(self, flag): """ Italicises/un-italicises text. Parameters ---------- flag : bool Italicises text if True. """ self.actor.GetTextProperty().SetItalic(flag) @property def shadow(self): """ Returns whether the text has shadow. Returns ------- bool Text is shadowed if True. """ return self.actor.GetTextProperty().GetShadow() @shadow.setter def shadow(self, flag): """ Adds/removes text shadow. Parameters ---------- flag : bool Shadows text if True. """ self.actor.GetTextProperty().SetShadow(flag) @property def color(self): """ Gets text color. Returns ------- (float, float, float) Returns text color in RGB. """ return self.actor.GetTextProperty().GetColor() @color.setter def color(self, color=(1, 0, 0)): """ Set text color. Parameters ---------- color : (float, float, float) RGB: Values must be between 0-1. """ self.actor.GetTextProperty().SetColor(*color) @property def position(self): """ Gets text actor position. Returns ------- (float, float) The current actor position. (x, y) in pixels. """ return self.actor.GetPosition() @position.setter def position(self, position): """ Set text actor position. Parameters ---------- position : (float, float) The new position. (x, y) in pixels. """ self.actor.SetPosition(*position) def set_center(self, position): """ Sets the text center to position. Parameters ---------- position : (float, float) """ self.position = position class TextBox2D(UI): """ An editable 2D text box that behaves as a UI component. Currently supports: - Basic text editing. - Cursor movements. - Single and multi-line text boxes. - Pre text formatting (text needs to be formatted beforehand). Attributes ---------- text : str The current text state. actor : :class:`vtkActor2d` The text actor. width : int The number of characters in a single line of text. height : int The number of lines in the textbox. window_left : int Left limit of visible text in the textbox. window_right : int Right limit of visible text in the textbox. caret_pos : int Position of the caret in the text. init : bool Flag which says whether the textbox has just been initialized. """ def __init__(self, width, height, text="Enter Text", position=(100, 10), color=(0, 0, 0), font_size=18, font_family='Arial', justification='left', bold=False, italic=False, shadow=False): """ Parameters ---------- width : int The number of characters in a single line of text. height : int The number of lines in the textbox. text : str The initial text while building the actor. position : (float, float) (x, y) in pixels. color : (float, float, float) RGB: Values must be between 0-1. font_size : int Size of the text font. font_family : str Currently only supports Arial. 
justification : str left, right or center. bold : bool Makes text bold. italic : bool Makes text italicised. shadow : bool Adds text shadow. """ super(TextBox2D, self).__init__() self.text = text self.actor = self.build_actor(self.text, position, color, font_size, font_family, justification, bold, italic, shadow) self.width = width self.height = height self.window_left = 0 self.window_right = 0 self.caret_pos = 0 self.init = True self.handle_events(self.actor.get_actor()) self.on_left_mouse_button_pressed = self.left_button_press self.on_key_press = self.key_press def build_actor(self, text, position, color, font_size, font_family, justification, bold, italic, shadow): """ Builds a text actor. Parameters ---------- text : str The initial text while building the actor. position : (float, float) (x, y) in pixels. color : (float, float, float) RGB: Values must be between 0-1. font_size : int Size of the text font. font_family : str Currently only supports Arial. justification : str left, right or center. bold : bool Makes text bold. italic : bool Makes text italicised. shadow : bool Adds text shadow. Returns ------- :class:`TextBlock2D` """ text_block = TextBlock2D() text_block.position = position text_block.message = text text_block.font_size = font_size text_block.font_family = font_family text_block.justification = justification text_block.bold = bold text_block.italic = italic text_block.shadow = shadow if vtk.vtkVersion.GetVTKSourceVersion().split(' ')[-1] <= "6.2.0": pass else: text_block.actor.GetTextProperty().SetBackgroundColor(1, 1, 1) text_block.actor.GetTextProperty().SetBackgroundOpacity(1.0) text_block.color = color return text_block def set_message(self, message): """ Set custom text to textbox. Parameters ---------- message: str The custom message to be set. """ self.text = message self.actor.message = message self.init = False self.window_right = len(self.text) self.window_left = 0 self.caret_pos = self.window_right def get_actors(self): """ Returns the actors that compose this UI component. """ return [self.actor.get_actor()] def width_set_text(self, text): """ Adds newlines to text where necessary. This is needed for multi-line text boxes. Parameters ---------- text : str The final text to be formatted. Returns ------- str A multi line formatted text. """ multi_line_text = "" for i in range(len(text)): multi_line_text += text[i] if (i + 1) % self.width == 0: multi_line_text += "\n" return multi_line_text.rstrip("\n") def handle_character(self, character): """ Main driving function that handles button events. # TODO: Need to handle all kinds of characters like !, +, etc. Parameters ---------- character : str """ if character.lower() == "return": self.render_text(False) return True if character.lower() == "backspace": self.remove_character() elif character.lower() == "left": self.move_left() elif character.lower() == "right": self.move_right() else: self.add_character(character) self.render_text() return False def move_caret_right(self): """ Moves the caret towards right. """ self.caret_pos = min(self.caret_pos + 1, len(self.text)) def move_caret_left(self): """ Moves the caret towards left. """ self.caret_pos = max(self.caret_pos - 1, 0) def right_move_right(self): """ Moves right boundary of the text window right-wards. """ if self.window_right <= len(self.text): self.window_right += 1 def right_move_left(self): """ Moves right boundary of the text window left-wards. 
""" if self.window_right > 0: self.window_right -= 1 def left_move_right(self): """ Moves left boundary of the text window right-wards. """ if self.window_left <= len(self.text): self.window_left += 1 def left_move_left(self): """ Moves left boundary of the text window left-wards. """ if self.window_left > 0: self.window_left -= 1 def add_character(self, character): """ Inserts a character into the text and moves window and caret accordingly. Parameters ---------- character : str """ if len(character) > 1 and character.lower() != "space": return if character.lower() == "space": character = " " self.text = (self.text[:self.caret_pos] + character + self.text[self.caret_pos:]) self.move_caret_right() if (self.window_right - self.window_left == self.height * self.width - 1): self.left_move_right() self.right_move_right() def remove_character(self): """ Removes a character from the text and moves window and caret accordingly. """ if self.caret_pos == 0: return self.text = self.text[:self.caret_pos - 1] + self.text[self.caret_pos:] self.move_caret_left() if len(self.text) < self.height * self.width - 1: self.right_move_left() if (self.window_right - self.window_left == self.height * self.width - 1): if self.window_left > 0: self.left_move_left() self.right_move_left() def move_left(self): """ Handles left button press. """ self.move_caret_left() if self.caret_pos == self.window_left - 1: if (self.window_right - self.window_left == self.height * self.width - 1): self.left_move_left() self.right_move_left() def move_right(self): """ Handles right button press. """ self.move_caret_right() if self.caret_pos == self.window_right + 1: if (self.window_right - self.window_left == self.height * self.width - 1): self.left_move_right() self.right_move_right() def showable_text(self, show_caret): """ Chops out text to be shown on the screen. Parameters ---------- show_caret : bool Whether or not to show the caret. """ if show_caret: ret_text = (self.text[:self.caret_pos] + "_" + self.text[self.caret_pos:]) else: ret_text = self.text ret_text = ret_text[self.window_left:self.window_right + 1] return ret_text def render_text(self, show_caret=True): """ Renders text after processing. Parameters ---------- show_caret : bool Whether or not to show the caret. """ text = self.showable_text(show_caret) if text == "": text = "Enter Text" self.actor.message = self.width_set_text(text) def edit_mode(self): """ Turns on edit mode. """ if self.init: self.text = "" self.init = False self.caret_pos = 0 self.render_text() def set_center(self, position): """ Sets the text center to position. Parameters ---------- position : (float, float) """ self.actor.position = position @staticmethod def left_button_press(i_ren, obj, textbox_object): """ Left button press handler for textbox Parameters ---------- i_ren: :class:`CustomInteractorStyle` obj: :class:`vtkActor` The picked actor textbox_object: :class:`TextBox2D` """ i_ren.add_active_prop(textbox_object.actor.get_actor()) textbox_object.edit_mode() i_ren.force_render() @staticmethod def key_press(i_ren, obj, textbox_object): """ Key press handler for textbox Parameters ---------- i_ren: :class:`CustomInteractorStyle` obj: :class:`vtkActor` The picked actor textbox_object: :class:`TextBox2D` """ key = i_ren.event.key is_done = textbox_object.handle_character(key) if is_done: i_ren.remove_active_prop(textbox_object.actor.get_actor()) i_ren.force_render() class LineSlider2D(UI): """ A 2D Line Slider. A sliding ring on a line with a percentage indicator. 
Currently supports: - A disk on a line (a thin rectangle). - Setting disk position. Attributes ---------- line_width : int Width of the line on which the disk will slide. inner_radius : int Inner radius of the disk (ring). outer_radius : int Outer radius of the disk. center : (float, float) Center of the slider. length : int Length of the slider. slider_line : :class:`vtkActor` The line on which the slider disk moves. slider_disk : :class:`vtkActor` The moving slider disk. text : :class:`TextBlock2D` The text that shows percentage. """ def __init__(self, line_width=5, inner_radius=0, outer_radius=10, center=(450, 300), length=200, initial_value=50, min_value=0, max_value=100, text_size=16, text_template="{value:.1f} ({ratio:.0%})"): """ Parameters ---------- line_width : int Width of the line on which the disk will slide. inner_radius : int Inner radius of the disk (ring). outer_radius : int Outer radius of the disk. center : (float, float) Center of the slider. length : int Length of the slider. initial_value : float Initial value of the slider. min_value : float Minimum value of the slider. max_value : float Maximum value of the slider. text_size : int Size of the text to display alongside the slider (pt). text_template : str, callable If str, text template can contain one or multiple of the replacement fields: `{value:}`, `{ratio:}`. If callable, this instance of `:class:LineSlider2D` will be passed as argument to the text template function. """ super(LineSlider2D, self).__init__() self.length = length self.min_value = min_value self.max_value = max_value self.text_template = text_template self.line_width = line_width self.center = center self.current_state = center[0] self.left_x_position = center[0] - length / 2 self.right_x_position = center[0] + length / 2 self._ratio = (self.current_state - self.left_x_position) / length self.slider_line = None self.slider_disk = None self.text = None self.build_actors(inner_radius=inner_radius, outer_radius=outer_radius, text_size=text_size) # Setting the disk position will also update everything. self.value = initial_value # self.update() self.handle_events(None) def build_actors(self, inner_radius, outer_radius, text_size): """ Builds required actors. Parameters ---------- inner_radius: int The inner radius of the sliding disk. outer_radius: int The outer radius of the sliding disk. text_size: int Size of the text that displays percentage. """ # Slider Line self.slider_line = Rectangle2D(size=(self.length, self.line_width), center=self.center).actor self.slider_line.GetProperty().SetColor(1, 0, 0) # /Slider Line # Slider Disk # Create source disk = vtk.vtkDiskSource() disk.SetInnerRadius(inner_radius) disk.SetOuterRadius(outer_radius) disk.SetRadialResolution(10) disk.SetCircumferentialResolution(50) disk.Update() # Mapper mapper = vtk.vtkPolyDataMapper2D() mapper.SetInputConnection(disk.GetOutputPort()) # Actor self.slider_disk = vtk.vtkActor2D() self.slider_disk.SetMapper(mapper) # /Slider Disk # Slider Text self.text = TextBlock2D() self.text.position = (self.left_x_position - 50, self.center[1] - 10) self.text.font_size = text_size # /Slider Text def get_actors(self): """ Returns the actors that compose this UI component. """ return [self.slider_line, self.slider_disk, self.text.get_actor()] def set_position(self, position): """ Sets the disk's position. Parameters ---------- position : (float, float) The absolute position of the disk (x, y). 
""" x_position = position[0] if x_position < self.center[0] - self.length/2: x_position = self.center[0] - self.length/2 if x_position > self.center[0] + self.length/2: x_position = self.center[0] + self.length/2 self.current_state = x_position self.update() @property def value(self): return self._value @value.setter def value(self, value): value_range = self.max_value - self.min_value self.ratio = (value - self.min_value) / value_range @property def ratio(self): return self._ratio @ratio.setter def ratio(self, ratio): position_x = self.left_x_position + ratio*self.length self.set_position((position_x, None)) def format_text(self): """ Returns formatted text to display along the slider. """ if callable(self.text_template): return self.text_template(self) return self.text_template.format(ratio=self.ratio, value=self.value) def update(self): """ Updates the slider. """ # Compute the ratio determined by the position of the slider disk. length = float(self.right_x_position - self.left_x_position) assert length == self.length self._ratio = (self.current_state - self.left_x_position) / length # Compute the selected value considering min_value and max_value. value_range = self.max_value - self.min_value self._value = self.min_value + self.ratio*value_range # Update text disk actor. self.slider_disk.SetPosition(self.current_state, self.center[1]) # Update text. text = self.format_text() self.text.message = text offset_x = 8 * len(text) / 2. offset_y = 30 self.text.position = (self.current_state - offset_x, self.center[1] - offset_y) def set_center(self, position): """ Sets the center of the slider to position. Parameters ---------- position : (float, float) The new center of the whole slider (x, y). """ self.slider_line.SetPosition(position[0] - self.length / 2, position[1] - self.line_width / 2) x_change = position[0] - self.center[0] self.current_state += x_change self.center = position self.left_x_position = position[0] - self.length / 2 self.right_x_position = position[0] + self.length / 2 self.set_position((self.current_state, self.center[1])) @staticmethod def line_click_callback(i_ren, obj, slider): """ Update disk position and grab the focus. Parameters ---------- i_ren : :class:`CustomInteractorStyle` obj : :class:`vtkActor` The picked actor slider : :class:`LineSlider2D` """ position = i_ren.event.position slider.set_position(position) i_ren.force_render() i_ren.event.abort() # Stop propagating the event. @staticmethod def disk_press_callback(i_ren, obj, slider): """ Only need to grab the focus. Parameters ---------- i_ren : :class:`CustomInteractorStyle` obj : :class:`vtkActor` The picked actor slider : :class:`LineSlider2D` """ i_ren.event.abort() # Stop propagating the event. @staticmethod def disk_move_callback(i_ren, obj, slider): """ Actual disk movement. Parameters ---------- i_ren : :class:`CustomInteractorStyle` obj : :class:`vtkActor` The picked actor slider : :class:`LineSlider2D` """ position = i_ren.event.position slider.set_position(position) i_ren.force_render() i_ren.event.abort() # Stop propagating the event. def handle_events(self, actor): """ Handle all events for the LineSlider. Base method needs to be overridden due to multiple actors. 
""" self.add_callback(self.slider_line, "LeftButtonPressEvent", self.line_click_callback) self.add_callback(self.slider_disk, "LeftButtonPressEvent", self.disk_press_callback) self.add_callback(self.slider_disk, "MouseMoveEvent", self.disk_move_callback) self.add_callback(self.slider_line, "MouseMoveEvent", self.disk_move_callback) class DiskSlider2D(UI): """ A disk slider. A disk moves along the boundary of a ring. Goes from 0-360 degrees. Attributes ---------- base_disk_center: (float, float) Position of the system. slider_inner_radius: int Inner radius of the base disk. slider_outer_radius: int Outer radius of the base disk. slider_radius: float Average radius of the base disk. handle_outer_radius: int Outer radius of the slider's handle. handle_inner_radius: int Inner radius of the slider's handle. """ def __init__(self, position=(0, 0), initial_value=180, min_value=0, max_value=360, slider_inner_radius=40, slider_outer_radius=44, handle_inner_radius=10, handle_outer_radius=0, text_size=16, text_template="{ratio:.0%}"): """ Parameters ---------- position : (float, float) Position (x, y) of the slider's center. initial_value : float Initial value of the slider. min_value : float Minimum value of the slider. max_value : float Maximum value of the slider. slider_inner_radius : int Inner radius of the base disk. slider_outer_radius : int Outer radius of the base disk. handle_outer_radius : int Outer radius of the slider's handle. handle_inner_radius : int Inner radius of the slider's handle. text_size : int Size of the text to display alongside the slider (pt). text_template : str, callable If str, text template can contain one or multiple of the replacement fields: `{value:}`, `{ratio:}`, `{angle:}`. If callable, this instance of `:class:DiskSlider2D` will be passed as argument to the text template function. """ super(DiskSlider2D, self).__init__() self.center = np.array(position) self.min_value = min_value self.max_value = max_value self.slider_inner_radius = slider_inner_radius self.slider_outer_radius = slider_outer_radius self.handle_inner_radius = handle_inner_radius self.handle_outer_radius = handle_outer_radius self.slider_radius = (slider_inner_radius + slider_outer_radius) / 2. self.handle = None self.base_disk = None self.text = None self.text_size = text_size self.text_template = text_template self.build_actors() # By setting the value, it also updates everything. self.value = initial_value self.handle_events(None) def build_actors(self): """ Builds actors for the system. 
""" base_disk = vtk.vtkDiskSource() base_disk.SetInnerRadius(self.slider_inner_radius) base_disk.SetOuterRadius(self.slider_outer_radius) base_disk.SetRadialResolution(10) base_disk.SetCircumferentialResolution(50) base_disk.Update() base_disk_mapper = vtk.vtkPolyDataMapper2D() base_disk_mapper.SetInputConnection(base_disk.GetOutputPort()) self.base_disk = vtk.vtkActor2D() self.base_disk.SetMapper(base_disk_mapper) self.base_disk.GetProperty().SetColor(1, 0, 0) self.base_disk.SetPosition(self.center) handle = vtk.vtkDiskSource() handle.SetInnerRadius(self.handle_inner_radius) handle.SetOuterRadius(self.handle_outer_radius) handle.SetRadialResolution(10) handle.SetCircumferentialResolution(50) handle.Update() handle_mapper = vtk.vtkPolyDataMapper2D() handle_mapper.SetInputConnection(handle.GetOutputPort()) self.handle = vtk.vtkActor2D() self.handle.SetMapper(handle_mapper) self.text = TextBlock2D() offset = np.array((16., 8.)) self.text.position = self.center - offset self.text.font_size = self.text_size @property def value(self): return self._value @value.setter def value(self, value): value_range = self.max_value - self.min_value self.ratio = (value - self.min_value) / value_range @property def ratio(self): return self._ratio @ratio.setter def ratio(self, ratio): self.angle = ratio * TWO_PI @property def angle(self): """ Angle (in rad) the handle makes with x-axis """ return self._angle @angle.setter def angle(self, angle): self._angle = angle % TWO_PI # Wraparound self.update() def format_text(self): """ Returns formatted text to display along the slider. """ if callable(self.text_template): return self.text_template(self) return self.text_template.format(ratio=self.ratio, value=self.value, angle=np.rad2deg(self.angle)) def update(self): """ Updates the slider. """ # Compute the ratio determined by the position of the slider disk. self._ratio = self.angle / TWO_PI # Compute the selected value considering min_value and max_value. value_range = self.max_value - self.min_value self._value = self.min_value + self.ratio*value_range # Update text disk actor. x = self.slider_radius * np.cos(self.angle) + self.center[0] y = self.slider_radius * np.sin(self.angle) + self.center[1] self.handle.SetPosition(x, y) # Update text. text = self.format_text() self.text.message = text def get_actors(self): """ Returns the actors that compose this UI component. """ return [self.base_disk, self.handle, self.text.get_actor()] def move_handle(self, click_position): """Moves the slider's handle. Parameters ---------- click_position: (float, float) Position of the mouse click. """ x, y = np.array(click_position) - self.center angle = np.arctan2(y, x) if angle < 0: angle += TWO_PI self.angle = angle def set_center(self, position): """ Changes the slider's center position. Parameters ---------- position : (float, float) New position (x, y). """ position = np.array(position) offset = position - self.center self.base_disk.SetPosition(position) self.handle.SetPosition(*(offset + self.handle.GetPosition())) self.text.position += offset self.center = position @staticmethod def base_disk_click_callback(i_ren, obj, slider): """ Update disk position and grab the focus. Parameters ---------- i_ren : :class:`CustomInteractorStyle` obj : :class:`vtkActor` The picked actor slider : :class:`DiskSlider2D` """ click_position = i_ren.event.position slider.move_handle(click_position=click_position) i_ren.force_render() i_ren.event.abort() # Stop propagating the event. 
@staticmethod def handle_move_callback(i_ren, obj, slider): """ Move the slider's handle. Parameters ---------- i_ren : :class:`CustomInteractorStyle` obj : :class:`vtkActor` The picked actor slider : :class:`DiskSlider2D` """ click_position = i_ren.event.position slider.move_handle(click_position=click_position) i_ren.force_render() i_ren.event.abort() # Stop propagating the event. @staticmethod def handle_press_callback(i_ren, obj, slider): """ This is only needed to grab the focus. Parameters ---------- i_ren : :class:`CustomInteractorStyle` obj : :class:`vtkActor` The picked actor slider : :class:`DiskSlider2D` """ i_ren.event.abort() # Stop propagating the event. def handle_events(self, actor): """ Handle all default slider events. """ self.add_callback(self.base_disk, "LeftButtonPressEvent", self.base_disk_click_callback) self.add_callback(self.handle, "LeftButtonPressEvent", self.handle_press_callback) self.add_callback(self.base_disk, "MouseMoveEvent", self.handle_move_callback) self.add_callback(self.handle, "MouseMoveEvent", self.handle_move_callback) class FileSelectMenu2D(UI): """ A menu to select files in the current folder. Can go to new folder, previous folder and select a file and keep it in a variable. Attributes ---------- n_text_actors: int The number of text actors. Calculated dynamically. selected_file: string Current selected file. text_item_list: list(:class:`FileSelectMenuText2D`) List of FileSelectMenuText2Ds - both visible and invisible. window_offset: int Used for scrolling. Tells you the index of the first visible FileSelectMenuText2D object. size: (float, float) The size of the system (x, y) in pixels. font_size: int The font size in pixels. line_spacing: float Distance between menu text items in pixels. parent_ui: :class:`UI` The UI component this object belongs to. extensions: list(string) List of extensions to be shown as files. """ def __init__(self, size, font_size, position, parent, extensions, directory_path, reverse_scrolling=False, line_spacing=1.4): """ Parameters ---------- size: (float, float) The size of the system (x, y) in pixels. font_size: int The font size in pixels. parent: :class:`UI` The UI component this object belongs to. This will be useful when this UI element is used as a part of other UI elements, like a file save dialog. position: (float, float) The initial position (x, y) in pixels. reverse_scrolling: {True, False} If True, scrolling up will move the list of files down. line_spacing: float Distance between menu text items in pixels. extensions: list(string) List of extensions to be shown as files. directory_path: string Path of the directory where this dialog should open. Example: os.getcwd() """ super(FileSelectMenu2D, self).__init__() self.size = size self.font_size = font_size self.parent_ui = parent self.reverse_scrolling = reverse_scrolling self.line_spacing = line_spacing self.extensions = extensions self.n_text_actors = 0 # Initialisation Value self.text_item_list = [] self.selected_file = "" self.window_offset = 0 self.current_directory = directory_path self.buttons = dict() self.menu = self.build_actors(position) self.fill_text_actors() self.handle_events(None) def add_to_renderer(self, ren): self.menu.add_to_renderer(ren) super(FileSelectMenu2D, self).add_to_renderer(ren) for menu_text in self.text_item_list: menu_text.add_to_renderer(ren) def get_actors(self): """ Returns the actors that compose this UI component. 
""" return [self.buttons["up"], self.buttons["down"]] def build_actors(self, position): """ Builds the number of text actors that will fit in the given size. Allots them positions in the panel, which is only there to allot positions, otherwise the panel itself is invisible. Parameters ---------- position: (float, float) Position of the panel (x, y) in pixels. """ # Calculating the number of text actors. self.n_text_actors = int(self.size[1]/(self.font_size*self.line_spacing)) # This panel is just to facilitate the addition of actors at the right positions panel = Panel2D(center=position, size=self.size, color=(1, 1, 1)) # Initialisation of empty text actors for i in range(self.n_text_actors): text = FileSelectMenuText2D(position=(0, 0), font_size=self.font_size, file_select=self) text.parent_UI = self.parent_ui self.ui_list.append(text) self.text_item_list.append(text) panel.add_element(text, 'relative', (0.1, float(self.n_text_actors-i - 1) / float(self.n_text_actors))) up_button = Button2D({"up": read_viz_icons(fname="arrow-up.png")}) panel.add_element(up_button, 'relative', (0.95, 0.95)) self.buttons["up"] = up_button down_button = Button2D({"down": read_viz_icons(fname="arrow-down.png")}) panel.add_element(down_button, 'relative', (0.95, 0.05)) self.buttons["down"] = down_button return panel @staticmethod def up_button_callback(i_ren, obj, file_select_menu): """ Pressing up button scrolls up in the menu. Parameters ---------- i_ren: :class:`CustomInteractorStyle` obj: :class:`vtkActor` The picked actor file_select_menu: :class:`FileSelectMenu2D` """ all_file_names = file_select_menu.get_all_file_names() if (file_select_menu.n_text_actors + file_select_menu.window_offset) <= len(all_file_names): if file_select_menu.window_offset > 0: file_select_menu.window_offset -= 1 file_select_menu.fill_text_actors() i_ren.force_render() i_ren.event.abort() # Stop propagating the event. @staticmethod def down_button_callback(i_ren, obj, file_select_menu): """ Pressing down button scrolls down in the menu. Parameters ---------- i_ren: :class:`CustomInteractorStyle` obj: :class:`vtkActor` The picked actor file_select_menu: :class:`FileSelectMenu2D` """ all_file_names = file_select_menu.get_all_file_names() if (file_select_menu.n_text_actors + file_select_menu.window_offset) < len(all_file_names): file_select_menu.window_offset += 1 file_select_menu.fill_text_actors() i_ren.force_render() i_ren.event.abort() # Stop propagating the event. def fill_text_actors(self): """ Fills file/folder names to text actors. The list is truncated if the number of file/folder names is greater than the available number of text actors. """ # Flush all the text actors for text_item in self.text_item_list: text_item.text_actor.message = "" text_item.text_actor.actor.SetVisibility(False) all_file_names = self.get_all_file_names() clipped_file_names = all_file_names[self.window_offset:self.n_text_actors + self.window_offset] # Allot file names as in the above list i = 0 for file_name in clipped_file_names: self.text_item_list[i].text_actor.actor.SetVisibility(True) self.text_item_list[i].set_attributes(file_name[0], file_name[1]) if file_name[0] == self.selected_file: self.text_item_list[i].mark_selected() i += 1 def get_all_file_names(self): """ Gets file and directory names. Returns ------- all_file_names: list(string) List of all file and directory names as string. 
""" all_file_names = [] directory_names = self.get_directory_names() for directory_name in directory_names: all_file_names.append((directory_name, "directory")) file_names = self.get_file_names() for file_name in file_names: all_file_names.append((file_name, "file")) return all_file_names def get_directory_names(self): """ Re-allots file names to the text actors. Uses FileSelectMenuText2D for selecting files and folders. Returns ------- directory_names: list(string) List of all directory names as string. """ # A list of directory names in the current directory directory_names = next(os.walk(self.current_directory))[1] directory_names = [os.path.basename(os.path.abspath(dn)) for dn in directory_names] directory_names = ["../"] + directory_names return directory_names def get_file_names(self): """ Re-allots file names to the text actors. Uses FileSelectMenuText2D for selecting files and folders. Returns ------- file_names: list(string) List of all file names as string. """ # A list of file names with extension in the current directory file_names = [] for extension in self.extensions: file_names += glob.glob(self.current_directory + "/*." + extension) file_names = [os.path.basename(os.path.abspath(fn)) for fn in file_names] return file_names def select_file(self, file_name): """ Changes the selected file name. Parameters ---------- file_name: string Name of the file. """ self.selected_file = file_name def set_center(self, position): """ Sets the elements center. Parameters ---------- position: (float, float) New position (x, y) in pixels. """ self.menu.set_center(position=position) def handle_events(self, actor): self.add_callback(self.buttons["up"].actor, "LeftButtonPressEvent", self.up_button_callback) self.add_callback(self.buttons["down"].actor, "LeftButtonPressEvent", self.down_button_callback) # Handle mouse wheel events up_event = "MouseWheelForwardEvent" down_event = "MouseWheelBackwardEvent" if self.reverse_scrolling: up_event, down_event = down_event, up_event # Swap events self.add_callback(self.menu.get_actors()[0], up_event, self.up_button_callback) self.add_callback(self.menu.get_actors()[0], down_event, self.down_button_callback) for text_ui in self.text_item_list: self.add_callback(text_ui.text_actor.get_actors()[0], up_event, self.up_button_callback) self.add_callback(text_ui.text_actor.get_actors()[0], down_event, self.down_button_callback) class FileSelectMenuText2D(UI): """ The text to select folder in a file select menu. Provides a callback to change the directory. Attributes ---------- file_name: string The name of the file the text is displaying. file_type: string Whether the file is a file or directory. file_select: :class:`FileSelect2D` The FileSelectMenu2D reference this text belongs to. """ def __init__(self, font_size, position, file_select): """ Parameters ---------- font_size: int The font size of the text in pixels. position: (float, float) Absolute text position (x, y) in pixels. file_select: :class:`FileSelect2D` The FileSelectMenu2D reference this text belongs to. """ super(FileSelectMenuText2D, self).__init__() self.file_name = "" self.file_type = "" self.file_select = file_select self.text_actor = self.build_actor(position=position, font_size=font_size) self.handle_events(self.text_actor.get_actor()) self.on_left_mouse_button_clicked = self.left_button_clicked def build_actor(self, position, text="Text", color=(1, 1, 1), font_family='Arial', justification='left', bold=False, italic=False, shadow=False, font_size='14'): """ Builds a text actor. 
Parameters ---------- text: string The initial text while building the actor. position: (float, float) The text position (x, y) in pixels. color: (float, float, float) Values must be between 0-1 (RGB). font_family: string Currently only supports Arial. justification: string Text justification - left, right or center. bold: bool Whether or not the text is bold. italic: bool Whether or not the text is italicized. shadow: bool Whether or not the text has shadow. font_size: int The font size of the text in pixels. Returns ------- text_actor: :class:`TextBlock2D` The base text actor. """ text_actor = TextBlock2D() text_actor.position = position text_actor.message = text text_actor.font_size = font_size text_actor.font_family = font_family text_actor.justification = justification text_actor.bold = bold text_actor.italic = italic text_actor.shadow = shadow text_actor.color = color if vtk.vtkVersion.GetVTKSourceVersion().split(' ')[-1] <= "6.2.0": pass else: text_actor.actor.GetTextProperty().SetBackgroundColor(1, 1, 1) text_actor.actor.GetTextProperty().SetBackgroundOpacity(1.0) text_actor.actor.GetTextProperty().SetColor(0, 0, 0) text_actor.actor.GetTextProperty().SetLineSpacing(1) return text_actor def get_actors(self): """ Returns the actors that compose this UI component. """ return [self.text_actor.get_actor()] def set_attributes(self, file_name, file_type): """ Set attributes (file name and type) of this component. This function is for use by a FileSelectMenu2D to set the current file_name and file_type for this FileSelectMenuText2D component. Parameters ---------- file_name: string The name of the file. file_type: string File type = directory or file. """ self.file_name = file_name self.file_type = file_type self.text_actor.message = file_name if vtk.vtkVersion.GetVTKSourceVersion().split(' ')[-1] <= "6.2.0": self.text_actor.get_actor().GetTextProperty().SetColor(1, 1, 1) if file_type != "file": self.text_actor.get_actor().GetTextProperty().SetBold(True) else: if file_type == "file": self.text_actor.get_actor().GetTextProperty().SetBackgroundColor(0, 0, 0) self.text_actor.get_actor().GetTextProperty().SetColor(1, 1, 1) else: self.text_actor.get_actor().GetTextProperty().SetBackgroundColor(1, 1, 1) self.text_actor.get_actor().GetTextProperty().SetColor(0, 0, 0) def mark_selected(self): """ Changes the background color of the actor. """ if vtk.vtkVersion.GetVTKSourceVersion().split(' ')[-1] <= "6.2.0": self.text_actor.actor.GetTextProperty().SetColor(1, 0, 0) else: self.text_actor.actor.GetTextProperty().SetBackgroundColor(1, 0, 0) self.text_actor.actor.GetTextProperty().SetBackgroundOpacity(1.0) @staticmethod def left_button_clicked(i_ren, obj, file_select_text): """ A callback to handle left click for this UI element. 
Parameters ---------- i_ren: :class:`CustomInteractorStyle` obj: :class:`vtkActor` The picked actor file_select_text: :class:`FileSelectMenuText2D` """ if file_select_text.file_type == "directory": file_select_text.file_select.select_file(file_name="") file_select_text.file_select.window_offset = 0 file_select_text.file_select.current_directory = os.path.abspath( os.path.join(file_select_text.file_select.current_directory, file_select_text.text_actor.message)) file_select_text.file_select.window = 0 file_select_text.file_select.fill_text_actors() else: file_select_text.file_select.select_file( file_name=file_select_text.file_name) file_select_text.file_select.fill_text_actors() file_select_text.mark_selected() i_ren.force_render() i_ren.event.abort() # Stop propagating the event. def set_center(self, position): """ Sets the text center to position. Parameters ---------- position: (float, float) The new position (x, y) in pixels. """ self.text_actor.position = position dipy-0.13.0/dipy/viz/utils.py000066400000000000000000000336221317371701200160370ustar00rootroot00000000000000 from __future__ import division, print_function, absolute_import import numpy as np from scipy.ndimage import map_coordinates from dipy.viz.colormap import line_colors # Conditional import machinery for vtk from dipy.utils.optpkg import optional_package # import vtk # Allow import, but disable doctests if we don't have vtk vtk, have_vtk, setup_module = optional_package('vtk') ns, have_numpy_support, _ = optional_package('vtk.util.numpy_support') def set_input(vtk_object, inp): """ Generic input function which takes into account VTK 5 or 6 Parameters ---------- vtk_object: vtk object inp: vtkPolyData or vtkImageData or vtkAlgorithmOutput Returns ------- vtk_object Notes ------- This can be used in the following way:: from dipy.viz.utils import set_input poly_mapper = set_input(vtk.vtkPolyDataMapper(), poly_data) """ if isinstance(inp, vtk.vtkPolyData) \ or isinstance(inp, vtk.vtkImageData): if vtk.VTK_MAJOR_VERSION <= 5: vtk_object.SetInput(inp) else: vtk_object.SetInputData(inp) elif isinstance(inp, vtk.vtkAlgorithmOutput): vtk_object.SetInputConnection(inp) vtk_object.Update() return vtk_object def numpy_to_vtk_points(points): """ Numpy points array to a vtk points array Parameters ---------- points : ndarray Returns ------- vtk_points : vtkPoints() """ vtk_points = vtk.vtkPoints() vtk_points.SetData(ns.numpy_to_vtk(np.asarray(points), deep=True)) return vtk_points def numpy_to_vtk_colors(colors): """ Numpy color array to a vtk color array Parameters ---------- colors: ndarray Returns ------- vtk_colors : vtkDataArray Notes ----- If colors are not already in UNSIGNED_CHAR you may need to multiply by 255. 
Examples -------- >>> import numpy as np >>> from dipy.viz.utils import numpy_to_vtk_colors >>> rgb_array = np.random.rand(100, 3) >>> vtk_colors = numpy_to_vtk_colors(255 * rgb_array) """ vtk_colors = ns.numpy_to_vtk(np.asarray(colors), deep=True, array_type=vtk.VTK_UNSIGNED_CHAR) return vtk_colors def map_coordinates_3d_4d(input_array, indices): """ Evaluate the input_array data at the given indices using trilinear interpolation Parameters ---------- input_array : ndarray, 3D or 4D array indices : ndarray Returns ------- output : ndarray 1D or 2D array """ if input_array.ndim <= 2 or input_array.ndim >= 5: raise ValueError("Input array can only be 3d or 4d") if input_array.ndim == 3: return map_coordinates(input_array, indices.T, order=1) if input_array.ndim == 4: values_4d = [] for i in range(input_array.shape[-1]): values_tmp = map_coordinates(input_array[..., i], indices.T, order=1) values_4d.append(values_tmp) return np.ascontiguousarray(np.array(values_4d).T) def lines_to_vtk_polydata(lines, colors=None): """ Create a vtkPolyData with lines and colors Parameters ---------- lines : list list of N curves represented as 2D ndarrays colors : array (N, 3), list of arrays, tuple (3,), array (K,), None If None then a standard orientation colormap is used for every line. If one tuple of color is used. Then all streamlines will have the same colour. If an array (N, 3) is given, where N is equal to the number of lines. Then every line is coloured with a different RGB color. If a list of RGB arrays is given then every point of every line takes a different color. If an array (K, 3) is given, where K is the number of points of all lines then every point is colored with a different RGB color. If an array (K,) is given, where K is the number of points of all lines then these are considered as the values to be used by the colormap. If an array (L,) is given, where L is the number of streamlines then these are considered as the values to be used by the colormap per streamline. If an array (X, Y, Z) or (X, Y, Z, 3) is given then the values for the colormap are interpolated automatically using trilinear interpolation. 
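# A small sketch of map_coordinates_3d_4d: trilinear sampling of a 3D array at
# arbitrary continuous positions (a 4D array is sampled per last-axis volume).
#
# import numpy as np
# from dipy.viz.utils import map_coordinates_3d_4d
#
# volume = np.arange(27, dtype=float).reshape(3, 3, 3)
# points = np.array([[0.5, 0.5, 0.5],    # between voxels -> interpolated
#                    [1.0, 2.0, 1.0]])   # exact voxel -> its value
# values = map_coordinates_3d_4d(volume, points)
# print(values)                          # -> [6.5, 16.0]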
Returns ------- poly_data : vtkPolyData is_colormap : bool, true if the input color array was a colormap """ # Get the 3d points_array points_array = np.vstack(lines) nb_lines = len(lines) nb_points = len(points_array) lines_range = range(nb_lines) # Get lines_array in vtk input format lines_array = [] # Using np.intp (instead of int64), because of a bug in numpy: # https://github.com/nipy/dipy/pull/789 # https://github.com/numpy/numpy/issues/4384 points_per_line = np.zeros([nb_lines], np.intp) current_position = 0 for i in lines_range: current_len = len(lines[i]) points_per_line[i] = current_len end_position = current_position + current_len lines_array += [current_len] lines_array += range(current_position, end_position) current_position = end_position lines_array = np.array(lines_array) # Set Points to vtk array format vtk_points = numpy_to_vtk_points(points_array) # Set Lines to vtk array format vtk_lines = vtk.vtkCellArray() vtk_lines.GetData().DeepCopy(ns.numpy_to_vtk(lines_array)) vtk_lines.SetNumberOfCells(nb_lines) is_colormap = False # Get colors_array (reformat to have colors for each points) # - if/else tested and work in normal simple case if colors is None: # set automatic rgb colors cols_arr = line_colors(lines) colors_mapper = np.repeat(lines_range, points_per_line, axis=0) vtk_colors = numpy_to_vtk_colors(255 * cols_arr[colors_mapper]) else: cols_arr = np.asarray(colors) if cols_arr.dtype == np.object: # colors is a list of colors vtk_colors = numpy_to_vtk_colors(255 * np.vstack(colors)) else: if len(cols_arr) == nb_points: if cols_arr.ndim == 1: # values for every point vtk_colors = ns.numpy_to_vtk(cols_arr, deep=True) is_colormap = True elif cols_arr.ndim == 2: # map color to each point vtk_colors = numpy_to_vtk_colors(255 * cols_arr) elif cols_arr.ndim == 1: if len(cols_arr) == nb_lines: # values for every streamline cols_arrx = [] for (i, value) in enumerate(colors): cols_arrx += lines[i].shape[0]*[value] cols_arrx = np.array(cols_arrx) vtk_colors = ns.numpy_to_vtk(cols_arrx, deep=True) is_colormap = True else: # the same colors for all points vtk_colors = numpy_to_vtk_colors( np.tile(255 * cols_arr, (nb_points, 1))) elif cols_arr.ndim == 2: # map color to each line colors_mapper = np.repeat(lines_range, points_per_line, axis=0) vtk_colors = numpy_to_vtk_colors(255 * cols_arr[colors_mapper]) else: # colormap # get colors for each vertex cols_arr = map_coordinates_3d_4d(cols_arr, points_array) vtk_colors = ns.numpy_to_vtk(cols_arr, deep=True) is_colormap = True vtk_colors.SetName("Colors") # Create the poly_data poly_data = vtk.vtkPolyData() poly_data.SetPoints(vtk_points) poly_data.SetLines(vtk_lines) poly_data.GetPointData().SetScalars(vtk_colors) return poly_data, is_colormap def get_polydata_lines(line_polydata): """ vtk polydata to a list of lines ndarrays Parameters ---------- line_polydata : vtkPolyData Returns ------- lines : list List of N curves represented as 2D ndarrays """ lines_vertices = ns.vtk_to_numpy(line_polydata.GetPoints().GetData()) lines_idx = ns.vtk_to_numpy(line_polydata.GetLines().GetData()) lines = [] current_idx = 0 while current_idx < len(lines_idx): line_len = lines_idx[current_idx] next_idx = current_idx + line_len + 1 line_range = lines_idx[current_idx + 1: next_idx] lines += [lines_vertices[line_range]] current_idx = next_idx return lines def get_polydata_triangles(polydata): """ get triangles (ndarrays Nx3 int) from a vtk polydata Parameters ---------- polydata : vtkPolyData Returns ------- output : array (N, 3) triangles """ vtk_polys = 
ns.vtk_to_numpy(polydata.GetPolys().GetData()) assert((vtk_polys[::4] == 3).all()) # test if its really triangles return np.vstack([vtk_polys[1::4], vtk_polys[2::4], vtk_polys[3::4]]).T def get_polydata_vertices(polydata): """ get vertices (ndarrays Nx3 int) from a vtk polydata Parameters ---------- polydata : vtkPolyData Returns ------- output : array (N, 3) points, represented as 2D ndarrays """ return ns.vtk_to_numpy(polydata.GetPoints().GetData()) def get_polydata_normals(polydata): """ get vertices normal (ndarrays Nx3 int) from a vtk polydata Parameters ---------- polydata : vtkPolyData Returns ------- output : array (N, 3) Normals, represented as 2D ndarrays (Nx3). None if there are no normals in the vtk polydata. """ vtk_normals = polydata.GetPointData().GetNormals() if vtk_normals is None: return None else: return ns.vtk_to_numpy(vtk_normals) def get_polydata_colors(polydata): """ get points color (ndarrays Nx3 int) from a vtk polydata Parameters ---------- polydata : vtkPolyData Returns ------- output : array (N, 3) Colors. None if no normals in the vtk polydata. """ vtk_colors = polydata.GetPointData().GetScalars() if vtk_colors is None: return None else: return ns.vtk_to_numpy(vtk_colors) def set_polydata_triangles(polydata, triangles): """ set polydata triangles with a numpy array (ndarrays Nx3 int) Parameters ---------- polydata : vtkPolyData triangles : array (N, 3) triangles, represented as 2D ndarrays (Nx3) """ vtk_triangles = np.hstack(np.c_[np.ones(len(triangles)).astype(np.int) * 3, triangles]) vtk_triangles = ns.numpy_to_vtkIdTypeArray(vtk_triangles, deep=True) vtk_cells = vtk.vtkCellArray() vtk_cells.SetCells(len(triangles), vtk_triangles) polydata.SetPolys(vtk_cells) return polydata def set_polydata_vertices(polydata, vertices): """ set polydata vertices with a numpy array (ndarrays Nx3 int) Parameters ---------- polydata : vtkPolyData vertices : vertices, represented as 2D ndarrays (Nx3) """ vtk_points = vtk.vtkPoints() vtk_points.SetData(ns.numpy_to_vtk(vertices, deep=True)) polydata.SetPoints(vtk_points) return polydata def set_polydata_normals(polydata, normals): """ set polydata normals with a numpy array (ndarrays Nx3 int) Parameters ---------- polydata : vtkPolyData normals : normals, represented as 2D ndarrays (Nx3) (one per vertex) """ vtk_normals = ns.numpy_to_vtk(normals, deep=True) polydata.GetPointData().SetNormals(vtk_normals) return polydata def set_polydata_colors(polydata, colors): """ set polydata colors with a numpy array (ndarrays Nx3 int) Parameters ---------- polydata : vtkPolyData colors : colors, represented as 2D ndarrays (Nx3) colors are uint8 [0,255] RGB for each points """ vtk_colors = ns.numpy_to_vtk(colors, deep=True, array_type=vtk.VTK_UNSIGNED_CHAR) vtk_colors.SetNumberOfComponents(3) vtk_colors.SetName("RGB") polydata.GetPointData().SetScalars(vtk_colors) return polydata def update_polydata_normals(polydata): """ generate and update polydata normals Parameters ---------- polydata : vtkPolyData """ normals_gen = set_input(vtk.vtkPolyDataNormals(), polydata) normals_gen.ComputePointNormalsOn() normals_gen.ComputeCellNormalsOn() normals_gen.SplittingOff() # normals_gen.FlipNormalsOn() # normals_gen.ConsistencyOn() # normals_gen.AutoOrientNormalsOn() normals_gen.Update() vtk_normals = normals_gen.GetOutput().GetPointData().GetNormals() polydata.GetPointData().SetNormals(vtk_normals) def get_polymapper_from_polydata(polydata): """ get vtkPolyDataMapper from a vtkPolyData Parameters ---------- polydata : vtkPolyData Returns ------- 
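# A sketch of building a vtkPolyData surface from numpy arrays with the
# set_polydata_* / get_polydata_* helpers above (a single triangle with one
# uint8 RGB color per vertex).
#
# import numpy as np
# import vtk
# from dipy.viz import utils
#
# vertices = np.array([[0., 0., 0.],
#                      [1., 0., 0.],
#                      [0., 1., 0.]])
# triangles = np.array([[0, 1, 2]])
# colors = np.array([[255, 0, 0],
#                    [0, 255, 0],
#                    [0, 0, 255]], dtype=np.uint8)
#
# polydata = vtk.vtkPolyData()
# utils.set_polydata_vertices(polydata, vertices)
# utils.set_polydata_triangles(polydata, triangles)
# utils.set_polydata_colors(polydata, colors)
# utils.update_polydata_normals(polydata)
#
# print(utils.get_polydata_vertices(polydata).shape)   # (3, 3)
# print(utils.get_polydata_triangles(polydata))        # [[0 1 2]]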
poly_mapper : vtkPolyDataMapper """ poly_mapper = set_input(vtk.vtkPolyDataMapper(), polydata) poly_mapper.ScalarVisibilityOn() poly_mapper.InterpolateScalarsBeforeMappingOn() poly_mapper.Update() poly_mapper.StaticOn() return poly_mapper def get_actor_from_polymapper(poly_mapper, light=(0.1, 0.15, 0.05)): """ get vtkActor from a vtkPolyDataMapper Parameters ---------- poly_mapper : vtkPolyDataMapper Returns ------- actor : vtkActor """ actor = vtk.vtkActor() actor.SetMapper(poly_mapper) # actor.GetProperty().SetRepresentationToWireframe() actor.GetProperty().BackfaceCullingOn() actor.GetProperty().SetInterpolationToPhong() # actor.GetProperty().SetInterpolationToFlat() actor.GetProperty().SetAmbient(light[0]) # .3 actor.GetProperty().SetDiffuse(light[1]) # .3 actor.GetProperty().SetSpecular(light[2]) # .3 return actor def get_actor_from_polydata(polydata): """ get vtkActor from a vtkPolyData Parameters ---------- polydata : vtkPolyData Returns ------- actor : vtkActor """ poly_mapper = get_polymapper_from_polydata(polydata) return get_actor_from_polymapper(poly_mapper) dipy-0.13.0/dipy/viz/widget.py000066400000000000000000000272441317371701200161650ustar00rootroot00000000000000# Widgets are different than actors in that they can interact with events # To do so they need as input a vtkRenderWindowInteractor also known as iren. import numpy as np # Conditional import machinery for vtk from dipy.utils.optpkg import optional_package # Allow import, but disable doctests if we don't have vtk vtk, have_vtk, setup_module = optional_package('vtk') colors, have_vtk_colors, _ = optional_package('vtk.util.colors') numpy_support, have_ns, _ = optional_package('vtk.util.numpy_support') def slider(iren, ren, callback, min_value=0, max_value=255, value=125, label="Slider", right_normalized_pos=(0.9, 0.5), size=(50, 0), label_format="%0.0lf", color=(0.5, 0.5, 0.5), selected_color=(0.9, 0.2, 0.1)): """ A 2D slider widget Parameters ---------- iren : vtkRenderWindowInteractor Used to process events and handle them to the slider. Can also be given by the attribute ``ShowManager.iren``. ren : vtkRenderer or Renderer Used to update the slider's position when the window changes. Can also be given by the ``ShowManager.ren`` attribute. callback : function Function that has at least ``obj`` and ``event`` as parameters. It will be called when the slider's bar has changed. min_value : float Minimum value of slider. max_value : float Maximum value of slider. value : Default value of slider. label : str Slider's caption. right_normalized_pos : tuple 2d tuple holding the normalized right (X, Y) position of the slider. size: tuple 2d tuple holding the size of the slider in pixels. label_format: str Formating in which the slider's value will appear for example "%0.2lf" allows for 2 decimal values. Returns ------- slider : SliderObject This object inherits from vtkSliderWidget and has additional method called ``place`` which allows to update the position of the slider when for example the window is resized. 
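# A sketch that turns a list of streamlines into a colored actor using
# lines_to_vtk_polydata and get_actor_from_polydata defined above (random
# coordinates are used only to keep the example self-contained).
#
# import numpy as np
# from dipy.viz import utils, window
#
# lines = [np.random.rand(10, 3) * 10,
#          np.random.rand(20, 3) * 10]
# line_colors = np.array([[1., 0., 0.],   # first streamline in red
#                         [0., 0., 1.]])  # second streamline in blue
# poly_data, is_colormap = utils.lines_to_vtk_polydata(lines, line_colors)
# streamline_actor = utils.get_actor_from_polydata(poly_data)
#
# ren = window.Renderer()
# ren.add(streamline_actor)
# # window.show(ren)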
""" slider_rep = vtk.vtkSliderRepresentation2D() slider_rep.SetMinimumValue(min_value) slider_rep.SetMaximumValue(max_value) slider_rep.SetValue(value) slider_rep.SetTitleText(label) slider_rep.GetPoint2Coordinate().SetCoordinateSystemToNormalizedDisplay() slider_rep.GetPoint2Coordinate().SetValue(*right_normalized_pos) coord2 = slider_rep.GetPoint2Coordinate().GetComputedDisplayValue(ren) slider_rep.GetPoint1Coordinate().SetCoordinateSystemToDisplay() slider_rep.GetPoint1Coordinate().SetValue(coord2[0] - size[0], coord2[1] - size[1]) initial_window_size = ren.GetSize() length = 0.04 width = 0.04 cap_length = 0.01 cap_width = 0.01 tube_width = 0.005 slider_rep.SetSliderLength(length) slider_rep.SetSliderWidth(width) slider_rep.SetEndCapLength(cap_length) slider_rep.SetEndCapWidth(cap_width) slider_rep.SetTubeWidth(tube_width) slider_rep.SetLabelFormat(label_format) slider_rep.GetLabelProperty().SetColor(*color) slider_rep.GetTubeProperty().SetColor(*color) slider_rep.GetCapProperty().SetColor(*color) slider_rep.GetTitleProperty().SetColor(*color) slider_rep.GetSelectedProperty().SetColor(*selected_color) slider_rep.GetSliderProperty().SetColor(*color) slider_rep.GetLabelProperty().SetShadow(0) slider_rep.GetTitleProperty().SetShadow(0) class SliderWidget(vtk.vtkSliderWidget): def place(self, ren): slider_rep = self.GetRepresentation() coord2_norm = slider_rep.GetPoint2Coordinate() coord2_norm.SetCoordinateSystemToNormalizedDisplay() coord2_norm.SetValue(*right_normalized_pos) coord2 = coord2_norm.GetComputedDisplayValue(ren) slider_rep.GetPoint1Coordinate().SetCoordinateSystemToDisplay() slider_rep.GetPoint1Coordinate().SetValue(coord2[0] - size[0], coord2[1] - size[1]) window_size = ren.GetSize() length = initial_window_size[0] * 0.04 / window_size[0] width = initial_window_size[1] * 0.04 / window_size[1] slider_rep.SetSliderLength(length) slider_rep.SetSliderWidth(width) def set_value(self, value): return self.GetSliderRepresentation().SetValue(value) def get_value(self): return self.GetSliderRepresentation().GetValue() slider = SliderWidget() slider.SetInteractor(iren) slider.SetRepresentation(slider_rep) slider.SetAnimationModeToAnimate() slider.KeyPressActivationOff() slider.AddObserver("InteractionEvent", callback) slider.SetEnabled(True) # Place widget after window resizing. def _place_widget(obj, event): slider.place(ren) iren.GetRenderWindow().AddObserver( vtk.vtkCommand.StartEvent, _place_widget) iren.GetRenderWindow().AddObserver( vtk.vtkCommand.ModifiedEvent, _place_widget) return slider def button_display_coordinates(renderer, normalized_display_position, size): upperRight = vtk.vtkCoordinate() upperRight.SetCoordinateSystemToNormalizedDisplay() upperRight.SetValue(normalized_display_position[0], normalized_display_position[1]) bds = [0.0] * 6 bds[0] = upperRight.GetComputedDisplayValue(renderer)[0] - size[0] bds[1] = bds[0] + size[0] bds[2] = upperRight.GetComputedDisplayValue(renderer)[1] - size[1] bds[3] = bds[2] + size[1] return bds def button(iren, ren, callback, fname, right_normalized_pos=(.98, .9), size=(50, 50)): """ A textured two state button widget Parameters ---------- iren : vtkRenderWindowInteractor Used to process events and handle them to the button. Can also be given by the attribute ``ShowManager.iren``. ren : vtkRenderer or Renderer Used to update the slider's position when the window changes. Can also be given by the ``ShowManager.ren`` attribute. callback : function Function that has at least ``obj`` and ``event`` as parameters. 
It will be called when the button is pressed. fname : str PNG file path of the icon used for the button. right_normalized_pos : tuple 2d tuple holding the normalized right (X, Y) position of the slider. size: tuple 2d tuple holding the size of the slider in pixels. Returns ------- button : ButtonWidget This object inherits from vtkButtonWidget and has an additional method called ``place`` which allows to update the position of the slider if necessary. For example when the renderer size changes. Notes ------ The button and slider widgets have similar positioning system. This enables the developers to create a HUD-like collections of buttons and sliders on the right side of the window that always stays in place when the dimensions of the window change. """ image1 = vtk.vtkPNGReader() image1.SetFileName(fname) image1.Update() button_rep = vtk.vtkTexturedButtonRepresentation2D() button_rep.SetNumberOfStates(2) button_rep.SetButtonTexture(0, image1.GetOutput()) button_rep.SetButtonTexture(1, image1.GetOutput()) class ButtonWidget(vtk.vtkButtonWidget): def place(self, renderer): bds = button_display_coordinates(renderer, right_normalized_pos, size) self.GetRepresentation().SetPlaceFactor(1) self.GetRepresentation().PlaceWidget(bds) self.On() button = ButtonWidget() button.SetInteractor(iren) button.SetRepresentation(button_rep) button.AddObserver(vtk.vtkCommand.StateChangedEvent, callback) # Place widget after window resizing. def _place_widget(obj, event): button.place(ren) iren.GetRenderWindow().AddObserver( vtk.vtkCommand.StartEvent, _place_widget) iren.GetRenderWindow().AddObserver( vtk.vtkCommand.ModifiedEvent, _place_widget) return button def text(iren, ren, callback, message="DIPY", left_down_pos=(0.8, 0.5), right_top_pos=(0.9, 0.5), color=(1., .5, .0), opacity=1., border=False): """ 2D text that can be clicked and process events Parameters ---------- iren : vtkRenderWindowInteractor Used to process events and handle them to the button. Can also be given by the attribute ``ShowManager.iren``. ren : vtkRenderer or Renderer Used to update the slider's position when the window changes. Can also be given by the ``ShowManager.ren`` attribute. callback : function Function that has at least ``obj`` and ``event`` as parameters. It will be called when the button is pressed. message : str Message to be shown in the text widget left_down_pos : tuple Coordinates for left down corner of text. If float are provided, the normalized coordinate system is used, otherwise the coordinates represent pixel positions. Default is (0.8, 0.5). right_top_pos : tuple Coordinates for right top corner of text. If float are provided, the normalized coordinate system is used, otherwise the coordinates represent pixel positions. Default is (0.9, 0.5). color : tuple Foreground RGB color of text. Default is (1., .5, .0). opacity : float Takes values from 0 to 1. Default is 1. border : bool Show text border. Default is False. Returns ------- text : TextWidget This object inherits from ``vtkTextWidget`` has an additional method called ``place`` which allows to update the position of the text if necessary. """ # Create the TextActor text_actor = vtk.vtkTextActor() text_actor.SetInput(message) text_actor.GetTextProperty().SetColor(color) text_actor.GetTextProperty().SetOpacity(opacity) # Create the text representation. 
Used for positioning the text_actor text_rep = vtk.vtkTextRepresentation() text_rep.SetTextActor(text_actor) if border: text_rep.SetShowBorderToOn() else: text_rep.SetShowBorderToOff() class TextWidget(vtk.vtkTextWidget): def place(self, renderer): text_rep = self.GetRepresentation() position = text_rep.GetPositionCoordinate() position2 = text_rep.GetPosition2Coordinate() # The dtype of `left_down_pos` determines coordinate system type. if np.issubdtype(np.asarray(left_down_pos).dtype, np.integer): position.SetCoordinateSystemToDisplay() else: position.SetCoordinateSystemToNormalizedDisplay() # The dtype of `right_top_pos` determines coordinate system type. if np.issubdtype(np.asarray(right_top_pos).dtype, np.integer): position2.SetCoordinateSystemToDisplay() else: position2.SetCoordinateSystemToNormalizedDisplay() position.SetValue(*left_down_pos) position2.SetValue(*right_top_pos) text_widget = TextWidget() text_widget.SetRepresentation(text_rep) text_widget.SetInteractor(iren) text_widget.SelectableOn() text_widget.ResizableOff() text_widget.AddObserver(vtk.vtkCommand.WidgetActivateEvent, callback) # Place widget after window resizing. def _place_widget(obj, event): text_widget.place(ren) iren.GetRenderWindow().AddObserver( vtk.vtkCommand.StartEvent, _place_widget) iren.GetRenderWindow().AddObserver( vtk.vtkCommand.ModifiedEvent, _place_widget) text_widget.On() return text_widget dipy-0.13.0/dipy/viz/window.py000066400000000000000000000704501317371701200162060ustar00rootroot00000000000000# -*- coding: utf-8 -*- from __future__ import division, print_function, absolute_import import gzip from warnings import warn import numpy as np from scipy import ndimage from copy import copy from nibabel.tmpdirs import InTemporaryDirectory from nibabel.py3k import asbytes try: import Tkinter as tkinter has_tkinter = True except ImportError: try: import tkinter has_tkinter = True except ImportError: has_tkinter = False try: import tkFileDialog as filedialog except ImportError: try: from tkinter import filedialog except ImportError: has_tkinter = False # Conditional import machinery for vtk from dipy.utils.optpkg import optional_package from dipy import __version__ as dipy_version from dipy.utils.six import string_types from dipy.viz.interactor import CustomInteractorStyle # Allow import, but disable doctests if we don't have vtk vtk, have_vtk, setup_module = optional_package('vtk') colors, have_vtk_colors, _ = optional_package('vtk.util.colors') numpy_support, have_ns, _ = optional_package('vtk.util.numpy_support') _, have_imread, _ = optional_package('Image') if not have_imread: _, have_imread, _ = optional_package('PIL') if have_vtk: version = vtk.vtkVersion.GetVTKSourceVersion().split(' ')[-1] major_version = vtk.vtkVersion.GetVTKMajorVersion() from vtk.util.numpy_support import vtk_to_numpy vtkRenderer = vtk.vtkRenderer else: vtkRenderer = object if have_imread: from scipy.misc import imread class Renderer(vtkRenderer): """ Your scene class This is an important object that is responsible for preparing objects e.g. actors and volumes for rendering. This is a more pythonic version of ``vtkRenderer`` proving simple methods for adding and removing actors but also it provides access to all the functionality available in ``vtkRenderer`` if necessary. 
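# A combined usage sketch for the button and text widgets above. It assumes
# read_viz_icons from dipy.data (used elsewhere in this package) to fetch a
# bundled PNG icon; the callbacks receive the widget instance as `obj`.
#
# from dipy.data import read_viz_icons
# from dipy.viz import actor, widget, window
#
# ren = window.Renderer()
# ren.add(actor.axes())
# show_manager = window.ShowManager(ren, size=(600, 600),
#                                   interactor_style='trackball')
# show_manager.initialize()
#
# def button_callback(obj, event):
#     print("button pressed")
#
# button_widget = widget.button(show_manager.iren, show_manager.ren,
#                               button_callback,
#                               read_viz_icons(fname="arrow-up.png"),
#                               right_normalized_pos=(0.98, 0.9),
#                               size=(50, 50))
#
# def text_callback(obj, event):
#     print("text clicked")
#
# text_widget = widget.text(show_manager.iren, show_manager.ren,
#                           text_callback, message="DIPY",
#                           left_down_pos=(0.7, 0.05),
#                           right_top_pos=(0.9, 0.1), opacity=1.)
# show_manager.render()
# # show_manager.start()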
""" def background(self, color): """ Set a background color """ self.SetBackground(color) def add(self, *actors): """ Add an actor to the renderer """ for actor in actors: if isinstance(actor, vtk.vtkVolume): self.AddVolume(actor) elif isinstance(actor, vtk.vtkActor2D): self.AddActor2D(actor) elif hasattr(actor, 'add_to_renderer'): actor.add_to_renderer(self) else: self.AddActor(actor) def rm(self, actor): """ Remove a specific actor """ self.RemoveActor(actor) def clear(self): """ Remove all actors from the renderer """ self.RemoveAllViewProps() def rm_all(self): """ Remove all actors from the renderer """ self.RemoveAllViewProps() def projection(self, proj_type='perspective'): """ Deside between parallel or perspective projection Parameters ---------- proj_type : str Can be 'parallel' or 'perspective' (default). """ if proj_type == 'parallel': self.GetActiveCamera().ParallelProjectionOn() else: self.GetActiveCamera().ParallelProjectionOff() def reset_camera(self): """ Reset the camera to an automatic position given by the engine. """ self.ResetCamera() def reset_clipping_range(self): self.ResetCameraClippingRange() def camera(self): return self.GetActiveCamera() def get_camera(self): cam = self.GetActiveCamera() return cam.GetPosition(), cam.GetFocalPoint(), cam.GetViewUp() def camera_info(self): cam = self.camera() print('# Active Camera') print(' Position (%.2f, %.2f, %.2f)' % cam.GetPosition()) print(' Focal Point (%.2f, %.2f, %.2f)' % cam.GetFocalPoint()) print(' View Up (%.2f, %.2f, %.2f)' % cam.GetViewUp()) def set_camera(self, position=None, focal_point=None, view_up=None): if position is not None: self.GetActiveCamera().SetPosition(*position) if focal_point is not None: self.GetActiveCamera().SetFocalPoint(*focal_point) if view_up is not None: self.GetActiveCamera().SetViewUp(*view_up) self.ResetCameraClippingRange() def size(self): """ Renderer size""" return self.GetSize() def zoom(self, value): """ In perspective mode, decrease the view angle by the specified factor. In parallel mode, decrease the parallel scale by the specified factor. A value greater than 1 is a zoom-in, a value less than 1 is a zoom-out. """ self.GetActiveCamera().Zoom(value) def azimuth(self, angle): """ Rotate the camera about the view up vector centered at the focal point. Note that the view up vector is whatever was set via SetViewUp, and is not necessarily perpendicular to the direction of projection. The result is a horizontal rotation of the camera. """ self.GetActiveCamera().Azimuth(angle) def yaw(self, angle): """ Rotate the focal point about the view up vector, using the camera's position as the center of rotation. Note that the view up vector is whatever was set via SetViewUp, and is not necessarily perpendicular to the direction of projection. The result is a horizontal rotation of the scene. """ self.GetActiveCamera().Yaw(angle) def elevation(self, angle): """ Rotate the camera about the cross product of the negative of the direction of projection and the view up vector, using the focal point as the center of rotation. The result is a vertical rotation of the scene. """ self.GetActiveCamera().Elevation(angle) def pitch(self, angle): """ Rotate the focal point about the cross product of the view up vector and the direction of projection, using the camera's position as the center of rotation. The result is a vertical rotation of the camera. """ self.GetActiveCamera().Pitch(angle) def roll(self, angle): """ Rotate the camera about the direction of projection. 
This will spin the camera about its axis. """ self.GetActiveCamera().Roll(angle) def dolly(self, value): """ Divide the camera's distance from the focal point by the given dolly value. Use a value greater than one to dolly-in toward the focal point, and use a value less than one to dolly-out away from the focal point. """ self.GetActiveCamera().Dolly(value) def camera_direction(self): """ Get the vector in the direction from the camera position to the focal point. This is usually the opposite of the ViewPlaneNormal, the vector perpendicular to the screen, unless the view is oblique. """ return self.GetActiveCamera().GetDirectionOfProjection() def renderer(background=None): """ Create a renderer. Parameters ---------- background : tuple Initial background color of renderer Returns ------- v : Renderer Examples -------- >>> from dipy.viz import fvtk >>> import numpy as np >>> r=fvtk.ren() >>> lines=[np.random.rand(10,3)] >>> c=fvtk.line(lines, fvtk.colors.red) >>> fvtk.add(r,c) >>> #fvtk.show(r) """ deprecation_msg = ("Method 'dipy.viz.window.renderer' is deprecated, instead" " use class 'dipy.viz.window.Renderer'.") warn(DeprecationWarning(deprecation_msg)) ren = Renderer() if background is not None: ren.SetBackground(background) return ren if have_vtk: ren = renderer def add(ren, a): """ Add a specific actor """ ren.add(a) def rm(ren, a): """ Remove a specific actor """ ren.rm(a) def clear(ren): """ Remove all actors from the renderer """ ren.clear() def rm_all(ren): """ Remove all actors from the renderer """ ren.rm_all() def open_file_dialog(file_types=[("All files", "*")]): """ Simple Tk file dialog for opening files Parameters ---------- file_types : tuples of tuples Accepted file types. Returns ------- file_paths : sequence of str Returns the full paths of all selected files """ root = tkinter.Tk() root.withdraw() file_paths = filedialog.askopenfilenames(filetypes=file_types) return file_paths def save_file_dialog(initial_file='dipy.png', default_ext='.png', file_types=(("PNG file", "*.png"), ("All Files", "*.*"))): """ Simple Tk file dialog for saving a file Parameters ---------- initial_file : str For example ``dipy.png``. default_ext : str Default extension to appear in the save dialog. file_types : tuples of tuples Accepted file types. Returns ------- filepath : str Complete filename of saved file """ root = tkinter.Tk() root.withdraw() file_path = filedialog.asksaveasfilename(initialfile=initial_file, defaultextension=default_ext, filetypes=file_types) return file_path class ShowManager(object): """ This class is the interface between the renderer, the window and the interactor. """ def __init__(self, ren=None, title='DIPY', size=(300, 300), png_magnify=1, reset_camera=True, order_transparent=False, interactor_style='custom'): """ Manages the visualization pipeline Parameters ---------- ren : Renderer() or vtkRenderer() The scene that holds all the actors. title : string A string for the window title bar. size : (int, int) ``(width, height)`` of the window. Default is (300, 300). png_magnify : int Number of times to magnify the screenshot. This can be used to save high resolution screenshots when pressing 's' inside the window. reset_camera : bool Default is True. You can change this option to False if you want to keep the camera as set before calling this function. order_transparent : bool True is useful when you want to order transparent actors according to their relative position to the camera. 
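# A short sketch of the camera helpers defined on Renderer above (actor.axes()
# is assumed from the companion dipy.viz.actor module).
#
# from dipy.viz import actor, window
#
# ren = window.Renderer()
# ren.background((1, 1, 1))
# ren.add(actor.axes())
#
# ren.set_camera(position=(0, 0, 10),
#                focal_point=(0, 0, 0),
#                view_up=(0, 1, 0))
# ren.zoom(1.5)      # > 1 zooms in, < 1 zooms out
# ren.azimuth(30)    # horizontal rotation about the view-up vector
# ren.elevation(15)  # vertical rotation about the focal point
# ren.camera_info()  # prints position, focal point and view up
# position, focal_point, view_up = ren.get_camera()
# ren.reset_clipping_range()
# # window.show(ren)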
The default option which is False will order the actors according to the order of their addition to the Renderer(). interactor_style : str or vtkInteractorStyle If str then if 'trackball' then vtkInteractorStyleTrackballCamera() is used, if 'image' then vtkInteractorStyleImage() is used (no rotation) or if 'custom' then CustomInteractorStyle is used. Otherwise you can input your own interactor style. Attributes ---------- ren : vtkRenderer() iren : vtkRenderWindowInteractor() style : vtkInteractorStyle() window : vtkRenderWindow() Methods ------- initialize() render() start() add_window_callback() Notes ----- Default interaction keys for * 3d navigation are with left, middle and right mouse dragging * resetting the camera press 'r' * saving a screenshot press 's' * for quiting press 'q' Examples -------- >>> from dipy.viz import actor, window >>> renderer = window.Renderer() >>> renderer.add(actor.axes()) >>> showm = window.ShowManager(renderer) >>> # showm.initialize() >>> # showm.render() >>> # showm.start() """ if ren is None: ren = Renderer() self.ren = ren self.title = title self.size = size self.png_magnify = png_magnify self.reset_camera = reset_camera self.order_transparent = order_transparent self.interactor_style = interactor_style if self.reset_camera: self.ren.ResetCamera() self.window = vtk.vtkRenderWindow() self.window.AddRenderer(ren) if self.title == 'DIPY': self.window.SetWindowName(title + ' ' + dipy_version) else: self.window.SetWindowName(title) self.window.SetSize(size[0], size[1]) if self.order_transparent: # Use a render window with alpha bits # as default is 0 (false)) self.window.SetAlphaBitPlanes(True) # Force to not pick a framebuffer with a multisample buffer # (default is 8) self.window.SetMultiSamples(0) # Choose to use depth peeling (if supported) # (default is 0 (false)): self.ren.UseDepthPeelingOn() # Set depth peeling parameters # Set the maximum number of rendering passes (default is 4) ren.SetMaximumNumberOfPeels(4) # Set the occlusion ratio (initial value is 0.0, exact image): ren.SetOcclusionRatio(0.0) if self.interactor_style == 'image': self.style = vtk.vtkInteractorStyleImage() elif self.interactor_style == 'trackball': self.style = vtk.vtkInteractorStyleTrackballCamera() elif self.interactor_style == 'custom': self.style = CustomInteractorStyle() else: self.style = interactor_style self.iren = vtk.vtkRenderWindowInteractor() self.style.SetCurrentRenderer(self.ren) # Hack: below, we explicitly call the Python version of SetInteractor. self.style.SetInteractor(self.iren) self.iren.SetInteractorStyle(self.style) self.iren.SetRenderWindow(self.window) def initialize(self): """ Initialize interaction """ self.iren.Initialize() def render(self): """ Renders only once """ self.window.Render() def start(self): """ Starts interaction """ try: self.iren.Start() except AttributeError: self.__init__(self.ren, self.title, size=self.size, png_magnify=self.png_magnify, reset_camera=self.reset_camera, order_transparent=self.order_transparent, interactor_style=self.interactor_style) self.initialize() self.render() self.iren.Start() self.window.RemoveRenderer(self.ren) self.ren.SetRenderWindow(None) del self.iren del self.window def record_events(self): """ Records events during the interaction. The recording is represented as a list of VTK events that happened during the interaction. The recorded events are then returned. Returns ------- events : str Recorded events (one per line). 
Notes ----- Since VTK only allows recording events to a file, we use a temporary file from which we then read the events. """ with InTemporaryDirectory(): filename = "recorded_events.log" recorder = vtk.vtkInteractorEventRecorder() recorder.SetInteractor(self.iren) recorder.SetFileName(filename) def _stop_recording_and_close(obj, evt): recorder.Stop() self.iren.TerminateApp() self.iren.AddObserver("ExitEvent", _stop_recording_and_close) recorder.EnabledOn() recorder.Record() self.initialize() self.render() self.iren.Start() # Retrieved recorded events. events = open(filename).read() return events def record_events_to_file(self, filename="record.log"): """ Records events during the interaction. The recording is represented as a list of VTK events that happened during the interaction. The recording is going to be saved into `filename`. Parameters ---------- filename : str Name of the file that will contain the recording (.log|.log.gz). """ events = self.record_events() # Compress file if needed if filename.endswith(".gz"): gzip.open(filename, 'wb').write(asbytes(events)) else: open(filename, 'w').write(events) def play_events(self, events): """ Plays recorded events of a past interaction. The VTK events that happened during the recorded interaction will be played back. Parameters ---------- events : str Recorded events (one per line). """ recorder = vtk.vtkInteractorEventRecorder() recorder.SetInteractor(self.iren) recorder.SetInputString(events) recorder.ReadFromInputStringOn() self.initialize() self.render() recorder.Play() def play_events_from_file(self, filename): """ Plays recorded events of a past interaction. The VTK events that happened during the recorded interaction will be played back from `filename`. Parameters ---------- filename : str Name of the file containing the recorded events (.log|.log.gz). """ # Uncompress file if needed. if filename.endswith(".gz"): with gzip.open(filename, 'r') as f: events = f.read() else: with open(filename) as f: events = f.read() self.play_events(events) def add_window_callback(self, win_callback): """ Add window callbacks """ self.window.AddObserver(vtk.vtkCommand.ModifiedEvent, win_callback) self.window.Render() def show(ren, title='DIPY', size=(300, 300), png_magnify=1, reset_camera=True, order_transparent=False): """ Show window with current renderer Parameters ------------ ren : Renderer() or vtkRenderer() The scene that holds all the actors. title : string A string for the window title bar. Default is DIPY and current version. size : (int, int) ``(width, height)`` of the window. Default is (300, 300). png_magnify : int Number of times to magnify the screenshot. Default is 1. This can be used to save high resolution screenshots when pressing 's' inside the window. reset_camera : bool Default is True. You can change this option to False if you want to keep the camera as set before calling this function. order_transparent : bool True is useful when you want to order transparent actors according to their relative position to the camera. The default option which is False will order the actors according to the order of their addition to the Renderer(). 
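# A minimal sketch (hypothetical file name, not from the original source) of
# recording an interactive session with ShowManager and replaying it later;
# the '.log.gz' suffix triggers the gzip branch shown above.
from dipy.viz import actor, window

ren = window.Renderer()
ren.add(actor.axes())
show_m = window.ShowManager(ren, size=(300, 300))
# Blocks until the window is closed, then writes the VTK events to disk.
show_m.record_events_to_file('session.log.gz')
# Replaying the events reproduces the same interaction on this scene.
show_m.play_events_from_file('session.log.gz')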
Notes ----- Default interaction keys for * 3d navigation are with left, middle and right mouse dragging * resetting the camera press 'r' * saving a screenshot press 's' * for quiting press 'q' Examples ---------- >>> import numpy as np >>> from dipy.viz import fvtk >>> r=fvtk.ren() >>> lines=[np.random.rand(10,3),np.random.rand(20,3)] >>> colors=np.array([[0.2,0.2,0.2],[0.8,0.8,0.8]]) >>> c=fvtk.line(lines,colors) >>> fvtk.add(r,c) >>> l=fvtk.label(r) >>> fvtk.add(r,l) >>> #fvtk.show(r) See also --------- dipy.viz.window.record dipy.viz.window.snapshot """ show_manager = ShowManager(ren, title, size, png_magnify, reset_camera, order_transparent) show_manager.initialize() show_manager.render() show_manager.start() def record(ren=None, cam_pos=None, cam_focal=None, cam_view=None, out_path=None, path_numbering=False, n_frames=1, az_ang=10, magnification=1, size=(300, 300), reset_camera=True, verbose=False): """ This will record a video of your scene Records a video as a series of ``.png`` files of your scene by rotating the azimuth angle az_angle in every frame. Parameters ----------- ren : vtkRenderer() object as returned from function ren() cam_pos : None or sequence (3,), optional Camera's position. If None then default camera's position is used. cam_focal : None or sequence (3,), optional Camera's focal point. If None then default camera's focal point is used. cam_view : None or sequence (3,), optional Camera's view up direction. If None then default camera's view up vector is used. out_path : str, optional Output path for the frames. If None a default dipy.png is created. path_numbering : bool When recording it changes out_path to out_path + str(frame number) n_frames : int, optional Number of frames to save, default 1 az_ang : float, optional Azimuthal angle of camera rotation. magnification : int, optional How much to magnify the saved frame. Default is 1. size : (int, int) ``(width, height)`` of the window. Default is (300, 300). reset_camera : bool If True Call ``ren.reset_camera()``. Otherwise you need to set the camera before calling this function. verbose : bool print information about the camera. Default is False. 
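# A minimal sketch (hypothetical output names) of saving a turntable animation
# with the record() function documented above. With path_numbering=True the
# frames are written as frame000000.png, frame000001.png, ...
from dipy.viz import actor, window

ren = window.Renderer()
ren.add(actor.axes())
window.record(ren, out_path='frame', path_numbering=True,
              n_frames=36, az_ang=10, size=(600, 600), verbose=True)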
Examples --------- >>> from dipy.viz import fvtk >>> r=fvtk.ren() >>> a=fvtk.axes() >>> fvtk.add(r,a) >>> #uncomment below to record >>> #fvtk.record(r) >>> #check for new images in current directory """ if ren is None: ren = vtk.vtkRenderer() renWin = vtk.vtkRenderWindow() renWin.AddRenderer(ren) renWin.SetSize(size[0], size[1]) iren = vtk.vtkRenderWindowInteractor() iren.SetRenderWindow(renWin) # ren.GetActiveCamera().Azimuth(180) if reset_camera: ren.ResetCamera() renderLarge = vtk.vtkRenderLargeImage() if major_version <= 5: renderLarge.SetInput(ren) else: renderLarge.SetInput(ren) renderLarge.SetMagnification(magnification) renderLarge.Update() writer = vtk.vtkPNGWriter() ang = 0 if cam_pos is not None: cx, cy, cz = cam_pos ren.GetActiveCamera().SetPosition(cx, cy, cz) if cam_focal is not None: fx, fy, fz = cam_focal ren.GetActiveCamera().SetFocalPoint(fx, fy, fz) if cam_view is not None: ux, uy, uz = cam_view ren.GetActiveCamera().SetViewUp(ux, uy, uz) cam = ren.GetActiveCamera() if verbose: print('Camera Position (%.2f, %.2f, %.2f)' % cam.GetPosition()) print('Camera Focal Point (%.2f, %.2f, %.2f)' % cam.GetFocalPoint()) print('Camera View Up (%.2f, %.2f, %.2f)' % cam.GetViewUp()) for i in range(n_frames): ren.GetActiveCamera().Azimuth(ang) renderLarge = vtk.vtkRenderLargeImage() renderLarge.SetInput(ren) renderLarge.SetMagnification(magnification) renderLarge.Update() writer.SetInputConnection(renderLarge.GetOutputPort()) if path_numbering: if out_path is None: filename = str(i).zfill(6) + '.png' else: filename = out_path + str(i).zfill(6) + '.png' else: if out_path is None: filename = 'dipy.png' else: filename = out_path writer.SetFileName(filename) writer.Write() ang = +az_ang def snapshot(ren, fname=None, size=(300, 300), offscreen=True, order_transparent=False): """ Saves a snapshot of the renderer in a file or in memory Parameters ----------- ren : vtkRenderer as returned from function renderer() fname : str or None Save PNG file. If None return only an array without saving PNG. size : (int, int) ``(width, height)`` of the window. Default is (300, 300). offscreen : bool Default True. Go stealthmode no window should appear. order_transparent : bool Default False. Use depth peeling to sort transparent objects. Returns ------- arr : ndarray Color array of size (width, height, 3) where the last dimension holds the RGB values. 
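# A minimal sketch (hypothetical file name) of off-screen rendering with
# snapshot(), which both writes a PNG and returns the color array.
from dipy.viz import actor, window

ren = window.Renderer()
ren.add(actor.axes())
arr = window.snapshot(ren, fname='scene.png', size=(600, 600), offscreen=True)
print(arr.shape)   # (600, 600, 3); the last axis holds the RGB values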
""" width, height = size if offscreen: graphics_factory = vtk.vtkGraphicsFactory() graphics_factory.SetOffScreenOnlyMode(1) # TODO check if the line below helps in something # graphics_factory.SetUseMesaClasses(1) render_window = vtk.vtkRenderWindow() if offscreen: render_window.SetOffScreenRendering(1) render_window.AddRenderer(ren) render_window.SetSize(width, height) if order_transparent: # Use a render window with alpha bits # as default is 0 (false)) render_window.SetAlphaBitPlanes(True) # Force to not pick a framebuffer with a multisample buffer # (default is 8) render_window.SetMultiSamples(0) # Choose to use depth peeling (if supported) # (default is 0 (false)): ren.UseDepthPeelingOn() # Set depth peeling parameters # Set the maximum number of rendering passes (default is 4) ren.SetMaximumNumberOfPeels(4) # Set the occlusion ratio (initial value is 0.0, exact image): ren.SetOcclusionRatio(0.0) render_window.Render() window_to_image_filter = vtk.vtkWindowToImageFilter() window_to_image_filter.SetInput(render_window) window_to_image_filter.Update() vtk_image = window_to_image_filter.GetOutput() h, w, _ = vtk_image.GetDimensions() vtk_array = vtk_image.GetPointData().GetScalars() components = vtk_array.GetNumberOfComponents() arr = vtk_to_numpy(vtk_array).reshape(h, w, components) if fname is None: return arr writer = vtk.vtkPNGWriter() writer.SetFileName(fname) writer.SetInputConnection(window_to_image_filter.GetOutputPort()) writer.Write() return arr def analyze_renderer(ren): class ReportRenderer(object): bg_color = None report = ReportRenderer() report.bg_color = ren.GetBackground() report.collection = ren.GetActors() report.actors = report.collection.GetNumberOfItems() report.collection.InitTraversal() report.actors_classnames = [] for i in range(report.actors): class_name = report.collection.GetNextActor().GetClassName() report.actors_classnames.append(class_name) return report def analyze_snapshot(im, bg_color=(0, 0, 0), colors=None, find_objects=True, strel=None): """ Analyze snapshot from memory or file Parameters ---------- im: str or array If string then the image is read from a file otherwise the image is read from a numpy array. The array is expected to be of shape (X, Y, 3) where the last dimensions are the RGB values. colors: tuple (3,) or list of tuples (3,) List of colors to search in the image find_objects: bool If True it will calculate the number of objects that are different from the background and return their position in a new image. strel: 2d array Structure element to use for finding the objects. Returns ------- report : ReportSnapshot This is an object with attibutes like ``colors_found`` that give information about what was found in the current snapshot array ``im``. 
""" if isinstance(im, string_types): im = imread(im) class ReportSnapshot(object): objects = None labels = None colors_found = False report = ReportSnapshot() if colors is not None: if isinstance(colors, tuple): colors = [colors] flags = [False] * len(colors) for (i, col) in enumerate(colors): # find if the current color exist in the array flags[i] = np.any(np.all(im == col, axis=-1)) report.colors_found = flags if find_objects is True: weights = [0.299, 0.587, 0.144] gray = np.dot(im[..., :3], weights) bg_color = im[0, 0] background = np.dot(bg_color, weights) if strel is None: strel = np.array([[0, 1, 0], [1, 1, 1], [0, 1, 0]]) labels, objects = ndimage.label(gray != background, strel) report.labels = labels report.objects = objects return report dipy-0.13.0/dipy/workflows/000077500000000000000000000000001317371701200155445ustar00rootroot00000000000000dipy-0.13.0/dipy/workflows/__init__.py000066400000000000000000000000001317371701200176430ustar00rootroot00000000000000dipy-0.13.0/dipy/workflows/base.py000066400000000000000000000232731317371701200170370ustar00rootroot00000000000000import sys import inspect from dipy.fixes import argparse as arg from dipy.workflows.docstring_parser import NumpyDocString def get_args_default(func): if sys.version_info[0] >= 3: sig_object = inspect.signature(func) params = sig_object.parameters.values() names = [param.name for param in params if param.name is not 'self'] defaults = [param.default for param in params if param.default is not inspect._empty] else: specs = inspect.getargspec(func) names = specs.args[1:] defaults = specs.defaults return names, defaults class IntrospectiveArgumentParser(arg.ArgumentParser): def __init__(self, prog=None, usage=None, description=None, epilog=None, version=None, parents=[], formatter_class=arg.RawTextHelpFormatter, prefix_chars='-', fromfile_prefix_chars=None, argument_default=None, conflict_handler='resolve', add_help=True): """ Augmenting the argument parser to allow automatic creation of arguments from workflows Parameters ----------- prog : None The name of the program (default: sys.argv[0]) usage : None A usage message (default: auto-generated from arguments) description : str A description of what the program does epilog : str Text following the argument descriptions version : None Add a -v/--version option with the given version string parents : list Parsers whose arguments should be copied into this one formatter_class : obj HelpFormatter class for printing help messages prefix_chars : str Characters that prefix optional arguments fromfile_prefix_chars : None Characters that prefix files containing additional arguments argument_default : None The default value for all arguments conflict_handler : str String indicating how to handle conflicts add_help : bool Add a -h/-help option """ iap = IntrospectiveArgumentParser if epilog is None: epilog =\ ("References: \n" "Garyfallidis, E., M. Brett, B. Amirbekian, A. Rokem," " S. Van Der Walt, M. Descoteaux, and I. Nimmo-Smith. Dipy, a" " library for the analysis of diffusion MRI data. Frontiers" " in Neuroinformatics, 1-18, 2014.") super(iap, self).__init__(prog, usage, description, epilog, version, parents, formatter_class, prefix_chars, fromfile_prefix_chars, argument_default, conflict_handler, add_help) self.doc = None def add_workflow(self, workflow): """ Take a workflow object and use introspection to extract the parameters, types and docstrings of its run method. Then add these parameters to the current arparser's own params to parse. 
If the workflow is of type combined_workflow, the optional input parameters of its sub workflows will also be added. Parameters ----------- workflow : dipy.workflows.workflow.Workflow Workflow from which to infer parameters. Returns ------- sub_flow_optionals : dictionary of all sub workflow optional parameters """ doc = inspect.getdoc(workflow.run) npds = NumpyDocString(doc) self.doc = npds['Parameters'] self.description = ' '.join(npds['Extended Summary']) if npds['References']: ref_text = [text if text else "\n" for text in npds['References']] ref_idx = self.epilog.find('References: \n') + len('References: \n') self.epilog = "{0}{1}\n{2}".format(self.epilog[:ref_idx], ''.join([text for text in ref_text]), self.epilog[ref_idx:]) self.outputs = [param for param in npds['Parameters'] if 'out_' in param[0]] args, defaults = get_args_default(workflow.run) len_args = len(args) len_defaults = len(defaults) output_args = \ self.add_argument_group('output arguments(optional)') for i, arg in enumerate(args): prefix = '' is_optionnal = i >= len_args - len_defaults if is_optionnal: prefix = '--' typestr = self.doc[i][1] dtype, isnarg = self._select_dtype(typestr) help_msg = ''.join(self.doc[i][2]) _args = ['{0}{1}'.format(prefix, arg)] _kwargs = {'help': help_msg, 'type': dtype, 'action': 'store'} if is_optionnal: _kwargs['metavar'] = dtype.__name__ if dtype is bool: _kwargs['action'] = 'store_true' default_ = dict() default_[arg] = False self.set_defaults(**default_) del _kwargs['type'] del _kwargs['metavar'] elif dtype is bool: _kwargs['type'] = int _kwargs['choices'] = [0, 1] if dtype is tuple: _kwargs['type'] = str if isnarg: _kwargs['nargs'] = '*' if 'out_' in arg: output_args.add_argument(*_args, **_kwargs) else: self.add_argument(*_args, **_kwargs) return self.add_sub_flow_args(workflow.get_sub_runs()) def add_sub_flow_args(self, sub_flows): """ Take an array of workflow objects and use introspection to extract the parameters, types and docstrings of their run method. Only the optional input parameters are extracted for these as they are treated as sub workflows. Parameters ----------- sub_flows : array of dipy.workflows.workflow.Workflow Workflows to inspect. Returns ------- sub_flow_optionals : dictionary of all sub workflow optional parameters """ sub_flow_optionals = dict() for name, flow, short_name in sub_flows: sub_flow_optionals[name] = {} doc = inspect.getdoc(flow) npds = NumpyDocString(doc) _doc = npds['Parameters'] args, defaults = get_args_default(flow) len_args = len(args) len_defaults = len(defaults) flow_args = \ self.add_argument_group('{0} arguments(optional)'. 
format(name)) for i, arg_name in enumerate(args): is_not_optionnal = i < len_args - len_defaults if 'out_' in arg_name or is_not_optionnal: continue arg_name = '{0}.{1}'.format(short_name, arg_name) sub_flow_optionals[name][arg_name] = None prefix = '--' typestr = _doc[i][1] dtype, isnarg = self._select_dtype(typestr) help_msg = ''.join(_doc[i][2]) _args = ['{0}{1}'.format(prefix, arg_name)] _kwargs = {'help': help_msg, 'type': dtype, 'action': 'store'} _kwargs['metavar'] = dtype.__name__ if dtype is bool: _kwargs['action'] = 'store_true' default_ = dict() default_[arg_name] = False self.set_defaults(**default_) del _kwargs['type'] del _kwargs['metavar'] elif dtype is bool: _kwargs['type'] = int _kwargs['choices'] = [0, 1] if dtype is tuple: _kwargs['type'] = str if isnarg: _kwargs['nargs'] = '*' flow_args.add_argument(*_args, **_kwargs) return sub_flow_optionals def _select_dtype(self, text): """ Analyses a docstring parameter line and returns the good argparser type. Parameters ----------- text : string Parameter text line to inspect. Returns ------- arg_type : The type found by inspecting the text line. is_nargs : Whether or not this argument is nargs (arparse's multiple values argument) """ text = text.lower() nargs_str = 'variable' is_nargs = nargs_str in text arg_type = None if 'str' in text: arg_type = str if 'int' in text: arg_type = int if 'float' in text: arg_type = float if 'bool' in text: arg_type = bool if 'tuple' in text: arg_type = tuple return arg_type, is_nargs def get_flow_args(self, args=None, namespace=None): """ Returns the parsed arguments as a dictionary that will be used as a workflow's run method arguments. """ ns_args = self.parse_args(args, namespace) dct = vars(ns_args) return dict((k, v) for k, v in dct.items() if v is not None) def update_argument(self, *args, **kargs): self.add_argument(*args, **kargs) def show_argument(self, dest): for act in self._actions[1:]: if act.dest == dest: print(act) def add_epilogue(self): pass def add_description(self): pass def get_outputs(self): return self.outputs dipy-0.13.0/dipy/workflows/combined_workflow.py000066400000000000000000000042761317371701200216410ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import from dipy.utils.six import iteritems from dipy.workflows.workflow import Workflow class CombinedWorkflow(Workflow): def __init__(self, output_strategy='append', mix_names=False, force=False, skip=False): """ Workflow that combines multiple workflows. The workflow combined together are referred as sub flows in this class. """ self._optionals = {} super(CombinedWorkflow, self).__init__(output_strategy, mix_names, force, skip) def get_sub_runs(self): """ Returns a list of tuples (sub flow name, sub flow run method, sub flow short name) to be used in the sub flow parameters extraction. """ sub_runs = [] for flow in self._get_sub_flows(): sub_runs.append((flow.__name__, flow.run, flow.get_short_name())) return sub_runs def _get_sub_flows(self): """ Returns a list of sub flows used in the combined_workflow. Needs to be implemented in every new combined_workflow. """ raise AttributeError('Error: _get_sub_flows() has to be defined for {}'. format(self.__class__)) def set_sub_flows_optionals(self, opts): """ Sets the self._optionals variable with all sub flow arguments that were passed in the commandline. 
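# A minimal sketch (hypothetical combined workflow, not part of dipy) of how a
# sub flow's optional parameters are exposed. NLMeansFlow (defined further
# below in denoise.py) has the short name 'nlmeans', so its optional `sigma`
# becomes the dotted flag '--nlmeans.sigma'.
from dipy.workflows.base import IntrospectiveArgumentParser
from dipy.workflows.combined_workflow import CombinedWorkflow
from dipy.workflows.denoise import NLMeansFlow


class ToyCombinedFlow(CombinedWorkflow):
    def _get_sub_flows(self):
        return [NLMeansFlow]

    def run(self, input_files, out_dir=''):
        """ Toy combined workflow.

        Parameters
        ----------
        input_files : string
            Path to the input volumes.
        out_dir : string, optional
            Output directory (default input file directory)
        """
        pass


parser = IntrospectiveArgumentParser()
sub_opts = parser.add_workflow(ToyCombinedFlow())
print(sub_opts)   # {'NLMeansFlow': {'nlmeans.sigma': None}}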
""" self._optionals = {} for key, sub_dict in iteritems(opts): #opts.iteritems(): self._optionals[key] = \ dict((k, v) for k, v in iteritems(sub_dict) if v is not None) def get_optionals(self, flow, **kwargs): """ Returns the sub flow's optional arguments merged with those passed as params in kwargs. """ opts = self._optionals[flow.__name__] opts.update(kwargs) return opts def run_sub_flow(self, flow, *args, **kwargs): """ Runs the sub flow with the optional parameters passed via the command line. This is a convenience method to make sub flow running more intuitive on the concrete CombinedWorkflow side. """ return flow.run(*args, **self.get_optionals(type(flow), **kwargs)) dipy-0.13.0/dipy/workflows/denoise.py000066400000000000000000000040711317371701200175460ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import import logging import shutil import nibabel as nib from dipy.denoise.nlmeans import nlmeans from dipy.denoise.noise_estimate import estimate_sigma from dipy.workflows.workflow import Workflow class NLMeansFlow(Workflow): @classmethod def get_short_name(cls): return 'nlmeans' def run(self, input_files, sigma=0, out_dir='', out_denoised='dwi_nlmeans.nii.gz'): """ Workflow wrapping the nlmeans denoising method. It applies nlmeans denoise on each file found by 'globing' ``input_files`` and saves the results in a directory specified by ``out_dir``. Parameters ---------- input_files : string Path to the input volumes. This path may contain wildcards to process multiple inputs at once. sigma : float, optional Sigma parameter to pass to the nlmeans algorithm (default: auto estimation). out_dir : string, optional Output directory (default input file directory) out_denoised : string, optional Name of the resuting denoised volume (default: dwi_nlmeans.nii.gz) """ io_it = self.get_io_iterator() for fpath, odenoised in io_it: if self._skip: shutil.copy(fpath, odenoised) logging.warning('Denoising skipped for now.') else: logging.info('Denoising {0}'.format(fpath)) image = nib.load(fpath) data = image.get_data() if sigma == 0: logging.info('Estimating sigma') sigma = estimate_sigma(data) logging.debug('Found sigma {0}'.format(sigma)) denoised_data = nlmeans(data, sigma) denoised_image = nib.Nifti1Image( denoised_data, image.affine, image.header) denoised_image.to_filename(odenoised) logging.info('Denoised volume saved as {0}'.format(odenoised)) dipy-0.13.0/dipy/workflows/docstring_parser.py000066400000000000000000000323761317371701200215010ustar00rootroot00000000000000""" This was taken directly from the file docscrape.py of numpydoc package. Copyright (C) 2008 Stefan van der Walt , Pauli Virtanen Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ from __future__ import division, absolute_import, print_function import re import textwrap from warnings import warn class Reader(object): """A line-based string reader. """ def __init__(self, data): """ Parameters ---------- data : str String with lines separated by '\n'. """ if isinstance(data, list): self._str = data else: self._str = data.split('\n') # store string as list of lines self.reset() def __getitem__(self, n): return self._str[n] def reset(self): self._l = 0 # current line nr def read(self): if not self.eof(): out = self[self._l] self._l += 1 return out else: return '' def seek_next_non_empty_line(self): for l in self[self._l:]: if l.strip(): break else: self._l += 1 def eof(self): return self._l >= len(self._str) def read_to_condition(self, condition_func): start = self._l for line in self[start:]: if condition_func(line): return self[start:self._l] self._l += 1 if self.eof(): return self[start:self._l + 1] return [] def read_to_next_empty_line(self): self.seek_next_non_empty_line() def is_empty(line): return not line.strip() return self.read_to_condition(is_empty) def read_to_next_unindented_line(self): def is_unindented(line): return (line.strip() and (len(line.lstrip()) == len(line))) return self.read_to_condition(is_unindented) def peek(self, n=0): if self._l + n < len(self._str): return self[self._l + n] else: return '' def is_empty(self): return not ''.join(self._str).strip() def dedent_lines(lines): """Deindent a list of lines maximally""" return textwrap.dedent("\n".join(lines)).split("\n") class NumpyDocString(object): def __init__(self, docstring, config={}): docstring = textwrap.dedent(docstring).split('\n') self._doc = Reader(docstring) self._parsed_data = { 'Signature': '', 'Summary': [''], 'Extended Summary': [], 'Parameters': [], 'Outputs': [], 'Returns': [], 'Raises': [], 'Warns': [], 'Other Parameters': [], 'Attributes': [], 'Methods': [], 'See Also': [], 'Notes': [], 'Warnings': [], 'References': '', 'Examples': '', 'index': {} } self._parse() def __getitem__(self, key): return self._parsed_data[key] def __setitem__(self, key, val): if key not in self._parsed_data: warn("Unknown section %s" % key) else: self._parsed_data[key] = val def _is_at_section(self): self._doc.seek_next_non_empty_line() if self._doc.eof(): return False l1 = self._doc.peek().strip() # e.g. Parameters if l1.startswith('.. 
index::'): return True l2 = self._doc.peek(1).strip() # ---------- or ========== return l2.startswith('-' * len(l1)) or l2.startswith('=' * len(l1)) def _strip(self, doc): i = 0 j = 0 for i, line in enumerate(doc): if line.strip(): break for j, line in enumerate(doc[::-1]): if line.strip(): break return doc[i:len(doc) - j] def _read_to_next_section(self): section = self._doc.read_to_next_empty_line() while not self._is_at_section() and not self._doc.eof(): if not self._doc.peek(-1).strip(): # previous line was empty section += [''] section += self._doc.read_to_next_empty_line() return section def _read_sections(self): while not self._doc.eof(): data = self._read_to_next_section() name = data[0].strip() if name.startswith('..'): # index section yield name, data[1:] elif len(data) < 2: yield StopIteration else: yield name, self._strip(data[2:]) def _parse_param_list(self, content): r = Reader(content) params = [] while not r.eof(): header = r.read().strip() if ' : ' in header: arg_name, arg_type = header.split(' : ')[:2] else: arg_name, arg_type = header, '' desc = r.read_to_next_unindented_line() desc = dedent_lines(desc) params.append((arg_name, arg_type, desc)) return params _name_rgx = re.compile(r"^\s*(:(?P\w+):`(?P[a-zA-Z0-9_.-]+)`|" r" (?P[a-zA-Z0-9_.-]+))\s*", re.X) def _parse_see_also(self, content): """ func_name : Descriptive text continued text another_func_name : Descriptive text func_name1, func_name2, :meth:`func_name`, func_name3 """ items = [] def parse_item_name(text): """Match ':role:`name`' or 'name'""" m = self._name_rgx.match(text) if m: g = m.groups() if g[1] is None: return g[3], None else: return g[2], g[1] raise ValueError("%s is not a item name" % text) def push_item(name, rest): if not name: return name, role = parse_item_name(name) items.append((name, list(rest), role)) del rest[:] current_func = None rest = [] for line in content: if not line.strip(): continue m = self._name_rgx.match(line) if m and line[m.end():].strip().startswith(':'): push_item(current_func, rest) current_func, line = line[:m.end()], line[m.end():] rest = [line.split(':', 1)[1].strip()] if not rest[0]: rest = [] elif not line.startswith(' '): push_item(current_func, rest) current_func = None if ',' in line: for func in line.split(','): if func.strip(): push_item(func, []) elif line.strip(): current_func = line elif current_func is not None: rest.append(line.strip()) push_item(current_func, rest) return items def _parse_index(self, section, content): """ .. 
index: default :refguide: something, else, and more """ def strip_each_in(lst): return [s.strip() for s in lst] out = {} section = section.split('::') if len(section) > 1: out['default'] = strip_each_in(section[1].split(','))[0] for line in content: line = line.split(':') if len(line) > 2: out[line[1]] = strip_each_in(line[2].split(',')) return out def _parse_summary(self): """Grab signature (if given) and summary""" if self._is_at_section(): return # If several signatures present, take the last one while True: summary = self._doc.read_to_next_empty_line() summary_str = " ".join([s.strip() for s in summary]).strip() if re.compile('^([\w., ]+=)?\s*[\w\.]+\(.*\)$').match(summary_str): self['Signature'] = summary_str if not self._is_at_section(): continue break if summary is not None: self['Summary'] = summary if not self._is_at_section(): self['Extended Summary'] = self._read_to_next_section() def _parse(self): self._doc.reset() self._parse_summary() for (section, content) in self._read_sections(): if not section.startswith('..'): section = ' '.join([s.capitalize() for s in section.split(' ')]) if section in ('Parameters', 'Outputs', 'Returns', 'Raises', 'Warns', 'Other Parameters', 'Attributes', 'Methods'): self[section] = self._parse_param_list(content) elif section.startswith('.. index::'): self['index'] = self._parse_index(section, content) elif section == 'See Also': self['See Also'] = self._parse_see_also(content) else: self[section] = content # string conversion routines def _str_header(self, name, symbol='-'): return [name, len(name) * symbol] def _str_indent(self, doc, indent=4): out = [] for line in doc: out += [' ' * indent + line] return out def _str_signature(self): if self['Signature']: return [self['Signature'].replace('*', '\*')] + [''] else: return [''] def _str_summary(self): if self['Summary']: return self['Summary'] + [''] else: return [] def _str_extended_summary(self): if self['Extended Summary']: return self['Extended Summary'] + [''] else: return [] def _str_param_list(self, name): out = [] if self[name]: out += self._str_header(name) for param, param_type, desc in self[name]: if param_type: out += ['%s : %s' % (param, param_type)] else: out += [param] out += self._str_indent(desc) out += [''] return out def _str_section(self, name): out = [] if self[name]: out += self._str_header(name) out += self[name] out += [''] return out def _str_see_also(self, func_role): if not self['See Also']: return [] out = [] out += self._str_header("See Also") last_had_desc = True for func, desc, role in self['See Also']: if role: link = ':%s:`%s`' % (role, func) elif func_role: link = ':%s:`%s`' % (func_role, func) else: link = "`%s`_" % func if desc or last_had_desc: out += [''] out += [link] else: out[-1] += ", %s" % link if desc: out += self._str_indent([' '.join(desc)]) last_had_desc = True else: last_had_desc = False out += [''] return out def _str_index(self): idx = self['index'] out = [] out += ['.. 
index:: %s' % idx.get('default', '')] for section, references in idx.items(): if section == 'default': continue out += [' :%s: %s' % (section, ', '.join(references))] return out def __str__(self, func_role=''): out = [] out += self._str_signature() out += self._str_summary() out += self._str_extended_summary() for param_list in ('Parameters', 'Returns', 'Other Parameters', 'Raises', 'Warns'): out += self._str_param_list(param_list) out += self._str_section('Warnings') out += self._str_see_also(func_role) for s in ('Notes', 'References', 'Examples'): out += self._str_section(s) for param_list in ('Attributes', 'Methods'): out += self._str_param_list(param_list) out += self._str_index() return '\n'.join(out) dipy-0.13.0/dipy/workflows/flow_runner.py000066400000000000000000000054621317371701200204650ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import import logging from dipy.utils.six import iteritems from dipy.workflows.base import IntrospectiveArgumentParser def get_level(lvl): """ Transforms the loggin level passed on the commandline into a proper logging level name. """ try: return logging._levelNames[lvl] except: return logging.INFO def run_flow(flow): """ Wraps the process of building an argparser that reflects the workflow that we want to run along with some generic parameters like logging, force and output strategies. The resulting parameters are then fed to the workflow's run method. """ parser = IntrospectiveArgumentParser() sub_flows_dicts = parser.add_workflow(flow) # Common workflow arguments parser.add_argument('--force', dest='force', action='store_true', default=False, help='Force overwriting output files.') parser.add_argument('--out_strat', action='store', dest='out_strat', metavar='string', required=False, default='append', help='Strategy to manage output creation.') parser.add_argument('--mix_names', dest='mix_names', action='store_true', default=False, help='Prepend mixed input names to output names.') # Add logging parameters common to all workflows parser.add_argument('--log_level', action='store', dest='log_level', metavar='string', required=False, default='INFO', help='Log messsages display level') parser.add_argument('--log_file', action='store', dest='log_file', metavar='string', required=False, default='', help='Log file to be saved.') args = parser.get_flow_args() logging.basicConfig(filename=args['log_file'], format='%(levelname)s:%(message)s', level=get_level(args['log_level'])) # Output management parameters flow._force_overwrite = args['force'] flow._output_strategy = args['out_strat'] flow._mix_names = args['mix_names'] # Keep only workflow related parameters del args['force'] del args['log_level'] del args['log_file'] del args['out_strat'] del args['mix_names'] # Remove subflows related params for sub_flow, params_dict in iteritems(sub_flows_dicts): for key, _ in iteritems(params_dict): if key in args.keys(): params_dict[key] = args.pop(key) # Rename dictionary key to the original param name params_dict[key.split('.')[1]] = params_dict.pop(key) if sub_flows_dicts: flow.set_sub_flows_optionals(sub_flows_dicts) return flow.run(**args) dipy-0.13.0/dipy/workflows/mask.py000066400000000000000000000026421317371701200170550ustar00rootroot00000000000000#!/usr/bin/env python from __future__ import division import inspect import logging import numpy as np from dipy.io.image import load_nifti, save_nifti from dipy.workflows.workflow import Workflow class MaskFlow(Workflow): @classmethod def get_short_name(cls): return 'mask' 
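# A minimal sketch (hypothetical entry-point script, mirroring how dipy's
# command line workflows are wired up): run_flow() builds the argument parser
# from MaskFlow.run's docstring and adds the common --force / --out_strat /
# --mix_names / --log_level / --log_file options defined in flow_runner.py.
from dipy.workflows.flow_runner import run_flow
from dipy.workflows.mask import MaskFlow

if __name__ == '__main__':
    run_flow(MaskFlow())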
def run(self, input_files, lb, ub=np.inf, out_dir='', out_mask='mask.nii.gz'): """ Workflow for creating a binary mask Parameters ---------- input_files : string Path to image to be masked. lb : float Lower bound value. ub : float Upper bound value (default Inf) out_dir : string, optional Output directory (default input file directory) out_mask : string, optional Name of the masked file (default 'mask.nii.gz') """ if lb >= ub: logging.error('The upper bound(less than) should be greater' ' than the lower bound (greather_than).') return io_it = self.get_io_iterator() for input_path, out_mask_path in io_it: logging.info('Creating mask of {0}'.format(input_path)) data, affine = load_nifti(input_path) mask = np.bitwise_and(data > lb, data < ub) save_nifti(out_mask_path, mask.astype(np.ubyte), affine) logging.info('Mask saved at {0}'.format(out_mask_path)) dipy-0.13.0/dipy/workflows/multi_io.py000066400000000000000000000164221317371701200177440ustar00rootroot00000000000000import inspect import numpy as np import os import os.path as path from glob import glob from dipy.utils.six import string_types def common_start(sa, sb): """ Returns the longest common substring from the beginning of sa and sb """ def _iter(): for a, b in zip(sa, sb): if a == b: yield a else: return return ''.join(_iter()) def slash_to_under(dir_str): return ''.join(dir_str.replace('/', '_')) def connect_output_paths(inputs, out_dir, out_files, output_strategy='append', mix_names=True): """ Generates a list of output files paths based on input files and output strategies. Parameters ---------- inputs : array List of input paths. out_dir : string The output directory. out_files : array List of output files. output_strategy : string Which strategy to use to generate the output paths. 'append': Add out_dir to the path of the input. 'prepend': Add the input path directory tree to out_dir. 'absolute': Put directly in out_dir. mix_names : bool Whether or not prepend a string composed of a mix of the input names to the final output name. Returns ------- A list of output file paths. 
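# A minimal sketch (hypothetical file names) of calling the workflow above
# directly from Python: voxels with lb < value < ub are kept, and with the
# default 'append' strategy and out_dir='' the mask is written next to the input.
from dipy.workflows.mask import MaskFlow

MaskFlow().run('fa.nii.gz', lb=0.2, ub=1.0, out_mask='fa_mask.nii.gz')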
""" outputs = [] if isinstance(inputs, string_types): inputs = [inputs] if isinstance(out_files, string_types): out_files = [out_files] sizes_of_inputs = [len(inp) for inp in inputs] max_size = np.max(sizes_of_inputs) min_size = np.min(sizes_of_inputs) if min_size > 1 and min_size != max_size: raise ImportError('Size of input issue') elif min_size == 1: for i, sz in enumerate(sizes_of_inputs): if sz == min_size: inputs[i] = max_size * inputs[i] if mix_names: mixing_prefixes = concatenate_inputs(inputs) else: mixing_prefixes = [''] * len(inputs[0]) for (mix_pref, inp) in zip(mixing_prefixes, inputs[0]): inp_dirname = path.dirname(inp) if output_strategy == 'prepend': if path.isabs(out_dir): dname = out_dir + inp_dirname if not path.isabs(out_dir): dname = path.join( os.getcwd(), out_dir + inp_dirname) elif output_strategy == 'append': dname = path.join(inp_dirname, out_dir) else: dname = out_dir updated_out_files = [] for out_file in out_files: updated_out_files.append(path.join(dname, mix_pref + out_file)) outputs.append(updated_out_files) return inputs, outputs def concatenate_inputs(multi_inputs): """ Concatenate list of inputs """ mixing_names = [] for inps in zip(*multi_inputs): mixing_name = '' for i, inp in enumerate(inps): mixing_name += basename_without_extension(inp) + '_' mixing_names.append(mixing_name + '_') return mixing_names def basename_without_extension(fname): base = path.basename(fname) result = base.split('.')[0] if result[-4:] == '.nii': result = result.split('.')[0] return result def io_iterator(inputs, out_dir, fnames, output_strategy='append', mix_names=False, out_keys=None): """ Creates an IOIterator from the parameters. Parameters ---------- inputs : array List of input files. out_dir : string Output directory. fnames : array File names of all outputs to be created. output_strategy : string Controls the behavior of the IOIterator for output paths. mix_names : bool Whether or not to append a mix of input names at the beginning. Returns ------- Properly instantiated IOIterator object. """ io_it = IOIterator(output_strategy=output_strategy, mix_names=mix_names) io_it.set_inputs(*inputs) io_it.set_out_dir(out_dir) io_it.set_out_fnames(*fnames) io_it.create_outputs() if out_keys: io_it.set_output_keys(*out_keys) return io_it def io_iterator_(frame, fnc, output_strategy='append', mix_names=False): """ Creates an IOIterator using introspection. Parameters ---------- frame : frameobject Contains the info about the current local variables values. fnc : function The function to inspect output_strategy : string Controls the behavior of the IOIterator for output paths. mix_names : bool Whether or not to append a mix of input names at the beginning. Returns ------- Properly instantiated IOIterator object. 
""" args, _, _, values = inspect.getargvalues(frame) args.remove('self') del values['self'] specs = inspect.getargspec(fnc) spargs = specs.args spargs.remove('self') defaults = specs.defaults len_args = len(spargs) len_defaults = len(defaults) split_at = len_args - len_defaults inputs = [] outputs = [] out_dir = '' # inputs for arv in args[:split_at]: inputs.append(values[arv]) # defaults out_keys = [] for arv in args[split_at:]: if arv == 'out_dir': out_dir = values[arv] elif 'out_' in arv: out_keys.append(arv) outputs.append(values[arv]) return io_iterator(inputs, out_dir, outputs, output_strategy, mix_names, out_keys=out_keys) class IOIterator(object): """ Create output filenames that work nicely with multiple input files from multiple directories (processing multiple subjects with one command) Use information from input files, out_dir and out_fnames to generate correct outputs which can come from long lists of multiple or single inputs. """ def __init__(self, output_strategy='append', mix_names=False): self.output_strategy = output_strategy self.mix_names = mix_names self.inputs = [] self.out_keys = None def set_inputs(self, *args): self.input_args = list(args) self.inputs = [sorted(glob(inp)) for inp in self.input_args if type(inp) == str] def set_out_dir(self, out_dir): self.out_dir = out_dir def set_out_fnames(self, *args): self.out_fnames = list(args) def set_output_keys(self, *args): self.out_keys = list(args) def create_outputs(self): if len(self.inputs) >= 1: self.updated_inputs, self.outputs = connect_output_paths( self.inputs, self.out_dir, self.out_fnames, self.output_strategy, self.mix_names) self.create_directories() else: raise ImportError('No inputs') def create_directories(self): for outputs in self.outputs: for output in outputs: directory = path.dirname(output) if not (directory == '' or os.path.exists(directory)): os.makedirs(directory) def __iter__(self): I = np.array(self.inputs).T O = np.array(self.outputs) IO = np.concatenate([I, O], axis=1) for i_o in IO: yield i_o dipy-0.13.0/dipy/workflows/reconst.py000066400000000000000000000521761317371701200176060ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import import logging import numpy as np import os.path from ast import literal_eval import nibabel as nib from dipy.core.gradients import gradient_table from dipy.data import get_sphere from dipy.io.gradients import read_bvals_bvecs from dipy.io.peaks import save_peaks, peaks_to_niftis from dipy.reconst.csdeconv import (ConstrainedSphericalDeconvModel, auto_response) from dipy.reconst.dti import (TensorModel, color_fa, fractional_anisotropy, geodesic_anisotropy, mean_diffusivity, axial_diffusivity, radial_diffusivity, lower_triangular, mode as get_mode) from dipy.reconst.peaks import peaks_from_model from dipy.reconst.shm import CsaOdfModel from dipy.workflows.workflow import Workflow class ReconstDtiFlow(Workflow): @classmethod def get_short_name(cls): return 'dti' def run(self, input_files, bvalues, bvectors, mask_files, b0_threshold=0.0, save_metrics=[], out_dir='', out_tensor='tensors.nii.gz', out_fa='fa.nii.gz', out_ga='ga.nii.gz', out_rgb='rgb.nii.gz', out_md='md.nii.gz', out_ad='ad.nii.gz', out_rd='rd.nii.gz', out_mode='mode.nii.gz', out_evec='evecs.nii.gz', out_eval='evals.nii.gz'): """ Workflow for tensor reconstruction and for computing DTI metrics. Performs a tensor reconstruction on the files by 'globing' ``input_files`` and saves the DTI metrics in a directory specified by ``out_dir``. 
Parameters ---------- input_files : string Path to the input volumes. This path may contain wildcards to process multiple inputs at once. bvalues : string Path to the bvalues files. This path may contain wildcards to use multiple bvalues files at once. bvectors : string Path to the bvalues files. This path may contain wildcards to use multiple bvalues files at once. mask_files : string Path to the input masks. This path may contain wildcards to use multiple masks at once. (default: No mask used) b0_threshold : float, optional Threshold used to find b=0 directions (default 0.0) save_metrics : variable string, optional List of metrics to save. Possible values: fa, ga, rgb, md, ad, rd, mode, tensor, evec, eval (default [] (all)) out_dir : string, optional Output directory (default input file directory) out_tensor : string, optional Name of the tensors volume to be saved (default 'tensors.nii.gz') out_fa : string, optional Name of the fractional anisotropy volume to be saved (default 'fa.nii.gz') out_ga : string, optional Name of the geodesic anisotropy volume to be saved (default 'ga.nii.gz') out_rgb : string, optional Name of the color fa volume to be saved (default 'rgb.nii.gz') out_md : string, optional Name of the mean diffusivity volume to be saved (default 'md.nii.gz') out_ad : string, optional Name of the axial diffusivity volume to be saved (default 'ad.nii.gz') out_rd : string, optional Name of the radial diffusivity volume to be saved (default 'rd.nii.gz') out_mode : string, optional Name of the mode volume to be saved (default 'mode.nii.gz') out_evec : string, optional Name of the eigenvectors volume to be saved (default 'evecs.nii.gz') out_eval : string, optional Name of the eigenvalues to be saved (default 'evals.nii.gz') """ io_it = self.get_io_iterator() for dwi, bval, bvec, mask, otensor, ofa, oga, orgb, omd, oad, orad, \ omode, oevecs, oevals in io_it: logging.info('Computing DTI metrics for {0}'.format(dwi)) img = nib.load(dwi) data = img.get_data() affine = img.affine if mask is None: mask = None else: mask = nib.load(mask).get_data().astype(np.bool) tenfit, _ = self.get_fitted_tensor(data, mask, bval, bvec, b0_threshold) if not save_metrics: save_metrics = ['fa', 'md', 'rd', 'ad', 'ga', 'rgb', 'mode', 'evec', 'eval', 'tensor'] FA = fractional_anisotropy(tenfit.evals) FA[np.isnan(FA)] = 0 FA = np.clip(FA, 0, 1) if 'tensor' in save_metrics: tensor_vals = lower_triangular(tenfit.quadratic_form) correct_order = [0, 1, 3, 2, 4, 5] tensor_vals_reordered = tensor_vals[..., correct_order] fiber_tensors = nib.Nifti1Image(tensor_vals_reordered.astype( np.float32), affine) nib.save(fiber_tensors, otensor) if 'fa' in save_metrics: fa_img = nib.Nifti1Image(FA.astype(np.float32), affine) nib.save(fa_img, ofa) if 'ga' in save_metrics: GA = geodesic_anisotropy(tenfit.evals) ga_img = nib.Nifti1Image(GA.astype(np.float32), affine) nib.save(ga_img, oga) if 'rgb' in save_metrics: RGB = color_fa(FA, tenfit.evecs) rgb_img = nib.Nifti1Image(np.array(255 * RGB, 'uint8'), affine) nib.save(rgb_img, orgb) if 'md' in save_metrics: MD = mean_diffusivity(tenfit.evals) md_img = nib.Nifti1Image(MD.astype(np.float32), affine) nib.save(md_img, omd) if 'ad' in save_metrics: AD = axial_diffusivity(tenfit.evals) ad_img = nib.Nifti1Image(AD.astype(np.float32), affine) nib.save(ad_img, oad) if 'rd' in save_metrics: RD = radial_diffusivity(tenfit.evals) rd_img = nib.Nifti1Image(RD.astype(np.float32), affine) nib.save(rd_img, orad) if 'mode' in save_metrics: MODE = get_mode(tenfit.quadratic_form) mode_img = 
nib.Nifti1Image(MODE.astype(np.float32), affine) nib.save(mode_img, omode) if 'evec' in save_metrics: evecs_img = nib.Nifti1Image(tenfit.evecs.astype(np.float32), affine) nib.save(evecs_img, oevecs) if 'eval' in save_metrics: evals_img = nib.Nifti1Image(tenfit.evals.astype(np.float32), affine) nib.save(evals_img, oevals) logging.info('DTI metrics saved in {0}'. format(os.path.dirname(oevals))) def get_tensor_model(self, gtab): return TensorModel(gtab, fit_method="WLS") def get_fitted_tensor(self, data, mask, bval, bvec, b0_threshold=0): logging.info('Tensor estimation...') bvals, bvecs = read_bvals_bvecs(bval, bvec) gtab = gradient_table(bvals, bvecs, b0_threshold=b0_threshold) tenmodel = self.get_tensor_model(gtab) tenfit = tenmodel.fit(data, mask) return tenfit, gtab class ReconstDtiRestoreFlow(ReconstDtiFlow): @classmethod def get_short_name(cls): return 'dti_restore' def run(self, input_files, bvalues, bvectors, mask_files, sigma, b0_threshold=0.0, save_metrics=[], jacobian=True, out_dir='', out_tensor='tensors.nii.gz', out_fa='fa.nii.gz', out_ga='ga.nii.gz', out_rgb='rgb.nii.gz', out_md='md.nii.gz', out_ad='ad.nii.gz', out_rd='rd.nii.gz', out_mode='mode.nii.gz', out_evec='evecs.nii.gz', out_eval='evals.nii.gz'): """ Workflow for tensor reconstruction and for computing DTI metrics. Performs a tensor reconstruction on the files by 'globing' ``input_files`` and saves the DTI metrics in a directory specified by ``out_dir``. Parameters ---------- input_files : string Path to the input volumes. This path may contain wildcards to process multiple inputs at once. bvalues : string Path to the bvalues files. This path may contain wildcards to use multiple bvalues files at once. bvectors : string Path to the bvalues files. This path may contain wildcards to use multiple bvalues files at once. mask_files : string Path to the input masks. This path may contain wildcards to use multiple masks at once. (default: No mask used) sigma : float An estimate of the variance. b0_threshold : float, optional Threshold used to find b=0 directions (default 0.0) save_metrics : variable string, optional List of metrics to save. 
Possible values: fa, ga, rgb, md, ad, rd, mode, tensor, evec, eval (default [] (all)) jacobian : bool, optional Whether to use the Jacobian of the tensor to speed the non-linear optimization procedure used to fit the tensor parameters (default True) out_dir : string, optional Output directory (default input file directory) out_tensor : string, optional Name of the tensors volume to be saved (default 'tensors.nii.gz') out_fa : string, optional Name of the fractional anisotropy volume to be saved (default 'fa.nii.gz') out_ga : string, optional Name of the geodesic anisotropy volume to be saved (default 'ga.nii.gz') out_rgb : string, optional Name of the color fa volume to be saved (default 'rgb.nii.gz') out_md : string, optional Name of the mean diffusivity volume to be saved (default 'md.nii.gz') out_ad : string, optional Name of the axial diffusivity volume to be saved (default 'ad.nii.gz') out_rd : string, optional Name of the radial diffusivity volume to be saved (default 'rd.nii.gz') out_mode : string, optional Name of the mode volume to be saved (default 'mode.nii.gz') out_evec : string, optional Name of the eigenvectors volume to be saved (default 'evecs.nii.gz') out_eval : string, optional Name of the eigenvalues to be saved (default 'evals.nii.gz') """ self.sigma = sigma self.jacobian = jacobian super(ReconstDtiRestoreFlow, self).\ run(input_files, bvalues, bvectors, mask_files, b0_threshold, save_metrics, out_dir, out_tensor, out_fa, out_ga, out_rgb, out_md, out_ad, out_rd, out_mode, out_evec, out_eval) class ReconstCSDFlow(Workflow): @classmethod def get_short_name(cls): return 'csd' def run(self, input_files, bvalues, bvectors, mask_files, b0_threshold=0.0, frf=[15.0, 4.0, 4.0], extract_pam_values=False, out_dir='', out_pam='peaks.pam5', out_shm='shm.nii.gz', out_peaks_dir='peaks_dirs.nii.gz', out_peaks_values='peaks_values.nii.gz', out_peaks_indices='peaks_indices.nii.gz', out_gfa='gfa.nii.gz'): """ Workflow for peaks computation. Peaks computation is done by 'globing' ``input_files`` and saves the peaks in a directory specified by ``out_dir``. Parameters ---------- input_files : string Path to the input volumes. This path may contain wildcards to process multiple inputs at once. bvalues : string Path to the bvalues files. This path may contain wildcards to use multiple bvalues files at once. bvectors : string Path to the bvalues files. This path may contain wildcards to use multiple bvalues files at once. mask_files : string Path to the input masks. This path may contain wildcards to use multiple masks at once. (default: No mask used) b0_threshold : float, optional Threshold used to find b=0 directions frf : tuple, optional Fiber response function to me mutiplied by 10**-4 (default: 15,4,4) extract_pam_values : bool, optional Wheter or not to save pam volumes as single nifti files. 
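# A minimal sketch of how ReconstCSDFlow turns the `frf` option into a fiber
# response function (the code doing this appears in the run() body below);
# the three eigenvalues are scaled by 10**-4 before being used.
from ast import literal_eval
import numpy as np

frf = '15.0, 4.0, 4.0'                                # as it might arrive from the CLI
l01 = np.array(literal_eval(frf), dtype=np.float64) * 10 ** -4
response_evals = np.array([l01[0], l01[1], l01[1]])   # [1.5e-3, 4e-4, 4e-4]
ratio = l01[1] / l01[0]                               # ~0.27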
out_dir : string, optional Output directory (default input file directory) out_pam : string, optional Name of the peaks volume to be saved (default 'peaks.pam5') out_shm : string, optional Name of the shperical harmonics volume to be saved (default 'shm.nii.gz') out_peaks_dir : string, optional Name of the peaks directions volume to be saved (default 'peaks_dirs.nii.gz') out_peaks_values : string, optional Name of the peaks values volume to be saved (default 'peaks_values.nii.gz') out_peaks_indices : string, optional Name of the peaks indices volume to be saved (default 'peaks_indices.nii.gz') out_gfa : string, optional Name of the generalise fa volume to be saved (default 'gfa.nii.gz') """ io_it = self.get_io_iterator() for dwi, bval, bvec, maskfile, opam, oshm, opeaks_dir, opeaks_values, \ opeaks_indices, ogfa in io_it: logging.info('Computing fiber odfs for {0}'.format(dwi)) vol = nib.load(dwi) data = vol.get_data() affine = vol.get_affine() bvals, bvecs = read_bvals_bvecs(bval, bvec) gtab = gradient_table(bvals, bvecs, b0_threshold=b0_threshold) mask_vol = nib.load(maskfile).get_data().astype(np.bool) sh_order = 8 if data.shape[-1] < 15: raise ValueError( 'You need at least 15 unique DWI volumes to ' 'compute fiber odfs. You currently have: {0}' ' DWI volumes.'.format(data.shape[-1])) elif data.shape[-1] < 30: sh_order = 6 response, ratio = auto_response(gtab, data) response = list(response) if frf is not None: if isinstance(frf, str): l01 = np.array(literal_eval(frf), dtype=np.float64) else: l01 = np.array(frf) l01 *= 10 ** -4 response[0] = np.array([l01[0], l01[1], l01[1]]) ratio = l01[1] / l01[0] logging.info( 'Eigenvalues for the frf of the input data are :{0}' .format(response[0])) logging.info('Ratio for smallest to largest eigen value is {0}' .format(ratio)) peaks_sphere = get_sphere('symmetric362') csd_model = ConstrainedSphericalDeconvModel(gtab, response, sh_order=sh_order) peaks_csd = peaks_from_model(model=csd_model, data=data, sphere=peaks_sphere, relative_peak_threshold=.5, min_separation_angle=25, mask=mask_vol, return_sh=True, sh_order=sh_order, normalize_peaks=True, parallel=False) peaks_csd.affine = affine save_peaks(opam, peaks_csd) if extract_pam_values: peaks_to_niftis(peaks_csd, oshm, opeaks_dir, opeaks_values, opeaks_indices, ogfa, reshape_dirs=True) logging.info('Peaks saved in {0}'.format(os.path.dirname(opam))) return io_it class ReconstCSAFlow(Workflow): @classmethod def get_short_name(cls): return 'csa' def run(self, input_files, bvalues, bvectors, mask_files, b0_threshold=0.0, extract_pam_values=False, out_dir='', out_pam='peaks.pam5', out_shm='shm.nii.gz', out_peaks_dir='peaks_dirs.nii.gz', out_peaks_values='peaks_values.nii.gz', out_peaks_indices='peaks_indices.nii.gz', out_gfa='gfa.nii.gz'): """ Workflow for peaks computation. Peaks computation is done by 'globing' ``input_files`` and saves the peaks in a directory specified by ``out_dir``. Parameters ---------- input_files : string Path to the input volumes. This path may contain wildcards to process multiple inputs at once. bvalues : string Path to the bvalues files. This path may contain wildcards to use multiple bvalues files at once. bvectors : string Path to the bvalues files. This path may contain wildcards to use multiple bvalues files at once. mask_files : string Path to the input masks. This path may contain wildcards to use multiple masks at once. 
(default: No mask used) b0_threshold : float, optional Threshold used to find b=0 directions extract_pam_values : bool, optional Whether or not to save pam volumes as single nifti files. out_dir : string, optional Output directory (default input file directory) out_pam : string, optional Name of the peaks volume to be saved (default 'peaks.pam5') out_shm : string, optional Name of the spherical harmonics volume to be saved (default 'shm.nii.gz') out_peaks_dir : string, optional Name of the peaks directions volume to be saved (default 'peaks_dirs.nii.gz') out_peaks_values : string, optional Name of the peaks values volume to be saved (default 'peaks_values.nii.gz') out_peaks_indices : string, optional Name of the peaks indices volume to be saved (default 'peaks_indices.nii.gz') out_gfa : string, optional Name of the generalised FA volume to be saved (default 'gfa.nii.gz') """ io_it = self.get_io_iterator() for dwi, bval, bvec, maskfile, opam, oshm, opeaks_dir, \ opeaks_values, opeaks_indices, ogfa in io_it: logging.info('Computing fiber odfs for {0}'.format(dwi)) vol = nib.load(dwi) data = vol.get_data() affine = vol.get_affine() bvals, bvecs = read_bvals_bvecs(bval, bvec) gtab = gradient_table(bvals, bvecs, b0_threshold=b0_threshold) mask_vol = nib.load(maskfile).get_data().astype(np.bool) sh_order = 8 if data.shape[-1] < 15: raise ValueError( 'You need at least 15 unique DWI volumes to ' 'compute fiber odfs. You currently have: {0}' ' DWI volumes.'.format(data.shape[-1])) elif data.shape[-1] < 30: sh_order = 6 response, ratio = auto_response(gtab, data) response = list(response) logging.info( 'Eigenvalues for the frf of the input data are :{0}' .format(response[0])) logging.info( 'Ratio for smallest to largest eigen value is {0}' .format(ratio)) peaks_sphere = get_sphere('symmetric362') csa_model = CsaOdfModel(gtab, sh_order) peaks_csa = peaks_from_model(model=csa_model, data=data, sphere=peaks_sphere, relative_peak_threshold=.5, min_separation_angle=25, mask=mask_vol, return_sh=True, sh_order=sh_order, normalize_peaks=True, parallel=False) peaks_csa.affine = affine save_peaks(opam, peaks_csa) if extract_pam_values: peaks_to_niftis(peaks_csa, oshm, opeaks_dir, opeaks_values, opeaks_indices, ogfa, reshape_dirs=True) logging.info( 'Peaks saved in {0}'.format(os.path.dirname(opam))) return io_it def get_tensor_model(self, gtab): return TensorModel(gtab, fit_method="RT", sigma=self.sigma, jac=self.jacobian) dipy-0.13.0/dipy/workflows/segment.py000066400000000000000000000061601317371701200175630ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import import logging import numpy as np from dipy.segment.mask import median_otsu from dipy.workflows.workflow import Workflow from dipy.io.image import save_nifti, load_nifti class MedianOtsuFlow(Workflow): @classmethod def get_short_name(cls): return 'medotsu' def run(self, input_files, save_masked=False, median_radius=2, numpass=5, autocrop=False, vol_idx=None, dilate=None, out_dir='', out_mask='brain_mask.nii.gz', out_masked='dwi_masked.nii.gz'): """Workflow wrapping the median_otsu segmentation method. Applies median_otsu segmentation on each file found by 'globbing' ``input_files`` and saves the results in a directory specified by ``out_dir``. Parameters ---------- input_files : string Path to the input volumes. This path may contain wildcards to process multiple inputs at once. 
save_masked : bool Save mask median_radius : int, optional Radius (in voxels) of the applied median filter (default 2) numpass : int, optional Number of pass of the median filter (default 5) autocrop : bool, optional If True, the masked input_volumes will also be cropped using the bounding box defined by the masked data. For example, if diffusion images are of 1x1x1 (mm^3) or higher resolution auto-cropping could reduce their size in memory and speed up some of the analysis. (default False) vol_idx : string, optional 1D array representing indices of ``axis=3`` of a 4D `input_volume` 'None' (the default) corresponds to ``(0,)`` (assumes first volume in 4D array) dilate : string, optional number of iterations for binary dilation (default 'None') out_dir : string, optional Output directory (default input file directory) out_mask : string, optional Name of the mask volume to be saved (default 'brain_mask.nii.gz') out_masked : string, optional Name of the masked volume to be saved (default 'dwi_masked.nii.gz') """ io_it = self.get_io_iterator() for fpath, mask_out_path, masked_out_path in io_it: logging.info('Applying median_otsu segmentation on {0}'. format(fpath)) data, affine, img = load_nifti(fpath, return_img=True) masked_volume, mask_volume = median_otsu(data, median_radius, numpass, autocrop, vol_idx, dilate) save_nifti(mask_out_path, mask_volume.astype(np.float32), affine) logging.info('Mask saved as {0}'.format(mask_out_path)) if save_masked: save_nifti(masked_out_path, masked_volume, affine, img.header) logging.info('Masked volume saved as {0}'. format(masked_out_path)) return io_it dipy-0.13.0/dipy/workflows/tests/000077500000000000000000000000001317371701200167065ustar00rootroot00000000000000dipy-0.13.0/dipy/workflows/tests/__init__.py000066400000000000000000000001011317371701200210070ustar00rootroot00000000000000from numpy.testing import Tester test = Tester().test del Tester dipy-0.13.0/dipy/workflows/tests/test_docstring_parser.py000066400000000000000000000317251317371701200236770ustar00rootroot00000000000000""" This was taken directly from the file test_docscrape.py of numpydoc package. Copyright (C) 2008 Stefan van der Walt , Pauli Virtanen Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
""" # -*- encoding:utf-8 -*- from __future__ import division, absolute_import, print_function import sys import textwrap from dipy.workflows.docstring_parser import NumpyDocString from nose.tools import * if sys.version_info[0] >= 3: def sixu(s): return s else: def sixu(s): return unicode(s, 'unicode_escape') doc_txt = '''\ numpy.multivariate_normal(mean, cov, shape=None, spam=None) Draw values from a multivariate normal distribution with specified mean and covariance. The multivariate normal or Gaussian distribution is a generalisation of the one-dimensional normal distribution to higher dimensions. Parameters ---------- mean : (N,) ndarray Mean of the N-dimensional distribution. .. math:: (1+2+3)/3 cov : (N, N) ndarray Covariance matrix of the distribution. shape : tuple of ints Given a shape of, for example, (m,n,k), m*n*k samples are generated, and packed in an m-by-n-by-k arrangement. Because each sample is N-dimensional, the output shape is (m,n,k,N). Returns ------- out : ndarray The drawn samples, arranged according to `shape`. If the shape given is (m,n,...), then the shape of `out` is is (m,n,...,N). In other words, each entry ``out[i,j,...,:]`` is an N-dimensional value drawn from the distribution. list of str This is not a real return value. It exists to test anonymous return values. Other Parameters ---------------- spam : parrot A parrot off its mortal coil. Raises ------ RuntimeError Some error Warns ----- RuntimeWarning Some warning Warnings -------- Certain warnings apply. Notes ----- Instead of specifying the full covariance matrix, popular approximations include: - Spherical covariance (`cov` is a multiple of the identity matrix) - Diagonal covariance (`cov` has non-negative elements only on the diagonal) This geometrical property can be seen in two dimensions by plotting generated data-points: >>> mean = [0,0] >>> cov = [[1,0],[0,100]] # diagonal covariance, points lie on x or y-axis >>> x,y = multivariate_normal(mean,cov,5000).T >>> plt.plot(x,y,'x'); plt.axis('equal'); plt.show() Note that the covariance matrix must be symmetric and non-negative definite. References ---------- .. [1] A. Papoulis, "Probability, Random Variables, and Stochastic Processes," 3rd ed., McGraw-Hill Companies, 1991 .. [2] R.O. Duda, P.E. Hart, and D.G. Stork, "Pattern Classification," 2nd ed., Wiley, 2001. See Also -------- some, other, funcs otherfunc : relationship Examples -------- >>> mean = (1,2) >>> cov = [[1,0],[1,0]] >>> x = multivariate_normal(mean,cov,(3,3)) >>> print x.shape (3, 3, 2) The following is probably true, given that 0.6 is roughly twice the standard deviation: >>> print list( (x[0,0,:] - mean) < 0.6 ) [True, True] .. index:: random :refguide: random;distributions, random;gauss ''' doc = NumpyDocString(doc_txt) doc_yields_txt = """ Test generator Yields ------ a : int The number of apples. b : int The number of bananas. int The number of unknowns. 
""" doc_yields = NumpyDocString(doc_yields_txt) def test_signature(): assert doc['Signature'].startswith('numpy.multivariate_normal(') assert doc['Signature'].endswith('spam=None)') def test_summary(): assert doc['Summary'][0].startswith('Draw values') assert doc['Summary'][-1].endswith('covariance.') def test_extended_summary(): assert doc['Extended Summary'][0].startswith('The multivariate normal') def test_parameters(): assert_equal(len(doc['Parameters']), 3) assert_equal( [n for n, _, _ in doc['Parameters']], ['mean', 'cov', 'shape']) arg, arg_type, desc = doc['Parameters'][1] assert_equal(arg_type, '(N, N) ndarray') assert desc[0].startswith('Covariance matrix') assert doc['Parameters'][0][-1][-2] == ' (1+2+3)/3' def test_other_parameters(): assert_equal(len(doc['Other Parameters']), 1) assert_equal([n for n, _, _ in doc['Other Parameters']], ['spam']) arg, arg_type, desc = doc['Other Parameters'][0] assert_equal(arg_type, 'parrot') assert desc[0].startswith('A parrot off its mortal coil') def test_returns(): assert_equal(len(doc['Returns']), 2) arg, arg_type, desc = doc['Returns'][0] assert_equal(arg, 'out') assert_equal(arg_type, 'ndarray') assert desc[0].startswith('The drawn samples') assert desc[-1].endswith('distribution.') arg, arg_type, desc = doc['Returns'][1] assert_equal(arg, 'list of str') assert_equal(arg_type, '') assert desc[0].startswith('This is not a real') assert desc[-1].endswith('anonymous return values.') def test_notes(): assert doc['Notes'][0].startswith('Instead') assert doc['Notes'][-1].endswith('definite.') assert_equal(len(doc['Notes']), 17) def test_references(): assert doc['References'][0].startswith('..') assert doc['References'][-1].endswith('2001.') def test_examples(): assert doc['Examples'][0].startswith('>>>') assert doc['Examples'][-1].endswith('True]') def test_index(): assert_equal(doc['index']['default'], 'random') assert_equal(len(doc['index']), 2) assert_equal(len(doc['index']['refguide']), 2) def non_blank_line_by_line_compare(a, b): a = textwrap.dedent(a) b = textwrap.dedent(b) a = [l.rstrip() for l in a.split('\n') if l.strip()] b = [l.rstrip() for l in b.split('\n') if l.strip()] for n, line in enumerate(a): if not line == b[n]: raise AssertionError("Lines %s of a and b differ: " "\n>>> %s\n<<< %s\n" % (n, line, b[n])) def test_str(): # doc_txt has the order of Notes and See Also sections flipped. # This should be handled automatically, and so, one thing this test does # is to make sure that See Also precedes Notes in the output. non_blank_line_by_line_compare(str(doc), """numpy.multivariate_normal(mean, cov, shape=None, spam=None) Draw values from a multivariate normal distribution with specified mean and covariance. The multivariate normal or Gaussian distribution is a generalisation of the one-dimensional normal distribution to higher dimensions. Parameters ---------- mean : (N,) ndarray Mean of the N-dimensional distribution. .. math:: (1+2+3)/3 cov : (N, N) ndarray Covariance matrix of the distribution. shape : tuple of ints Given a shape of, for example, (m,n,k), m*n*k samples are generated, and packed in an m-by-n-by-k arrangement. Because each sample is N-dimensional, the output shape is (m,n,k,N). Returns ------- out : ndarray The drawn samples, arranged according to `shape`. If the shape given is (m,n,...), then the shape of `out` is is (m,n,...,N). In other words, each entry ``out[i,j,...,:]`` is an N-dimensional value drawn from the distribution. list of str This is not a real return value. 
It exists to test anonymous return values. Other Parameters ---------------- spam : parrot A parrot off its mortal coil. Raises ------ RuntimeError Some error Warns ----- RuntimeWarning Some warning Warnings -------- Certain warnings apply. See Also -------- `some`_, `other`_, `funcs`_ `otherfunc`_ relationship Notes ----- Instead of specifying the full covariance matrix, popular approximations include: - Spherical covariance (`cov` is a multiple of the identity matrix) - Diagonal covariance (`cov` has non-negative elements only on the diagonal) This geometrical property can be seen in two dimensions by plotting generated data-points: >>> mean = [0,0] >>> cov = [[1,0],[0,100]] # diagonal covariance, points lie on x or y-axis >>> x,y = multivariate_normal(mean,cov,5000).T >>> plt.plot(x,y,'x'); plt.axis('equal'); plt.show() Note that the covariance matrix must be symmetric and non-negative definite. References ---------- .. [1] A. Papoulis, "Probability, Random Variables, and Stochastic Processes," 3rd ed., McGraw-Hill Companies, 1991 .. [2] R.O. Duda, P.E. Hart, and D.G. Stork, "Pattern Classification," 2nd ed., Wiley, 2001. Examples -------- >>> mean = (1,2) >>> cov = [[1,0],[1,0]] >>> x = multivariate_normal(mean,cov,(3,3)) >>> print x.shape (3, 3, 2) The following is probably true, given that 0.6 is roughly twice the standard deviation: >>> print list( (x[0,0,:] - mean) < 0.6 ) [True, True] .. index:: random :refguide: random;distributions, random;gauss""") doc2 = NumpyDocString(""" Returns array of indices of the maximum values of along the given axis. Parameters ---------- a : {array_like} Array to look in. axis : {None, integer} If None, the index is into the flattened array, otherwise along the specified axis""") def test_parameters_without_extended_description(): assert_equal(len(doc2['Parameters']), 2) doc3 = NumpyDocString(""" my_signature(*params, **kwds) Return this and that. """) doc5 = NumpyDocString( """ a.something() Raises ------ LinAlgException If array is singular. Warns ----- SomeWarning If needed """) def test_raises(): assert_equal(len(doc5['Raises']), 1) name, _, desc = doc5['Raises'][0] assert_equal(name, 'LinAlgException') assert_equal(desc, ['If array is singular.']) def test_warns(): assert_equal(len(doc5['Warns']), 1) name, _, desc = doc5['Warns'][0] assert_equal(name, 'SomeWarning') assert_equal(desc, ['If needed']) def test_see_also(): doc6 = NumpyDocString( """ z(x,theta) See Also -------- func_a, func_b, func_c func_d : some equivalent func foo.func_e : some other func over multiple lines func_f, func_g, :meth:`func_h`, func_j, func_k :obj:`baz.obj_q` :class:`class_j`: fubar foobar """) assert len(doc6['See Also']) == 12 for func, desc, role in doc6['See Also']: if func in ('func_a', 'func_b', 'func_c', 'func_f', 'func_g', 'func_h', 'func_j', 'func_k', 'baz.obj_q'): assert(not desc) else: assert(desc) if func == 'func_h': assert role == 'meth' elif func == 'baz.obj_q': assert role == 'obj' elif func == 'class_j': assert role == 'class' else: assert role is None if func == 'func_d': assert desc == ['some equivalent func'] elif func == 'foo.func_e': assert desc == ['some other func over', 'multiple lines'] elif func == 'class_j': assert desc == ['fubar', 'foobar'] doc7 = NumpyDocString(""" Doc starts on second line. """) def test_empty_first_line(): assert doc7['Summary'][0].startswith('Doc starts') def test_duplicate_signature(): # Duplicate function signatures occur e.g. 
in ufuncs, when the # automatic mechanism adds one, and a more detailed comes from the # docstring itself. doc = NumpyDocString( """ z(x1, x2) z(a, theta) """) assert doc['Signature'].strip() == 'z(a, theta)' class_doc_txt = """ Foo Parameters ---------- f : callable ``f(t, y, *f_args)`` Aaa. jac : callable ``jac(t, y, *jac_args)`` Bbb. Attributes ---------- t : float Current time. y : ndarray Current variable values. x : float Some parameter Methods ------- a b c Examples -------- For usage examples, see `ode`. """ if __name__ == "__main__": import nose nose.run() dipy-0.13.0/dipy/workflows/tests/test_iap.py000066400000000000000000000051371317371701200210760ustar00rootroot00000000000000import numpy.testing as npt import sys from dipy.workflows.base import IntrospectiveArgumentParser from dipy.workflows.flow_runner import run_flow from dipy.workflows.tests.workflow_tests_utils import TestFlow, \ DummyCombinedWorkflow, DummyWorkflow1 def test_iap(): sys.argv = [sys.argv[0]] pos_keys = ['positional_str', 'positional_bool', 'positional_int', 'positional_float'] opt_keys = ['optional_str', 'optional_bool', 'optional_int', 'optional_float'] pos_results = ['test', 0, 10, 10.2] opt_results = ['opt_test', True, 20, 20.2] inputs = inputs_from_results(opt_results, opt_keys, optional=True) inputs.extend(inputs_from_results(pos_results)) sys.argv.extend(inputs) parser = IntrospectiveArgumentParser() dummy_flow = TestFlow() parser.add_workflow(dummy_flow) args = parser.get_flow_args() all_keys = pos_keys + opt_keys all_results = pos_results + opt_results # Test if types and order are respected for k, v in zip(all_keys, all_results): npt.assert_equal(args[k], v) # Test if **args really fits dummy_flow's arguments return_values = dummy_flow.run(**args) npt.assert_array_equal(return_values, all_results + [2.0]) def test_iap_epilog(): parser = IntrospectiveArgumentParser() dummy_flow = DummyWorkflow1() parser.add_workflow(dummy_flow) assert "dummy references" in parser.epilog def test_flow_runner(): old_argv = sys.argv sys.argv = [sys.argv[0]] opt_keys = ['param_combined', 'dwf1.param1', 'dwf2.param2', 'force', 'out_strat', 'mix_names'] pos_results = ['dipy.txt'] opt_results = [30, 10, 20, True, 'absolute', True] inputs = inputs_from_results(opt_results, opt_keys, optional=True) inputs.extend(inputs_from_results(pos_results)) sys.argv.extend(inputs) dcwf = DummyCombinedWorkflow() param1, param2, combined = run_flow(dcwf) # generic flow params assert dcwf._force_overwrite assert dcwf._output_strategy == 'absolute' assert dcwf._mix_names # sub flow params assert param1 == 10 assert param2 == 20 # parent flow param assert combined == 30 sys.argv = old_argv def inputs_from_results(results, keys=None, optional=False): prefix = '--' inputs = [] for idx, result in enumerate(results): if keys is not None: inputs.append(prefix+keys[idx]) if optional and str(result) in ['True', 'False']: continue inputs.append(str(result)) return inputs if __name__ == '__main__': test_iap() test_flow_runner() dipy-0.13.0/dipy/workflows/tests/test_masking.py000066400000000000000000000017301317371701200217510ustar00rootroot00000000000000import numpy as np import numpy.testing as nt from nose.tools import assert_true, assert_false import nibabel as nib from nibabel.tmpdirs import TemporaryDirectory from dipy.data import get_data from dipy.workflows.mask import MaskFlow def test_mask(): with TemporaryDirectory() as out_dir: data_path, _, _ = get_data('small_25') vol_img = nib.load(data_path) volume = vol_img.get_data() mask_flow = 
MaskFlow() mask_flow.run(data_path, 10, out_dir=out_dir, ub=9) assert_false(mask_flow.last_generated_outputs) mask_flow.run(data_path, 10, out_dir=out_dir) mask_path = mask_flow.last_generated_outputs['out_mask'] mask_img = nib.load(mask_path) mask_data = mask_img.get_data() assert_true(mask_data.shape == volume.shape) nt.assert_array_almost_equal(mask_img.affine, vol_img.affine) assert_true(mask_data.dtype == np.uint8) if __name__ == '__main__': test_mask() dipy-0.13.0/dipy/workflows/tests/test_reconst_csa_csd.py000066400000000000000000000052441317371701200234600ustar00rootroot00000000000000import numpy as np from nose.tools import assert_true from os.path import join import numpy.testing as npt import nibabel as nib from dipy.io.peaks import load_peaks from nibabel.tmpdirs import TemporaryDirectory from dipy.data import get_data from dipy.workflows.reconst import ReconstCSDFlow, ReconstCSAFlow def test_reconst_csa(): reconst_flow_core(ReconstCSAFlow) def test_reconst_csd(): reconst_flow_core(ReconstCSDFlow) def reconst_flow_core(flow): with TemporaryDirectory() as out_dir: data_path, bval_path, bvec_path = get_data('small_64D') vol_img = nib.load(data_path) volume = vol_img.get_data() mask = np.ones_like(volume[:, :, :, 0]) mask_img = nib.Nifti1Image(mask.astype(np.uint8), vol_img.get_affine()) mask_path = join(out_dir, 'tmp_mask.nii.gz') nib.save(mask_img, mask_path) reconst_flow = flow() reconst_flow.run(data_path, bval_path, bvec_path, mask_path, out_dir=out_dir, extract_pam_values=True) gfa_path = reconst_flow.last_generated_outputs['out_gfa'] gfa_data = nib.load(gfa_path).get_data() assert_true(gfa_data.shape == volume.shape[:-1]) peaks_dir_path = reconst_flow.last_generated_outputs['out_peaks_dir'] peaks_dir_data = nib.load(peaks_dir_path).get_data() assert_true(peaks_dir_data.shape[-1] == 15) assert_true(peaks_dir_data.shape[:-1] == volume.shape[:-1]) peaks_idx_path = \ reconst_flow.last_generated_outputs['out_peaks_indices'] peaks_idx_data = nib.load(peaks_idx_path).get_data() assert_true(peaks_idx_data.shape[-1] == 5) assert_true(peaks_idx_data.shape[:-1] == volume.shape[:-1]) peaks_vals_path = \ reconst_flow.last_generated_outputs['out_peaks_values'] peaks_vals_data = nib.load(peaks_vals_path).get_data() assert_true(peaks_vals_data.shape[-1] == 5) assert_true(peaks_vals_data.shape[:-1] == volume.shape[:-1]) shm_path = reconst_flow.last_generated_outputs['out_shm'] shm_data = nib.load(shm_path).get_data() assert_true(shm_data.shape[-1] == 45) assert_true(shm_data.shape[:-1] == volume.shape[:-1]) pam = load_peaks(reconst_flow.last_generated_outputs['out_pam']) npt.assert_allclose(pam.peak_dirs.reshape(peaks_dir_data.shape), peaks_dir_data) npt.assert_allclose(pam.peak_values, peaks_vals_data) npt.assert_allclose(pam.peak_indices, peaks_idx_data) npt.assert_allclose(pam.shm_coeff, shm_data) npt.assert_allclose(pam.gfa, gfa_data) if __name__ == '__main__': npt.run_module_suite() dipy-0.13.0/dipy/workflows/tests/test_reconst_dti.py000066400000000000000000000056721317371701200226460ustar00rootroot00000000000000from os.path import join import nibabel as nib from nibabel.tmpdirs import TemporaryDirectory import numpy as np from nose.tools import assert_true from dipy.data import get_data from dipy.workflows.reconst import ReconstDtiFlow, ReconstDtiRestoreFlow def test_reconst_dti_restore(): reconst_flow_core(ReconstDtiRestoreFlow, [67]) def test_reconst_dti_nlls(): reconst_flow_core(ReconstDtiFlow) def reconst_flow_core(flow, extra_args=[]): with TemporaryDirectory() as out_dir: 
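# Shared helper for the DTI tests: run the given reconstruction flow on the bundled 'small_25' dataset inside a temporary directory and check the shape of every metric volume it writes.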
data_path, bval_path, bvec_path = get_data('small_25') vol_img = nib.load(data_path) volume = vol_img.get_data() mask = np.ones_like(volume[:, :, :, 0]) mask_img = nib.Nifti1Image(mask.astype(np.uint8), vol_img.affine) mask_path = join(out_dir, 'tmp_mask.nii.gz') nib.save(mask_img, mask_path) dti_flow = flow() args = [data_path, bval_path, bvec_path, mask_path] args.extend(extra_args) dti_flow.run(*args, out_dir=out_dir) fa_path = dti_flow.last_generated_outputs['out_fa'] fa_data = nib.load(fa_path).get_data() assert_true(fa_data.shape == volume.shape[:-1]) tensor_path = dti_flow.last_generated_outputs['out_tensor'] tensor_data = nib.load(tensor_path) assert_true(tensor_data.shape[-1] == 6) assert_true(tensor_data.shape[:-1] == volume.shape[:-1]) ga_path = dti_flow.last_generated_outputs['out_ga'] ga_data = nib.load(ga_path).get_data() assert_true(ga_data.shape == volume.shape[:-1]) rgb_path = dti_flow.last_generated_outputs['out_rgb'] rgb_data = nib.load(rgb_path) assert_true(rgb_data.shape[-1] == 3) assert_true(rgb_data.shape[:-1] == volume.shape[:-1]) md_path = dti_flow.last_generated_outputs['out_md'] md_data = nib.load(md_path).get_data() assert_true(md_data.shape == volume.shape[:-1]) ad_path = dti_flow.last_generated_outputs['out_ad'] ad_data = nib.load(ad_path).get_data() assert_true(ad_data.shape == volume.shape[:-1]) rd_path = dti_flow.last_generated_outputs['out_rd'] rd_data = nib.load(rd_path).get_data() assert_true(rd_data.shape == volume.shape[:-1]) mode_path = dti_flow.last_generated_outputs['out_mode'] mode_data = nib.load(mode_path).get_data() assert_true(mode_data.shape == volume.shape[:-1]) evecs_path = dti_flow.last_generated_outputs['out_evec'] evecs_data = nib.load(evecs_path).get_data() assert_true(evecs_data.shape[-2:] == tuple((3, 3))) assert_true(evecs_data.shape[:-2] == volume.shape[:-1]) evals_path = dti_flow.last_generated_outputs['out_eval'] evals_data = nib.load(evals_path).get_data() assert_true(evals_data.shape[-1] == 3) assert_true(evals_data.shape[:-1] == volume.shape[:-1]) if __name__ == '__main__': test_reconst_dti_restore() test_reconst_dti_nlls() dipy-0.13.0/dipy/workflows/tests/test_segment.py000066400000000000000000000026341317371701200217660ustar00rootroot00000000000000import numpy.testing as npt from os.path import join import nibabel as nib from nibabel.tmpdirs import TemporaryDirectory from dipy.data import get_data from dipy.segment.mask import median_otsu from dipy.workflows.segment import MedianOtsuFlow def test_median_otsu_flow(): with TemporaryDirectory() as out_dir: data_path, _, _ = get_data('small_25') volume = nib.load(data_path).get_data() save_masked = True median_radius = 3 numpass = 3 autocrop = False vol_idx = [0] dilate = 0 mo_flow = MedianOtsuFlow() mo_flow.run(data_path, out_dir=out_dir, save_masked=save_masked, median_radius=median_radius, numpass=numpass, autocrop=autocrop, vol_idx=vol_idx, dilate=dilate) mask_name = mo_flow.last_generated_outputs['out_mask'] masked_name = mo_flow.last_generated_outputs['out_masked'] masked, mask = median_otsu(volume, median_radius, numpass, autocrop, vol_idx, dilate) result_mask_data = nib.load(join(out_dir, mask_name)).get_data() npt.assert_array_equal(result_mask_data, mask) result_masked_data = nib.load(join(out_dir, masked_name)).get_data() npt.assert_array_equal(result_masked_data, masked) if __name__ == '__main__': test_median_otsu_flow() dipy-0.13.0/dipy/workflows/tests/test_workflow.py000066400000000000000000000032241317371701200221720ustar00rootroot00000000000000from nose.tools 
import assert_raises import os import time from nibabel.tmpdirs import TemporaryDirectory from dipy.data import get_data from dipy.workflows.segment import MedianOtsuFlow from dipy.workflows.workflow import Workflow def test_force_overwrite(): with TemporaryDirectory() as out_dir: data_path, _, _ = get_data('small_25') mo_flow = MedianOtsuFlow(output_strategy='absolute') # Generate the first results mo_flow.run(data_path, out_dir=out_dir) mask_file = mo_flow.last_generated_outputs['out_mask'] first_time = os.path.getmtime(mask_file) # re-run with no force overwrite, modified time should not change mo_flow.run(data_path, out_dir=out_dir) mask_file = mo_flow.last_generated_outputs['out_mask'] second_time = os.path.getmtime(mask_file) assert first_time == second_time # re-run with force overwrite, modified time should change mo_flow = MedianOtsuFlow(output_strategy='absolute', force=True) # Make sure that at least one second elapsed, so that time-stamp is # different (sometimes measured in whole seconds) time.sleep(1) mo_flow.run(data_path, out_dir=out_dir) mask_file = mo_flow.last_generated_outputs['out_mask'] third_time = os.path.getmtime(mask_file) assert third_time != second_time def test_get_sub_runs(): wf = Workflow() assert len(wf.get_sub_runs()) == 0 def test_run(): wf = Workflow() assert_raises(Exception, wf.run, None) if __name__ == '__main__': test_force_overwrite() test_set_sub_flows_optionals() test_get_sub_runs() test_run() dipy-0.13.0/dipy/workflows/tests/workflow_tests_utils.py000066400000000000000000000067751317371701200236130ustar00rootroot00000000000000from dipy.workflows.workflow import Workflow from dipy.workflows.combined_workflow import CombinedWorkflow class DummyWorkflow1(Workflow): @classmethod def get_short_name(cls): return 'dwf1' def run(self, inputs, param1=1, out_dir='', output_1='out1.txt'): """ Workflow used to test combined workflows in general. Parameters ---------- inputs : string fake input string param param1 : int fake positional param (default 1) out_dir : string fake output directory (default '') out_combined : string fake out file (default out_combined.txt) References ----------- dummy references """ return param1 class DummyWorkflow2(Workflow): @classmethod def get_short_name(cls): return 'dwf2' def run(self, inputs, param2=2, out_dir='', output_1='out2.txt'): """ Workflow used to test combined workflows in general. Parameters ---------- inputs : string fake input string param param2 : int fake positional param (default 2) out_dir : string fake output directory (default '') out_combined : string fake out file (default out_combined.txt) """ return param2 class DummyCombinedWorkflow(CombinedWorkflow): def _get_sub_flows(self): return [DummyWorkflow1, DummyWorkflow2] def run(self, inputs, param_combined=3, out_dir='', out_combined='out_combined.txt'): """ Workflow used to test combined workflows in general. 
Parameters ---------- inputs : string fake input string param param_combined : int fake positional param (default 3) out_dir : string fake output directory (default '') out_combined : string fake out file (default out_combined.txt) """ dwf1 = DummyWorkflow1() param1 = self.run_sub_flow(dwf1, inputs) dwf2 = DummyWorkflow2() param2 = self.run_sub_flow(dwf2, inputs) return param1, param2, param_combined class TestFlow(Workflow): def run(self, positional_str, positional_bool, positional_int, positional_float, optional_str='default', optional_bool=False, optional_int=0, optional_float=1.0, optional_float_2=2.0, out_dir=''): """ Workflow used to test the introspective argument parser. Parameters ---------- positional_str : string positional string argument positional_bool : bool positional bool argument positional_int : int positional int argument positional_float : float positional float argument optional_str : string, optional optional string argument (default 'default') optional_bool : bool, optional optional bool argument (default False) optional_int : int, optional optional int argument (default 0) optional_float : float, optional optional float argument (default 1.0) optional_float_2 : float, optional optional float argument #2 (default 2.0) out_dir : string output directory (default '') """ return positional_str, positional_bool, positional_int,\ positional_float, optional_str, optional_bool,\ optional_int, optional_float, optional_float_2 dipy-0.13.0/dipy/workflows/workflow.py000066400000000000000000000100111317371701200177610ustar00rootroot00000000000000from __future__ import division, print_function, absolute_import import inspect import logging import os from dipy.workflows.multi_io import io_iterator_ class Workflow(object): def __init__(self, output_strategy='append', mix_names=False, force=False, skip=False): """ The basic workflow object. This object takes care of any workflow operation that is common to all the workflows. Every new workflow should extend this class. """ self._output_strategy = output_strategy self._mix_names = mix_names self.last_generated_outputs = None self._force_overwrite = force self._skip = skip def get_io_iterator(self): """ Create an iterator for IO. Use a couple of inspection tricks to build an IOIterator using the previous frame (values of local variables and other contextuals) and the run method's docstring. """ # To manage different python versions. frame = inspect.stack()[1] if isinstance(frame, tuple): frame = frame[0] else: frame = frame.frame io_it = io_iterator_(frame, self.run, output_strategy=self._output_strategy, mix_names=self._mix_names) # Make a list out of a list of lists self.flat_outputs = [item for sublist in io_it.outputs for item in sublist] if io_it.out_keys: self.last_generated_outputs = dict(zip(io_it.out_keys, self.flat_outputs)) else: self.last_generated_outputs = self.flat_outputs if self.manage_output_overwrite(): return io_it else: return [] def manage_output_overwrite(self): """ Check if a file will be overwritten upon processing the inputs. If it is bound to happen, an action is taken depending on self._force_overwrite (or --force via command line). A log message is output independently of the outcome to tell the user something happened. 
""" duplicates = [] for output in self.flat_outputs: if os.path.isfile(output): duplicates.append(output) if len(duplicates) > 0: if self._force_overwrite: logging.info('The following output files are about to be' ' overwritten.') else: logging.info('The following output files already exist, the ' 'workflow will not continue processing any ' 'further. Add the --force flag to allow output ' 'files overwrite.') for dup in duplicates: logging.info(dup) return self._force_overwrite return True def run(self): """ Since this is an abstract class, raise exception if this code is reached (not impletemented in child class or literally called on this class) """ raise Exception('Error: {} does not have a run method.'. format(self.__class__)) def get_sub_runs(self): """No sub runs since this is a simple workflow. """ return [] @classmethod def get_short_name(cls): """A short name for the workflow used to subdivide The short name is used by CombinedWorkflows and the argparser to subdivide the commandline parameters avoiding the trouble of having subworkflows parameters with the same name. For example, a combined workflow with dti reconstruction and csd reconstruction might en up with the b0_threshold parameter. Using short names, we will have dti.b0_threshold and csd.b0_threshold available. Returns class name by default but it is strongly advised to set it to something shorter and easier to write on commandline. """ return cls.__name__ dipy-0.13.0/doc/000077500000000000000000000000001317371701200133075ustar00rootroot00000000000000dipy-0.13.0/doc/.gitignore000066400000000000000000000000131317371701200152710ustar00rootroot00000000000000reference/ dipy-0.13.0/doc/Makefile000066400000000000000000000103451317371701200147520ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d _build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " api to make the auto-generated API files" @echo " dirhtml to make HTML files named index.html in directories" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: api-clean examples-clean -rm -rf _build/* -rm *-stamp api-clean: rm -rf reference/*.rst api: @mkdir -p reference $(PYTHON) tools/build_modref_templates.py dipy reference @mkdir -p reference_cmd $(PYTHON) tools/docgen_cmd.py dipy reference_cmd @echo "Build API docs...done." 
examples-clean: -cd examples_built && rm -rf *.py *.rst *.png fig examples-clean-tgz: examples-clean examples-tgz ../tools/pack_examples.py ../dist examples-tgz: rstexamples ../tools/pack_examples.py ../dist gitwash-update: python ../tools/gitwash_dumper.py devel dipy --repo-name=dipy --github-user=nipy \ --project-url=http://nipy.org/dipy \ --project-ml-url=https://mail.python.org/mailman/listinfo/neuroimaging html: api rstexamples html-after-examples # build full docs including examples html-after-examples: # Standard html build after examples have been prepared $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) _build/html @echo @echo "Build finished. The HTML pages are in _build/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) _build/dirhtml @echo @echo "Build finished. The HTML pages are in _build/dirhtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) _build/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) _build/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) _build/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in _build/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) _build/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in _build/qthelp, like this:" @echo "# qcollectiongenerator _build/qthelp/dipy.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile _build/qthelp/dipy.qhc" latex: rstexamples latex-after-examples latex-after-examples: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) _build/latex @echo @echo "Build finished; the LaTeX files are in _build/latex." @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ "run these through (pdf)latex." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) _build/changes @echo @echo "The overview file is in _build/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) _build/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in _build/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) _build/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in _build/doctest/output.txt." rstexamples: rstexamples-stamp rstexamples-stamp: cd examples_built && ../../tools/make_examples.py touch $@ pdf: pdf-stamp pdf-stamp: latex cd _build/latex && make all-pdf touch $@ upload: html ./upload-gh-pages.sh _build/html/ dipy nipy xvfb: export TEST_WITH_XVFB=true && make html memory_profile: export TEST_WITH_MEMPROF=true && make html dipy-0.13.0/doc/_static/000077500000000000000000000000001317371701200147355ustar00rootroot00000000000000dipy-0.13.0/doc/_static/colorfa.png000066400000000000000000001540521317371701200170770ustar00rootroot00000000000000PNG  IHDR6XsRGBbKGD pHYs,r,rMvtIME3ktEXtCommentCreated with GIMPW IDATxݒH5YU[3++(tWWfFYˮ̚ꞄDfxBqc_j3W8ԯܗů@?/gKϗ?/n?w,Ŀ\}5CgZ&Gn }}jߟϧ9s/_p^>uKCsKזn. C_1W [w} ׀}~עkQwU ?%JS+mv_3 J v_)OU_Sw? 
[colorfa.png: binary PNG image data omitted]
dipy-0.13.0/doc/_static/dipy-banner.png000066400000000000000000001134361317371701200176630ustar00rootroot00000000000000
[dipy-banner.png: binary PNG image data omitted]
dipy-0.13.0/doc/_static/dipy.css
@import url("./sphinxdoc.css");
body { background-color: #FFFFFF; }
div.sphinxsidebar h4, div.sphinxsidebar h3 { background-color: #000000; }
h1,h2,h3,h4,h5 { color: #000000; }
h1 { margin-top: 10px; font-size: 160%; }
h2 { font-size: 140%; }
h3 { font-size: 120%; }
h4 { font-size: 110%; }
h5 { font-size: 90%; }
h6 { font-size: 80%; }
table { margin: 0 0 0 0; }
div.footer { background-color: #000000; }
div.body { background-color: white; }
tt { background-color: transparent; }
div.leftside { width: 614px; padding: 0px 3px 0px 0px; float: left; }
div.rightside { float: right; }
dipy-0.13.0/doc/_static/dipy_paper_logo.jpg
[binary JPEG data omitted: DIPY paper logo]
dipy-0.13.0/doc/_static/hbm2015_exhibitors.jpg
[binary JPEG data omitted: HBM 2015 exhibitors photo, created with GIMP]
oΞE nEYǺH= I9q\6=޿x>IMׯ{~؛OxS~~kk}Fݹ{}d| PYf5!Z6,ɐE,ETGZK3K&,-%Ýoh򡍄Hɬ"repYX(9{`es; j< ~%{omb/]2&ebٷՉi_ڿ{;vեɠTډiْ@VG|y hu\Z1`:Ò3;ڿ}wWĝ;zqj!5T䜥GIɩĹD3,Vv1:YCFcKD'oN(y YtBN!Rn/|xRY"=-E>V@w"h_9A9-U 1FkYx.+ΕSm^m;+eF^{i uقFmaI+k&+'ϥɐq?\13VW's1;Ϛ/0E-zVdQr?,o}pĢиoٺuˌFRԘZ RLL~(PFFcZvrsZ#6;IgK\3/4yׂIyXL|aaQg?AIxIXH5o_~G1X2򁸕0b*%%`.F#BlN3.s7aK?>flɂ77_y3ߞ`~'sE1uxJ\cO;wy.>B@Q+",ɃtP0bGjL2K ץ*{K瑵:9[|9 %cpuC 'v|~?ԏ:/w&6)Nj+/7X;z-V|+ڻQr[^e3جq') b=B0' "WE9rh&.–{Wwg-ԥ*e.OCxYW4xJ #/#Mnrff=r*NFLƜ;36ϠOg|x"=2 G4OL[Xr3<&z: /quzکdbNH_Bq>=lԧYl*Kj'Nl27qPprj|HE)ro·h٣&U1ziE_8orv2b}|i8ÛˣX}#D+/ ZQ1u .Fz /yCrWYGђKCHxZ&ҸFHM`'ICALt"^H'֤/ k% 9VLz"=Dזo*&/3qqFE~.K&?>eoB3_vBorЙrcN^G)Ajeg!2 Ik$;c"}gLڳgE-iiT@42=Μ"!ڎ0Ybx/.L}Ckmd틥'$F1Snb"^>^򣏖zq7⊨k46aT5h||ROg-Í&?K6ew\&j) ?urz P"z_ތzK뎏?p39/oo^>Iq°$C:S}NIO<-S'Qwipߓܣ>=*xy< @ΐW$'Nq!>/l0(+Xzq2)U :||%vt/PJmoLUYJŨc՚6[J $g1#Ědt-t]bm{sWQƭ?![ml6;/Ӱ> $+ژj<_~\6X̙>7Y/3~̫i^9Vwͼ^"$Ik]3ԩ!M0/2AD(|ɑmqX5I=7#wRiD^AeL6qhKD/]:U'tAJAg/} jVzq$dheI>>ZQ}Jޯ YT̰(LiA^UX- A \Ӏ&XewթbA%Qfpʑ2g%9ޡO"Lhc tˌGxBzw1cpGD8utTlNMSּp4U0|~߼z­Yכ֩Y&$2:0afy۸c>ᵁ饸eޙ]I"_8ՖA5FS1)J90Z{y L=Id:b$O4E:o~X<57֒g&S\YуIG/V9ؽU>`[ϑ~,8˗;%:NI<2g:SnT4#8m. z;F0s E#Bde8 `lG4)]YK$< I8Di2;M&q%^C?dBc})C۞7n>NY,+ō<Sŧsx-{c[g|GXz" ur~;.ͧ*x ^JyY,y< @  @̌+_̵_B j^+V^,#ǷNziS|$-N4O#R2]:+>8Y լI.sak^g)AJ^dqgć{Q'7obYUFqpo._)Yˏ9or{efe_ IDAT1?5پ߼1oѽ~N]65vo}~/Klk}ˣY,y< @.xOg۳i콵`k+e^W}}{e/?/^KM_9xyG\ ֟d?޷?"tsVdqv" 6<1^c~cWΟc׷{,/5G\;N[_R5-FpIĴ'_U82_L=6/!v<'&m~Υ=67k:cLCkaZEEc TՄn * p.a@'R¥HTFc]Wxte9 c'=.9RRNEѭj8+5T{ʘAOksNHϡ 3PH$kMOI׭H@na2D9:c`<4'Eͱ9=%W~YӲzHY^(ӛ,bytL8j{nls3e>p8v@G|el"!5::K@U .wAݺw欮x]cgQJ7 >7c=$/Oxg­lR̪:2>wja^`Zpue6z.[`p<Ylx* =P]^lxwlOaðga;}q#mxߗ`=+4Pz6sWާǏ[}NwsH΋ZJk% &a0;핶}-oS|9 -v2^Zhmén'nju]>hm7CNjv[2*d$ ԕLd 0Ꙏ]\!oݢm+{gW˗O'`ƞg>xܩ_B{oxŵt_Iö POTnRGZ\#9=Vkii((RVz܍^0^#g`})̚"3kp:J!X$uZ#>W0NvN);i&c># %YIYD-L*G&/ ,(+(Rrw9]y*$*UVZ5e(D8L"5zwE&AMcE#JFYGLBED#829 1y\!cfplA"$xϾiXU̎j C Sv[}ͨiǩWM{/uҲvcBɒ1IGjKR/!x  r戆EL̑V^Yqy/#g1m3 dҊ[N(+sy\6ln1:JOxؙ ,cIވWv2f:ntÿ6u%+q)GS|p2Ns渡Q\렕SjNGA3Nv.LǙ}*)nZ~$ݬT!T(0[+r2Nl^H>)f у\H}Tj3ŻzpXWWTYoEot~l0iOΫ\m=C;{*?!6?X%~Km5; /~ᴊY{TU#.ygt5zWŴйg:̪oW6hf1~sSislJ 3<J+> ynzk$6zYlxyB Bx͞b *ƈ/0DQ}aA>c~saaQ03ƀpy\b@>"yq O);2ޟ.-wEhgD1*83ጥKZ8|tCO/jiPkM{{޾oZ(kōאټR'FDڈ%HPV'~g8aNs)a&W7lu,=L,\\yΝ9R)ne4x3 0=qwkn69?Ԣ=oU=ۯ[t.]xăhx6X.4Fϫi\ķ#Kȱv4ou"LNQmTA&^J|IV$[r4KNCYrXbC;`azh}UJ&I)KqE(bY-udBOKV+jZh8V[*uj ajHΙ9i EΩWdv72M-6i%o))7X7!$mdmYIQAcϒBў{M+c׬WyoG" % dp"U}]/!B?G`ǍG݇‡ׇϒӄSg ~4rOK>1(ПE:رLygGq4SzzI [ZhoD1-.jt6u^YT`!;>qO0beE"Ŝ0b ofRN,[jPg5~aD, q٤JوS qth>BahOT8/lyy\˄C'Nk釘(hO lEFZ*X-SO\G@cjbLΕ9c׬Obbv,1)[T2" k~2"来,kc&vO] f(nk_#_qBB?ߖw8ZJ ȝᓈdjTub2qENedfIܑp?T1;Jq4*QP%IG$)qs]k y UTޚ8BaRbR}݄rV jٻoqWW8`e[\[79w:uYE4j"ϝXN=-_Rgܥcn_bȸ.%oۚ r^\J\k{;Ik}bLγv<6\Yp\^iD(˧?jLBR$?= $'g<'U?Tik)c ~>`},.|΁[CI(X0~1?I}'o՝#́|㷶6rYWѡF3jz'/[l84bfU }-^%YԌ/N֦-kpִPz֎"L;H"#wsDANFȄy&5Tfx&ۊ+ a%=MGtfmeusLQjR$y9Hj*3Z!ƪ:1 ^Fä6@Q͝mϊAqp\RK8yjϰ5S^]{WnD>B0aca8BrM˫rgZroN ~Kw4_ab%Wi<,T}{GCoVڴDIiN\Rôj # |Iyqb+Ua^ǼyNq5c{ 뺪GrO}J7UÝ:ѹ75j`wAStNMtzB7^ؠ=8s+8)b3/(b޹Ag/Rㅳ78 q!VXCuMwwM֬3[_YN57-g,Wa;84\:W'v:5~O d %XKR,<i*߫lQU<~z|io.=w?K#40ހP夠oY|e>ލ=aN{s}u6:ϴ{wvUk0xtG9P<8'ԁFXt:WBy)RV \\KN3~thLrաG)3nuYo I#S i|<PWVWi, /3Cu$'_~2ىq>yc׾I5ag.>Wo;: zP7'@ EȨ3;04z$^H9;CaU фwoYIIp`% KYhO<r .W?ǻߡ~se2.?;GWYF-vssrOpcm۠;2BI~̩J#7F|ns>nY3ԑh{[0 V ? 
jX }+^YTĔ' L4 xHjW[̨LOf, ؓ(\MQr-26 qvevr\č}~l?(gyԬ IJyx!HoGh@"HP&cx3ì=»tGsB߻s c^^rzeJZ{Vm!h?#v6./z4:Dqrim/~.E YAr8-&[,[Q0b,܏hth/=͍kr+t6_le^/{/1?+4Ӕ:EYS%ݥE)%ܻxF̈=`:)/nN(~OooO}J A Ye~yx˗8+h/I)S=ĭ ؿs@9-w:~C 8H; Wu8=`;bt8dU֗hӒhN[0ntO/ I+`p/Ϫ5*y^Kw@9c&S{mgD;G럾DHK۷u<ϭ^ptWGlӟsY' k?}oK9'[|yl~i|${B5]g2!?].Yg< {f:cw*4#Vrt\ܹ]ûJBՅ3gi⍽}U՘ /qfi7޹d4#OK]ǙʋsH86 u ňΛoV?.@>.@>{DHҘ(%娋Q^|gq:/y1xȺkI[)`gXę4:)à qQJ o#TY/c*>lр}+}V.0>39Nrc;9,fj4'g%@Mi[FiD{ex,kоr$KS:m=|V䓂Ӌ4٥ԡТ8q<i?9ފN1*y 7wh9O|0ZS{}{Ŵߢ3{]:,X_76EԠ7p8:-3":u|!1cVzmҾ KY9\VSV,2Q4u YHʠ{F 7 hXJ_5vba҈gty"W 3*=wH7:'#Հ~y`{,CZgy  Oe"+yWN$h#j.'c36@Ez!Ĺ5Fmu iu1ʌüaTk2_5@NMSl%5Vj)MQ!a2s>na0iRCSw^ܧpp?lqȌt½*%avw.e|a5)Jha\a3Uz*WRS~P䈨lQC ~$\ʰ! \=&ʈ2&rbQGSR1"C<&8aRgTA$sIޠ^KgtY;s,gәkx~/-% Q]Z^X{CgFA1-AkW4{N/q ֟_#jdN,NO  kXT۰jGϭr*^`tԊlV^l9qYavR-^ooޢbҊ{ gsG0!iֺ|`o&1|89g>,umswJQ5bF_]]lʤ4ά2c*z MVPtV܆픭l܊Z>ki|"w\LJn;F'E f7_Tr-kp(vtP7_⧟?b=3Y[F%;R⼳ׯix-s֝rv>'˧烄, cZSY(3kĦj3K!D"6QU.hDʙVjF3^M$VLrюL6i5U De J5ܘ١%ZuS;Ga)Uێ(i V̸KU9̗>IA^eļ ;[3ኚ^^AM L]lT0];O&'.2S744*5q yuKK:֊ RKBiWr.rPıG iTXktPhx̋E @krc*0izn|bJ9KkHik"ͻ}p$ML@CBFYApЭ0ڵC3<2^0:XAf&2x&jyr>t, Zv%GqaQpLR Ia +ݡWȞ[4eVo̻}_}b_7s&"۷KρSR25 wfז//+lX*$,:@:)vS˾nŌz9z":TSʯ="lѩ6!a{)]ܤRĴttj˧&V:b-8g3t2o֌:+j@}R|m WdqplUN 6۝M]X9WhFz'.< OsNB!F_*(f%UQdQkagX ˼Zҙ>nν|kBaȉ(QΨ+p8wɟgrBme$E=I+s+,뫽рbe~Kwǚrk}éٜ֞Nf6'ufqFɢO%1>{w9nAu =Ů^=ݠtZOy h0apD^g+`%Z?_o E^Yqa ^͍԰wv^,Czro N<2fov\Zzrz7abgzGXo:6t-ͬ;',HQ:|\AU%"Ď!;ޛ>ug~w=# Y(nq'vUS )HeNRIuNn Kh͖DRgČ \gOk/(y TҝT;g#(\Sf!Wh9Ezz\neJW#n$ $vJӏ%yPMB4'pQVM>(Rj mfT UCdJa 1m  $ua` "!%0ÂxU薁DV) ڋ)1dw"Vh "2AZ3D춤aR$= IvᜬݮZ]LɅ:O5Z2)lѣFtpMo^f X2E\v(L}D%z^x\*WmPjAj'RJRK[l&JSt'tRWWƠ 0PCb"HCT7iw(X\@n5ˀAk"(Qi`. g? :֕?afH$zcqܚ¨D? T3 F)W97NVCK("a.g b7b:y+>6 Ɖ.Cک)+facBd>㫮&hhzuc?삘Hɪ#0,h$N2/^Ԉ qgh#fW譍d Qz#BN\^ǔ5THF-1aK3pkǸGnSk!Yٔ|2zY19}%o?R,sNTjr%Nd9drjYܥ#E`X9*O|}ke&~ՕNZ&uԪD9("9YiEzmxmk̤C|^Ѝ?pBƿN?kX^t}<51=WKMs5fH1xhh_zLCݸjuu::P4t:U{DyG)JF'|)SW&TobC C*ُdxUe/<6Ӝ4I5Yh3L'9U.o*I6ݬ>6~9DVVx2HA`JC 7UG@ %N JkHa0%xFCabҒHR cJwIx&D&G]Q=LU_z(s"tb*:{Dڊ MD=erQsx0Ai PV&bc&! ,jD]Q( ,RwqssȡSxlI$t0ŗ^ϝFTWɑ F]V/IiRhT1Q ;b -(}ONE>IʞJYq9A(EA9jܟOuo}Aqq5pŖ|}x'|8ȭ'WgXŘGKB IDAT_~ m/ΐSY9X%>u#r^;4y̡6_ސtGxj͒r`fVTQxa[N$FM$1#j8[BQ\g:L\Ĩ&n*9ze 6- KqҘ$sMfDs1cz35fv'D 0^u"HkWyzw%iPrk=6stfFW97q`%ocdIUoY²Gd5P:+W5Q0QUMdt9|j2s0`jrih[ʮبO}0r{K A$%_yW;9jmm5/%t?5Bl`<ͣbOQv -5GdNd0A4PyC2lRuPS&*ڨhX+Zʜ3ꨤ~#W]ZJ! 2F .O(s3+'g%*X>F\yꉶEEcCHb AfM LFJh!N#.+)3O똮xQA ر\& s/;TJq9 |@zUQUcPTMaهxԋ4BK1q6S& 6;ՅU0i7Y)ehW .kHZө͖Jܗ|{P]5<<:?w_ޓ+W\.&|dfeA^"{̵/>,31oMb&5rcs]x2/lI=4'UչEy{؅Rʞ?F_]S8}cD~0Fޏ`T8}/1M6 y?" OI!|`Dx߿FbծH1-q+Qic kb1Ĉɷ!8)-3˥hw9DTԫ؊2%5VTX@@0 ΉN|V2ܫ|"[Š"`l)T,JaZf$A͊ih 4ZMiʹdէH I9-1"x_8a xl#H̬a(gk"bfgu|; ,dK/vL&o48葶\闔[r_ൂ(mȉpY"LԪ*ٙv*ߘpהi \%k̯?,jzIznzx _Ɩ/YƺjaA߼˗ܹ-HVyVRpIw`%?%~Qڎ82oeP8iJdJW#Dĉ k}1<2G_*^as'M1Q'W2[Q`Q"2Bj)T8_R/EZr"rLp&iXȠY w⃀2s/_xJUp ^T/&Np*ga_öJkόy)+_{zcjlWU8 _ܒ~`_vkz!Đ7v,tbuKЊ=wF&4DdfwS[tRL[#)e%̊k7&M<67.qFwwΟwidbL2,}1*iw7s7y(uNyoβ|.+'"xN˨~8`fu(4ϵr3v=be/J&`T]%fjq~?d -`*k9p\KJNb1OP!sHU"#j-Ng/YEӹ9=qB^}eXe-<ԡDBb\ &$V"C}UL\t0% VDo}`$ZS6EP4P~WYM$sו-dk 룯? 
3]#/gRUދ+/3Q[riF$ӵE܁v3rת{A-jX)K),w;پ-}9.o 9\\rM@<(oTH%O3NWUQPА^Ql{Խէؒ6&k&-5>oK+9BcVSlM4 VPU~ڎg q`_OA8 2$$x5u,XtQQUQuKw9V| A-4I/FT+eJcTޝ-"mcK [S`¼[ ris|[¼XvUwxуӚ5[r|׵5͙n#7?uR'7/^ >tꎔD8DkV( s^M9%]MREa8uW^n<>7!!U^ţi'Rqjps9s(XJK[#⽼]PGCﲲ71]7A w`[&w1wa~i}]PnݞWʑw߭F[^}fs_8g\}l~ALzo/֧wc3GeBPX|pAj5/ꕸHز+Ի - Xgu*PQK g>}=WĔ+UӶ *!A#{eAge^N7^Z{bIz/⊊whhEZ&Db^R)nM^Qcz3gk@_wnk7\dn3a͒jcW ޘ=׎IcVȞ,W#=2_OO l[o]b~˭3r,,]|G*ӣ2{vIIژ#63ed4bSkc651) $EX$BJ` #x]^TM2Eu-6KI;$LFuĩi$ѐ3qM c.AHF4q"x\a)Jf@"AT(1i.|w*F-$)M)e,f/ EL$,<x }kSG3C4@j&ɭ|Mq)Dp&\ΠR+cJU/r!aZOb(`4V4޴]yUALUJt]vBu'+gIJQz(I!H]EsjAUX*O^=QB(XyI+Դ0HeN5W+N(l ƈDb"cNI ^S̟eR5ܿ|k @D!*&r+Ѥ$Z XY}g1Bf|F&&C9Rz qL71l9?t+|p }trIűxx)*I-%{1&>t~}omʸ0,(͵l-tmi"z B2ohȰ/|X^'x!qUƫ]Cmyb᨞Fx5no$~0r`u^v&Roh0ZQ*J8qn-IƄs'G- @;yH$[- ɞj/֍poEyP#vuw+=ƘJz8M"@H ^6p/OCaz@>x%g} ()k7\[ >U6[;7J&O-Dd , UOGe8$x8WGE=LT US˪jm(lHhT5nF)ȉ1'=b;'@Wq:CLH6#Tjj{#FrvŨT|о?Xy% `'Y0}q>\sM.\(=f<ԍ|G'g3fmtsyC9Yツ)7'YIYfo}~NຬrnvHv)Kym£C>R0=;6|o9~FCC@T#fRPIrZ%(f!S*o[9q NA*^OIf ֘XoB&&{D {%{ ;\>)yTz=S-"0y >_CJI$Q>$vLu/tMJ {^ܰZc\\j OiO9Z&^pNO)#v9VlL'M\r=s,yiUy<'ÉC6۴5"b1b1_;|#(x}O<_z"12U%RK,zC*PGV!Ci!Z/t7XBgՕ=,dxvS}taZx(d3(`\b&d"F"Kzš$A%obAjLs8%)E ;!`F$4 oRh%lO3diqP2yC (^$АL%m Gc]xK=B"Da USP!wxxؘ'*'t͕'&iN{awʥu´;:zwy+$ F]ʿt0o3MyrC woE,OukHBLw9rt2G|u6{|h +3ml26\ Kc~SPIoqY ~X\ Z ?9x8w` Nw{c0gֻ5[nG/r/-G>Q=x$d?d<ۯWuWoџGYuQ*Rlju6X騘d_)$y⴪JhȾUT{DUWa5pxɌjhDr( k·O?9bkTP4 h?Idi:$f41lĕUerЃϜ~r4?C4skޟ4" ^wyr`|y+'_|P?wʮ|c?W\c7~q8e7>jg9+lLU⾗zWי2~2(Kt~FcKZGх):蛵kKʖzd="+E@q`3ZEB^JZUwe-gd*҈&ƠSP %FI(_ްK'L1a4HFu1nG!v/[9M2k鷛,MwXmf}|e*aRt 8 >> ;`rHVVԾI<'oyQRi ވ^ P+BnzTDȽH\ :(Ԛ&P'۽6SfQ]]T\Sp)7%WKsM4g02FhGBJo*xj\UT<9_RZXPF&gV*U!Ҫ1E, T)_Z5@0D()ىRotԼanRf# nLYe7Ah)XzH-Xt!E,A<.\Uh6HͩP/u4o% >*M:(+T%eZ .5FZ T HU' lk"~cY gr`^D"a4,8(8}F#': ږBOR ⚚,'~dʰPfWJk{qA3ֿing8tkWxVaܨŇRe*z{q>.pEMbI}q>䓉1e:+X|4d~~t)'_Mt_oPʚf{zi(QŖKXWh+ )Ժc-],|Ie`+1"ƠA:&uQVOăRFXM)^~0Y^"DғxkQH(/=nģX?I{uvKLZҞ4gpjl]϶uA|p3-6sN|_awG++[*;\=̯=Enј/ޚcpӧ̓^Stfpe A]["qƒG>xKKl>7eF/z1G0ٌz&DV7$'4^&""6:%k kJ`0n$y B8)5p^\o_y,HH⒥VoCڭ1Ll<$kP 3H7ȏY93{ؖ-ðh&BN[ s1MSPX*zj6Id7pֿfdQNHI1E 4K-}SfJ |_SEH L02JĖ4wI ܒ[N*01P4sn0ۛLKR*tG4VƘ=bC-I1(Cq[K'xI4p4"`KCKuQk%@#Hh 6ȑ֝&œG辒R9vX(=FIː+#^EFauL^5ScfgAa#ҿihwY͓ҞL)MUy!?b9u-/7Yr]z'LLŇ0J[S^{n0Xjĸ! ÁE.pVC+LffFO>UƷ'qUKԍ4g꒎m:YuS_Tɪt'ZyRQj#d=`iU˫JT3ѫ,c:WuP0rO+H L'ok#2bs)ä) #ٻ$gȒH` )Ҝ^bi-=4'N}C:7H7TsЍ\ ifEҖ]#1ܜbQ 1[¢⍃R ǯ92`nܨdቑFIվ0ڊyGMH&Y{WMoNG<$ JQ*Zz+IE31Pj`Hzoq(ĵʯ@SaȞfmDǰe@Ȩ' 6YKBԊR"D53~GUDUs5EMxgYf.bt\0M=IN;z"Q9#8hZaJ@ юZ$pUxK%4y"5(Fh7s ]EQBjt4u;@8\*V$TVm9Ҹ>{Lj Z-%&RI$ϐcCT6TK5,U"Gdr? Z`V?-AA䥥 àRPPͼ%[y**2[6*XBPZC$vdPn5:%**E\:lN$-6$~xha)H7='NͨrKx"Dk1XF'3l7z2]LjuH&#2zsM˫A)ht4՞-QA"ӝ)VKf=7/ъ+WWcUShQ qf{ǡ*tb;? J]_ȑ 8Lb,7W(}I{o/< WdOyEۓbTٜrc~Fh/Zdڶn0SS\uxaC31-o1~"er '$7MS7ۄk7ɼġ~(#"lxhG.bjK ?ҭ7UM"_^kuWkN<=^('t.^e^H@~H}_l7E9k5qYAFO3{$gHs$&J7Xyp$WFB7{,[v'u9ߗ`8HB A )(R"EUf\.Ql~r*'M%R(A&r{͡o^p{.ښ]U}뜳O!3Mm\)EĄg &$DDx*G܃^4t q}DuZ.!@`Wa&Pk^^jQ A e!9Νl]T=FI <& kkviOq>EQXUX G(5]6ʼn2Z3Xa3$ -b%&vN_fl>\#//6@[4K}]D8|3Osr\9wKG;!#%O D .R|c̾\hQ6H'=!o #D1.ԤVw  i%q%8cwF tbx8%RjQf%kz@ID;ɀ=dnw($ 0cX4XŨuwjA(^ lA p9FKQX T&y|tGܖ$nTMl m!1m)=vFP#Ԃn7gqv@UbQ2tZSa Jd e,;jY-bB[- LԲ:3:-VIih&cD&'&)rX!_,}7<.Uva* w3ADgq VWb ~ܐ֎^;=(Bugdz Xv7[$|.sg)>S|<21SOMer2ꇈDؙݕ&Oq߯\eM/v=Qѷv9r_=OC8lkO4H[(TT(CXAj,~F#(XaPqkKpmchX2t$;]:dYffr3nКآ(@B"ü1(+ ;<6 ""5hi|)G)ѐCǑAHP$Pj0GUoxZ!Q6',}Ԡxqe ?_gӳC n|+^e'Կp4a)+NQvî-S]wC5*񸰏UCC񻩲 6=»8R+6]yhxa5^CwMN{UyiƞK!j GoRn !/v 'c{w, /H;KCY*β!x [xu@9Ѡ1NNdqIz C#;, ֙X'{<<6䇊ZCSIIH\w{WKr1KX Z-N;hU֎m%~&c#|YZ'^J?$ *gDgonQF^Bbu{7M+dgnsf_NY^9}Iv%[W_KD|9&<\zڛXh79۾1\'fH@C2"H,Ã>㵈Ä'W//d`#?xsm}_?Wҝ!w٥a@DTF\XlsdCoXhw@[=v;T?~Q˓?Uґųr)aAsi8Wx{hEҬ(o 8r,$aZ ܻ1qwH%k77ɇ+YƑL<6egiv#bE×gc_|ܔ ^ ̯ei,L2{*3lw'|? 
uDQ.)ⱜc4KHl$iMjk"&WfYxwD5D>(1;{uI'Y%<\.Prא6!k+6U?go0"eN (GMr-66f4dtrU.y^IאQA-z{CS/Ň1*FP:aso Rp3;žkbL#>bXP b"_y\ +^d|I NȌg`έu#0gEE<YBCA%^ |p'а(chݑ>R긾x項Q NN7CX#ċ'p枪;Wy/"73`Ds5d2%*'e(JP1BsXSڅ lR\O?h!VÐOլ谬 q42hKڻXyMaH47NLqL&~Zogy>B8t{@5RnmVI(.1iIF@3U53K\c?c:u^ zkzO玍IUgbZfG;1 z}PZMuɲ(ƚqR52EJzfROj jlK;EK%0lqYZ&R 6Bh9?<m2Vւדc2r[[:s_yOP?1xf;Ɩ俵 <'uiq>g}*'oymi[?M؉sCh"S) 6S-dMgT'@/ ;atu[>I3gnhv0& J'њQIhPBxԋCM%!D}P|2TD%UVpr;kIl {>[cfb!f2 :iә2ZQQK3zS\ðn @сEUbXET~[LDR/ӈ  %* fa/oi\zBY-HJPH3C RVzʄY+bjbl6lԆfEv cQ@_lBME3:ޥ`2r]!0|efR^9g]GݙeĬ^glsW} PȉGufdq-DwRƘ?>˝}Sw1M!w-Jsz۲(;F,^:DMhȐ^_)J^J0O&n'ֽ'Fm=;S[{YS11RǔWR"Wj{VSgNrGy훞ΐeཿpYF+̜1^gg?wIpkìnWzaLj؋x1*7k P%TVoL?~yz;BCtg= ^pe'S';c d~'R[gl<Vi||0s! C 8 2]I/LR-6w+H4*Y 9N?~矼WCCrsz|~Y![YY~,ΐlLw1€3u6X_5S$ (<ԵK IDATϟ-n0aHIx zV/Xۆ Gc-榅Z'᥋"+w8vlE!/<[iJSJR> \~/ afWou:kNoyF;T*y2z <Wvz}ƾ8M歔C_NUyǽmeKqȔC|pXE.K4 I{".;Hsl>}RZSw4GǤ5;:OEr;/˼ɼE>[|s<˲uxvG&벾֓JcF Ĥ} EQdzC)%HeDb}l(ab{'^'N(H=ĈE<<[\Q]{;g,#"Ӯ­e8:*i] QO_ۖ/BlO:Gx,n$@VyY\ eʞ ģ0g~AyiA"&LHBKM+ac 4{gh7>ʂkѱu/XʵvXޕ\8"svd[΀9ƽ ʹapCΨ[ĩH*QS5$-D"<'$*FS,edc*RvKVؚB ݓ^ٖH$i-H4,7d%ǡ rkQ5ĎA.fxc[0 w; ҉-SE9Qs >2LA&C f9z!"J2V[A'] 2l dadO+R&dӫdL Xʾ V$c$[Ƀcc7Ib(]xQ M& Nx9)tRtBl^L%.Z&"-ĹHAD XZѲrgM%kU!X TQE 5/NjʖRiT݂„7* XC.FnuFL-ˬH;k=iqHA!kqFy~Wxl\xxL@\2;ÒC;}![*1H`pA] RSjPƴy<&5Pc5IgcjLKJQ)aFVR?ۗVD!+A:@!ŋJ402 e>ߓw6#"pbgLgF͆r eqc\;6//#;zN&A^yܺ.tZZ() ryPp<ŝ>ty#LhJ5n_,_߿) =L᙭B$؊<>͠n1Q—~v2wا%;AvQq5~wrOK& OWȧ6;>0z4PY[e|kw_Y䕳W):rL} nF`j#3؏H g_|>3kL1G@S\<g-8D %|ovƀW8\>_[CYFFnkf&LGtwh3+UaRa 3G^S6V*WIuݵT.CQzZ Yi1qHc-hrf/g`r94 NArl M;`K4>AӴ:| {$?@mXT0t2(T =^+`*?4pe iؗ>"DlFSȖD풮gy'N V"TKy}(Юܠ6CM-jԬUx*ʁ~(ugyB")tP!wK!UWR׫=<o.8&.g GpG#̠e(Bر_ct{y2@qa5F)R;@]$!DQua|/PMyq$qxޏsЍ*azh] [?#r0\`C<-QxQ8+゚ ,tG,!9pwGQX3SIFۼ |3$7ngFFhxhO|}>3sd̷6_ ]>7:;ny;Yɩmt$inC'VB9g&nr]c9"!w_V޾rJɋ!cGUe.]E~Gg:4kSrQyd&a.kgjwͪ?|ȹ~HhOM iJTQfkM$U\q*.udnAl[ދObе%opYe^JLu?s ;w;پtUO B6γ05 7n/So+!&kDD珏jh'Ђ H~|Ԋ()L4d(P#5RѤWqb%S5\5TME3nG ^w;rҽ}KX7CR߿C{l3-/:"Fw0B—Bf YgVFHuj㻘UkGk% /pyŘ 9 .8V.7.ĵ~pCll|c`r_o^I&i0:dQF :=+ ցJb$#փ$k͗8E`w8DQ# #*rHYb+zaKd6$s%^ A5Vp%*RqrdaKΐj۔bujB)@ۣ]8dsgT2a%"dQ ,pE%0-&h2bod)*K4]¨ Q)P#ʱ(zCt-%G*Xo-Y&$06 48H9C- Eodq6%ӂ;%'E bi*CfY²MP&E  ԇ |qzU!(a5(EDWv@ÂPIyz"j a{^D5Qd ?5 EH He1/yTPsVD ͰS)cF-i ݉bby C$rn+Wˈ\P-.^{<%F@[A+ÖIq҆BۅXe NJP txU[r@_wgR Ih%/:Jw%ɻk:544B l[C蠯{- xƲ1r"r՛[{2:qcWů]yꁪ[+i`/@3"clC?y`G(s<6 _y,^pg7w#8{wdqmPfekO>Gq\Y6Ln4=x\ҋWeEY[k82&"k#vx?[r澶.;ve i!vb^,=K6 8 yK> T@ww(WApؾ~4#NsO` qߥޚ'xb*,g#6&Iq뷱'g\P|<%QfC_{) ^SO_!?տyg\H3|ёo2Ln?az+ K4*QEGE 0h>`{?e%2;bm0u[f33ac|#M>a8!7q1`|e퐭`k_%=\LiFF4V~*7K]yy*_#kp#W5c"[42KrUȳ{GY(ڐ3NR;ː < 'Whwi}h%6)KqEfp5~;Mnb'f3Iq]ǒq&vkD!]DxܡSfq }C(V!tH"8h`2ȲbF!>V؋5N^߈خ0in3a M]HGfXh:?29w&G&€흃 Ծ7#fNr'L\5ou:H{:#MoQ+#TϬ)_6xkK҉1V=e=dѠ>(pHIҌpa&!MY<7e_w0!ϏGcҭ>R0(ܱNC#!^zkT*w_ [#V7{@{f| ?HfNۨSvHfᲾ"?YzG9D[!?I◙Xg nm`. ?a utB@:6?ՏqN[+Sc}àՖ~ikGIy{6'p|YQJma)H@p2X>x9~7e_JjA <D D2ʫ&҈ddT:!E*& q@I^"sX?2]צN#7Rb/tԻ/ݓ}INKx;}7պN?N[I:$=PĈ$6#%\֑+GiN/qEtQ ݒ;@4mI.qџWV9xUCp͈aHLF2;28;. 
(Īx'Lp SBX0N(l viXm`c=zPSфܩ܋^0PP$pvǀD=aOb-ٟc_;4F=ޗ"ZVrF O8()H{$ u`ĕVvj+"{TX@<ɉ6HA0#`M&TZE@,*S)OdJ8 AX2^\ p Ƈeezu[?Bɐ9=W %>l/ +"GZE(Ƴg8ReWX |niޛYv}}r03@ (7Y6Z[q-%W%S)۩ر*TI)%$KQ9IApY1KL޾ɇƒ(`@po>05dg7@~$*!rY\V=J~\p Ō]ky۸>yY(*R ^ۣ*0ıרߖ8i35jlܑ6^%ygB+p~q2%/ I<΃(|#/LGE;&o0:{^1^30zK͔ԇmn1he&p^ɦnyr̆ _&.lD2%H"P)k$,GC{'i- ^&\΀C Q9rO!aB` ,B,4D c*U+ Ԯ v"?[<Ȉ_znY֗ek%>/#;+w=/W2\]Z R"@ BQL ÄommowD`;a;_wu‹1V+}/\.+O|hAh;҃x,;ǟzr+_PtШl'Ƭyh`J7Ѣ :~0 fTjJ4nFVZmvf{R)?'xVa77v9οoˏ?ĩ{w'2@Bڀp4ÈoJ aPdk ~)GxL4rIF=FH=㱝g8r<|Q ACyrShL<ATm:m "E**`,&v2.M*He7V|1ւ0"=[nxdsԳ:zb;x Y2<&a!Ƕ1BUQNשKFCh̄+knUFdri@Kh>>/͋UIҜ3۸m8ŸQhYQKЫ =7b.P2D3q#!* ų3Oqj܏̑6ß P!xR$q[Zֈ?M)' O=ޫԚ~N2_D,Xk0^ Ҳ/ƒ"D0sW]:,H+ʺ68t}k'XCH9* xaԓg.~eN|^toDu~`{ȧ/+q[-X3fY w_|^>; H UGiP)O/?ãR[[cKƓ;|'?0\.@.@IJp|%^W* IDAT<9ƤףckW/]^jR"= =ı<&׾en2%.b|Ubp}iGlvjb#<|l4|Ot%'SC>䭈]d#??OawLu,+,>>2 }#(sfl#9xaIs2%=gI$vh` GWnVh2 gEʫT ـL  ɥB.C`;Yl嚴^}٘*t[3əV(+ÏSjϬRhu$ٌ LA12 Ve\#Ov\p9/TpT1cv#;bF `LehTP Ia.h½yVoM P:G1HAaB0BxQ-q䑐5H嬪U (,@qPd\cVԘQ#3f(lυPZ[-2Z'Wm#2-%-h$gsv/7U^Va7&pJUbG+2!ϣCŮ_T[.`,Yf5֐K;k6Q,D|Jv:F<PeMD] .!*3C`YAҸԋ ,j;G,,@ƾGޒh{4Ta&PQxVBZR/.!(Az1jJ(ݼFk25{'J,b ױЧO,L="a#[CB~eG++;^"*|kJ?p* v5&)ƸL #v2/Q >O(@r!EA2O9컻d!epL;JZɕӷɿxW/MH%> bpZHviaS45!$YAIlvvvv'견=0ܰ^?͉3ɄT9FslXDu<>Z+>]3_hppɄ;‘~1ݣ{l8.L{uyg>q8NoFo]B݀O loMx׃>!'|{ε^&Vq8>Xsr(WBKͩ!u嬹JY``d>ECńS828Po%1^lt:!Z13~xW7>_XeyAZ9eV٬*yDj=!^v#n>6fvr_S|5o+aΤŨj{|Xr8&x8Vn@u6 wIJ=,zs|+f. 4H=qPGE!Cx"q;Wĕťk=>B7'\ d']G$t-,w>CSū .lrzL^c `6#S:\v0EpL<^a}Fc}39 A`(3ks{gsc̐tS*(W+\oؒU&0bjMUDsxUK%1- FE^1xo ̜pFYIE*J}Rfq@еU"`K-!$̰guR { Eʨn#JH핌7aZEuR/mb?}!NLY@:v:jy`#e |YINQ1`2gJhTl'w<&_6|<6GGI/^u4DU%w ;,2,Md c_^qLE1Ou1"X%)&cS07E\䞿M3gsJ A?(t 3Kd O'WeM6?yvJzBc'yaN2<>) E#,va vև2\Tuk!! Q)#V,I6 tb]d~iw/8EM!jpsXFLxS1D\㾓="GRӌ9_H-B<42R $JPU)=zPQӤ+0U 4C'R 0VF>cu((8WyJ}o #4l`|拲ں߭+^B2]ps>gg,3h6Ѧ ^eZ#c}" %N:BEtdEu.H;)D3liZaUYZʭ{hk :pCD R338FEL@*m,87\ EAJK %n`38S.d`J%V?D͛[ շ6 ͮ{7I'qox XXꀼQ[ {3܍\¾m`oϺpKݍ@8^w?.e)D% I3-6t| RXFE0k!29Cl_>;)IWh>(njdĸ8ǩjOZ6|7UCciޖ۵Lqr9gqǘ2O35H米{Rd9q2Sx"ǺU3cRDh&#_H^4V{!0lg~xd VDTTr $"Q,Z|nglao1=Gv dx0V^o2t\B_)B$N~Ǚ|4ϊ*+[TVˤ+::'R yXZT Hl‰\VܐL6:&߹1D͐'O~&=*4罗C \i$|m="dpӳ]9NuϵdļŇ"1HI4p-p{IXdH^q5G؋mx Jb N-^XQ c2c%vR2)3 FJwy$7wb)FpSx$g?%-s;xH(eۨqHш ##]Q3N& dPg!0o^kC<ɤuL:R.J6GŇhޔDE<7kdPt/0"r`ZސD&xp\ʞ xUqñlLxd@LQ ,@JM E}eAU"^Y""SF72<" R)ix#22#V74`"#D`T$ Y%gyVfn d217 3( JF 'w"PA#cv'>ǢO-bcn@\cS"wB,뀎&Lzh!4&^ʢTݟA;2O@㇥U7d"Ku#/*Da2"nS' j%ON`5œ?JV&}Pt[*Z# ŠN܋HW1%.D@ؾ1eG]+1dEPZ͊\rn1{ζhO-Ƴ=]7 F}͉Mb7.aOy nO[ sRն(oN8&QQom]9}Vbn]E$).R&̀<4ijB*#Ni2xdY˂" "ݛ"{9#,Zr\3gxUL1uʄwejɱdMhtp^4iZұ FG'H H6W).7xk,opj I1W2Y®caհp"=slПSQqeίqk_OؚyqO ?߱S!kzh37dߧ4od"%IZ]vs]ٹ轷  &2w=L-y;);ynhH5$SҼJN{* a?_tY}vV";]DQ‰9jG: K\7*\\s BL.x1!*0kW(ȉe glI/!Cy9ؠ?a6gT`QJZfUĈ8A!UM%UG~0 ]ﺈo"?v׫y^R@|iu:V&2 )s?:PҚƊU)Xjh4! ,d2Ô\y Ә 16caJ-A#wACJe\j\'Jsa.ehX#/\D;%eȁhI)S^a2 MB\vd<.ʔjD1Ϳ|w/3p_^eQԗ(VlWz9ӎTdKدGS_,d0K}:~1ׅG,;K% u2ZXfZg*qU>$S YI>{IPEŠ``s)Q]q^zܡx6,rmZ;걲nY?5ǹs3IclPElnj߯jteĕyV D8,"}2g~1#KJW3(O;:ms"y$A'C)'7嗿>M7'aF(]Jss7!,m#A&% 0DU'YX TrOI6H#ΗO2>DS h 8P]ye(i6ҽc1j}Om5G>gphJJ),,*Wha>e wBQhh(;7Ф`y&7}cVuSj]7teYl5&Y d{Fdk{ݡ%9=Dl骜,zY~Q$ֶ^r8Y S +c,'bThzU*q!JN. P5^v6ׁJsz &L~֭0lBMwzD[uy>7}ܔTww1 >C,T{D$s+\Wb\A/8ƀWiR4Hh&l8լ <.k밾EվT- kst\g,hkeٷYOK9|./Uٛ&9bjZ,$'l>CڈP#"qD|(y}WʶTRQ^PD%?Hƈ*B/12[ձ5-Bf1-5 "F= ju)(8e6s:E< R#Xu>Bdͭ挪Z/W1NUfoFZVB\XxU&=?+x$4*4T\Y{PcwLkrU !ū6E3Wө.2?y%YbHU1sOKP|. a=]1 Hd*Z w]ְKYؿ.iW#*!}+' m²66SGXFeꅑvu/d&jܗ)?6K!+JleļJVt IDAT.W PSFdm^/c2s$kdl|a[!#S0vY=TS8=sό}Ge RcMZR_h5m0mդW%)t,rg_.ɗ9uRڒPLpŀ(7bܨFP hPou+{w!X+*Dh1a?xuk!'\vS)XoSQ?ro&osUln{mmKހ&F7?Fg&;1oJ(> KUWB!X:Ή3/bDB!* *;Ni;ωNG \m^? 
ǟ|R.\{6H6Eg~$F|2-$-8U!TD&]n"6iGZqu[mֈHBP~;ܓk6'P(ui&p4 X 1)n ` ^Db+k'EJ°ՌCpVvY5R8;,l-~OwKؒF$UO)ܞMY}ТQl_4; !ȍC/'~K͋Z2 ˒  VNsiΟ Zo27٥P4DD⣈az(cɛP6YZ0[tMZ7vPTwyAOv% OϓL\ָeoI+A,kŒ M*Ȣy6dn.aVYm6XQ 9(@^.1ֈ*x.@Q`Yne*օoڂ[<"q` !)WKB` _/;p(S$;;݉V/߼̖q8'̍/#ˇd oeqό}jgN6Y"DȦ~&O9ɸ2G4 F c$K._lQHBd34e0JTGΞxuؕw1]#p4Ŋoۼny%o^F%y:Uncfe b{[|;iy|6ڪ-[o9fQ ږM1"ܹ)Tp! -N!Fb3OP)uanE^RYl|eˏp468R "gOL :5#K+*!eX5ɠsAږFH5[}gރ!$zMg܅СI8Ř#S`z<홸S1u o/l% |E .̝@&j(.9TW\MZrjc^fq˷ٲC0w9}pUO`uIF5HVeԝH.EV^=Q|j\p\b'hh>BJQLh *elD,wFrgN鞽񑕯m^N寅0!Gfӛeee.c?*{es |錡Yܩl,xq񻕏-yv>$d1O8kkL s,-jaXP$QN[4eUH$ 3'C%DgCѦ5nLEd?y.Uf|#xK Sk  ϊ"!>o';{$`^ˬsQD>'NZj,Np"[v8ɀ/64as9ǮaUZg ?^pYfޛuw~眻KKm*VIQ"%%힞i;v1ea$@>S>%@ {8==nmԒH")}vɇ[Rۉ;HE[?qit>ob ĴKENax&Guoٴ> 2N8g8"adVRZÉBaejZNI UHg* J{CRe Rs.;କVc*,DǖnU MsԖ\[K>ɥLEC~WțRcrSx.]sgXwnNW$(;LDwM-IJ51(PKoS%a".cvUZ D+ K[]^e yPG);}Yyc }\s,Y^`6E#>AeBc n&EO\՞ =KV16}a5ƙB \BVι`omsf#3D'? y\$e1يR9ȞuHqn-{~e!=ѝweC-6Jp_ :2l5\)ZSk4&vDIAK#7+0gWCU2h\Ptpڡ2!7V!.)$Oxeu=:r)`~Wv/ĹQ%DN5`blzP񈟚!*"džGx}weɺD8`GJFC|%ژpʕ"Q:w.Dh6(Q!W"ŘFP8ɞlUa{{/jg\SҦO5pm¥ ٳ#([Z%>@}c7,%Q+ E!OkX'wHKgd S{2eKsq E18!B-B{s/9Qb*-g #ӳ"kn'~Pΐ-:bk"T){ ,;b8heOĥVQzX|/V]p{Ξ ܝ=cL1Q]I3C9靶=f%MK󧼼r ԱKcxO{v'Ot?ls/ eW(X7=[ u!!믦`8YuNΜ/~*Gg^Ǫ%,8de1rrN |dgX :(g"@ךx(ѭkDP&qqUj8lIPo1nyIYpc՝XRbq s⛐lbGu98NEp/:֖! ޔ\a',ro&ۣdm֡@HmsZҵ(_ ƈјr"ʇz 1]WloFp0Fb2<&[M'OHHBIO\٘֩7%~SSRY+zƓ5MĕG^nI'ĸ,9$+%gyI.nXՅs#eIڧvxdO ^-c+\Z8MuoMٌm' *ӏs"ޓj<:⹮l^\myn/$q>8P抈SfD0?T+_&[kՖnPq%\FXшJ:$I{J2FU&MqKEwsq^CkJŻ1aڊ H bk32zKej,q{@ctmꠂLQa`Q*NJx h3yY,HkJ$H.*R2uLbȊV#:l3@z 1jpUBKhz9ǢR)ўArd̕NVK7$<<̘ |QL"iR1\[ qO4p?5/PI=9<եE[A~8"ZJ#G# j0f$uz1zB(\HDn$O/23M;PH]IΓ5v7#P4$X87FB.riL?se_Ÿ;)N3:F܂H+*ǐ@1iA[f$IzĝjAi3.%+,~$PHQY%Q`SSR;Za'?(㙔S([+- =%"n!J[ʁ 9bFXJ>.SlԵF*+WǴH#q拓,dkIZyY^cl!A$4 <ݼ+ҖL..7)\@ߕqO'( &K|(T6v2Ȥc-Г/>O|oջrHEZǞh⢨?a濠" *EQzWR<ӟ'?Id.[n)t@*ڧ)BN[[b-F;Zt{Js ]}ǞĿ<'Tyfuv&'wЫ؝.*q(3 _ YԙΜh3QIEͤΩϿJ}#љ᩽y:zvv> E/tI>+X[s/pib 3 rP~)s 21(C C %Rb(/("2ΰi^ND?k0Nq-Q3z 4(G4Iz k̾Rg|ˬ?F4K8G/xo_S){) 鰮 'qx? whQ#W]S-F?w4wͥؗ6jz-9 _JpPghOqjPcMYzu ;#a-m\ Fjz~F&P ,TYyoH׺Tr2fe+bvGxk ip ㌢j^dVCgş1Gɟm~`Ë]"1=qS8ms88T>=7/ s}n: Z3C-JYt<ͩϡ|~`# U[@4-wA: WX@ 3%]Ϋ7;)&#VIB0 QGg`=r]ʼns>1P 9; }.0,'CX5.yy☚՜51wD'K .х>v>_2 N{4<ǧ5ndZخx]Qzy &paS LQNA6{pG9q"\G'чzdq}H1ވoxs7.d|]2(@ N9DnzMNpG}/bvPղC[o2!9}J ; nǠrŨ?SۥyvUbE(2GF(]pK:Ӎ%N}m*E)k]?Z2cNy_s1Y_6\/J+pn[tfq&]v@w) ~ Gp{\' i:I1c4 JS "36 FA5a1$$+.{`2* epf'O!:R$hi`*e춸}h@q|Fc,KpCDYnċ#-KNY6$iRt/`TP=t?^s\xΰlbs ۯa\l𔅣vDNLz9Rl O4÷*Q=~vU9^E} IDAT*BQ8)&rVJQ8q zcF|Ѕd[꽼j2ے3eH#_^M>٣?)' ρsFB4axs(vT̨k|Ss{v3x9 \l¡dU^"?OpCCG[Di͠-3_9q 5]oXD" @Ѷ" kP7kZb{u/NEc))yk|:ar"%UhN,;ʍMotTPY򏮣BG}]'*":Eb$ }{n,2}N,yDakC8fklzoT7Y';h>b;Db#]1X㻫gK,rv({qZP#l8Z&ZKU]k|TP?[8|3q֢{t7^bң'xcO2}r݇<\טּ`p0ԵlT22hV|ӥ>RvV~0ͭѐLƊsdyƠO>Fw]8jrw|/s^ҹHzul Iḵac{|>3*?4SOP<ؙ_^Oƕe.^ܰ I<+"b7ta쿇- t#e ( 먵+ 58$5z#vr$ ;)Ghߚaf;a;>߃JZc䚝#LwSfr͠u7#qغ.{`mdE 3kK3vhCߟ2qsLJW^Ż4YԨ L]֧Me'oqr{5Cl{d`u@L4)"eq7Q"*GZ0`Z OzJBЕ$u\o nnH,EKn88̐?[~@u~T|'{/\|8bj'"q?V,;QtO7"TasJ,l}ET :Orngt<S% JY/ t>.ʹΞSXc ǔUFf @׭+'.Ad! 
%(4F[+ baX=~Gw0D( (hIꣂ߹}Dʱ8< !.M Db}GwV(\S&jQXJ!]d"ӐXvUBWrbZ*Q7sS47X46Z&y=֘LxJϾGyk8X542`OQz^i~x /_=B1$޷II{SZz nNEA hV#Y[8S AC\UsA'}k{"#]l1HtGu|'?'R|gj|D/\rNOMhJDT|p)81;.p'GWQZtTG/,:;K뎽Mf.^$- &efQr3v>zUiQC`uA[FÜá~s>!^-v}54S]tzEۓvQE^xqǙBhZ$><瞟>w3J+F'N rd+ Nˡ&bFKIm4+Z ;Dt5ܗuxVPVO~{GzYZn5]ӨȣH'o5͐d j bHKwn0_ : -(0;2O(YG됰1/~X"Ðv_w[N?`S';3W oL'7,z#7#Y[F봤8~p_N'8d}vD6󤇞ߐSuզy=i=ژȼ40Ð g(bgKP~Vw#5Î ]tߗ te <0on&7]#ol0PH_Y,܉Shw'7P\?94|rx1.Gh)D+A+8ޖskŰ谶ړw.-{> -J(pjxJ:Q t& WMD׺H+U5}j@kV;LE8]Zk;Zp S2-v-q^A8qFǴQDVXPZ) U9E F^5 B(?#C&COŤ*S ) E"O3\n8rE8'.;Pj>()K\b e'DFqdo"tkaSCyړɥ2QxW (@2'HIM;ɱci}/׷-N _ղ04ŲPk]{m;k>U oPUe*n1.-C6iPaӔ}'wefZ;I7oCb\0Y%VQD3/Hϕh!~,hoeFb(F5c-'gd=$RPjw?Dy 4}G/VXJzl2鼽!;6ܦMx\d<]WN@3_|}?^NJPh5F):ʁNRU::!Z[$Rhv)b HpΖ%B#:7xS-JT{rkz(BC_@x|_O˩'*1ɛ{r}/kb38~c KPDw(N( d>IFfcm@P,t{p_f/^)_*3ӈ B&O.ImqUI혁5X Z`8֝=vhnp`+h2y,IcgqNM}OJ7N&rzDWd1"f-lr'V%-"(WWVX+6y^^TDD +_ia oDz-x9)ƩN :wK ?wIkRD"sm2*2dk$i*19!^^w$˄8G;m$IN!ĴXE!3R|'PXgF>QȽןݕ8';=~Exdγ GfH܏e<8$ci֫b!ǾL>S ۾HX4ܗ뭷IW[5yzSxbPT5Y~bl,2brV%~4"ȡ<<` +Qcި%L4WtN$=>Nʉ&CTI;=`*sol&k{C(r]]5r*  C'}DFL\-D_9|e;B ΡIJY_eo)ֶYپuV WP c s:p(i*)_As8%3BETT'QJsTCajԓ{=*QWP)L:.>ce:L[8ĺVPN.s@"fB#J~1VF g+E x _Y.rTYB?֘ku:V@i0–[ȑY) ;U(q,玉Uk>q&a[@iīBI碌Q`/R"q"!_:Ec =F5c'+YO}+|@Fd;Y;*L$d@k1oe(i-M3>02\,՟R, 'k-M25l!/S#}~E ׶mҐt%N{-h#(PƲrڡB`(pfV1&r8Fo$"%N@[2N$?)OsQltnUR)LN&|bzEE v-=,|1c-/ζ1jFc~2$:EO?OWp8s*%,C Gдj Fi_sENļ-6 8şfPČ9`0I/01f3?w^ |=tE|/[jG#9 #nY]8~Co0Y Y|zmIFT򒛭\90SY<#͜S~3ZIqJa.O CJ,A& 2N*Q`Oe1Z 2t:By&e_"1Bb[\xC[z7;ZMո\X .i_4:)m!by;ʩ58RR(H% " S ,͜ |N,MmLqap֑~xo 5F3.clدxT*lŹ#yqC*F(Fb hX!D5s #zd~ R+((&ц^5^^rGpEGYFfLm,AH򇝒uG24z!+2z'謕GZ'D~nqV R S~{x&3 8Uol g~%e@[L\QciKrJq@zc=ǛRT yeof6&Y#[b+ v>&~aA,('@!)0h[?k-bz;67/g1[}یk:,={E>Ε.k8kxz^'J^O#aE}-ǩmƨgy%-T5>!xPgd bQ-*-`|dS s:`gh~c ^ NK͈X}\! *L܈1ԓ|=?]}X(=\?v ȏ;7S f6?:NRck<}0-c?'4b]9M+hy+õ-J4)tvVSX#)-j|!h _M*&1` ]RrlQ~od<^\&9o1JsfdqO?SyֆugQi-^BAZD{AH]О$Lm5tV|r[t3 !/7}2\󍥜7@fӓıgjunE*[w,oL㏬s6wrK-V? f?G~[ss$o圹ޛZySSWUWWOR4)ے"3CG6 A8 7 0;E ,SIn6swÙs>ݒb4*N^k~=K7vX;:'y*\zH-] 8mf$$-CL<$*D }TZגKh#ST_Io;G1&$n׈]k6f2v||颼0KDh(sDpk? V1_>#FK_A=V ֌ע$J2Rz8'l' iR9<PTSKDĖPIog 1$[jO􏝇1f!j(ZBhr,58"R"թF#nm%I fPW}Æt׏"}G@G)f?s8ySRGzssa]E&4چ"a>"iZM$AcPD3z[x]#~P>Ra[=ڥO (A.DrҌ `,q*TKC/9qs0v+fFu&P<T(<9wq_ځ#x~7S9" ^YV7U%sBgzZݙq#:`$-mA0fGDKA}NA|9c&*+pOIZI^R+3nP3legGmY_p1BS<_ ޯs뭄Ʌ5PJU--DLvСc+w:KX+Y7S}Eadpk58|4&;/p8=CojJi8Rev~Y )d9.-+s_<5ɠ:ODBx1@j;5~{?EA0 &1Ya!XrYD}4EŃh2捬.A@^W+A Bw[0mb:5ae֓߹v/f|O(oo= ͟tx{? Zaf~ߣ? "Q !FY/CQ-$ `+>C`%CE(4đ36w!6{+y)RjpH|7@x:-X.ch$zYOcyrBcD6{33m ̷\9s,6)R8HtҌ32&Z!˚IW1'Nul./b j-'\ }5"Aˤ hX|EwKQbC{%iVC]EFkS +T[ޫF8Y~t3'2=*w7iMw)n_򁇈YilIZz'.;* !QcVPQc9|OfevOV_'ܟ+ufq,0CF!)秇ZR(grunQt[  g>R rK4?QA#hJE*|}De/6mHkRxK {W%P0Srp4:Yۖvx{=@P[䕜ፗ$*TL7kbFeȧ>UYK |KDau y;åC13:RXjeRmdT'RmW.r񱶜BA.s~jUOʥ \ʤ֧"i),鬪mB.ޫ3VƝkq "Iz_b dܕ ]mf[fWijM 5FZ::cZkֈ5xbvrE߄Iݸ OoɺQ1 VHEE^sD"fQ.8%(:ID?3/eQK#x]|_XeXT/EOP N$ YD3. 19}NRNmpC RToַ!s^;?f񙧵ݚK'9Ֆl퐌 ¦K=#W~bsSGx[!BI2Lѭu,=j(Sw}wY  l .~p>鉪WH26w)S|TҁJ2AD䅄T IERV"JL^Ϊ#Y n@B~K-YnDL7 pۇ|'-ߧt:{>?Ch?M\vv:\gȒ>E29fL I-=s΂,[Jܦ.2 G1 ɂe-_Tv0{tݛo-'X3 9MvqsA:6Ӥóg#k#\997vJ~&\DbII:y\iUfmr!Wȹp_#ο?wgx៚6Wjӭy ݛS[\p|zB-5x}r|[ݩq'3n|B p:ݠ Kـ66(gU)RUr); qP;}͈s;<>(z5J/8/vT{kl@VdlmPd'j0PJ)^sԀBN]1J7JplR-TƴF@F_Yts$̃h)vAȳ~f(P1FN-R1t[&7sg2Rٽ?G≮>X9!!(}JJDr035x+Y1~YnJ;}6?ŧSNEP#8M|Ȱ|es%=J|``%K<݌pN:1:KJ0EYL.~9f]%+2Z*ƈ^%nҺ7>Mm1 1 ?M y?%_c=#?>ӡRaNAEzl?T~6#MRQ'jd P!)6 c'u7^I7(.MqU:Je`d{iќ^|w75rK'97K ?̓}y ++ꞇ;'uOLQ+ zŎ_k&6MѥUZwȺ,,u{D A?DbDHҒO.N3UߓD W2A tG%ZE>6cu2s8Ip̫~b67W._+ 0TUά#F,i`@b.d*?T$>dM-0!k9lK>6_I8\%KOi6~k4xxi{np|\b%PɊHvziͥ<i̭4a(soER7i 4C[|l$X袥7=÷v8_I#|P<[bl/A`Y-ʀ<ʂiJvmWnwUv%Z6orR=8D#Zu/xF6 A&E\cg!Wu鴟[6*T骲Mb ͥl:{H F9'α$%{@{n5'"*G=N'>,C`kzP)9C|P00*{'? 
> c c:L$y:xKXoRYǣg#)J%Ng-ҐDžĕptxn?"t!bp3V$:DKCKGb޾5saԒ}F)DpeA:^y)cjYO tlK@[YeJ Jʊ%_h`:Jv(ndJ=hɁs wfi{\y40vSro2{8srr"7b@]&)b,1ԛ`=`2ǜ%i.puu024r~AaVS4,>gdрG,<2w9fb'c>]Z$&ƩlE0[k۽mOv8`c#>JxZb#XntMZˆ9& T\?0h5\"Ax _k3%NJqRF)ӂq¨2ꥌ9^.~FHt@.Z qjTAb:!y'Xsc~!8ُ qF:", ae8j(^}_'.nʨ0R3=i)DO nCg\ ޡVȫ1NDR,X,0TuP0e\ɿ{!UBk UL`ޟFNecT%Oy.2c{ſ.Mя>b+ǴbXۄk0VԏS 銧`<{($ޔWf \őnt龴q5+P*H)ɟtk{`gZtk/uywؼ~_,<'ͪ ~{۷ɓsfAtq[xibT@kHY:|OxW ya1[܏ݼX bL4[o wMYcʒ2aHGqDŨeFf&[i blDp@@?/B[}Ā|Ā|ĀȀ]~h1ԃwefb+͖6M* hw6i-2<Д(;-CLYANkTx-/^ "c ʘQԕ4:@ "I7R)5/bA 2ֺ rTXDYF`t0[ooԋVXL[x+9{Yqu~Eo y>X]gβX {TAyr -qh&8xhi5REHU80PgtKY/x, P]3[E}tjnVw, )ET<# 3t>YL98GCJܙ7H}}w.i7H*!w92N3QSIb;+dL:/KzbCVRKMǰnyaҲ =j.I+Fr(Joޱy!ʩ6}:(Qy)/ؙZ#v#88E^=' *Ru\lN!6*.zJ$^Asj(Z;B+K:j0r]zmUQjdY%dqy>Ho,Ky<,JAᯨxA%pqlo ˵V D\'YxVEeYB\2ٖ.tK?d3fohP B ƈn>]0ٕk޼mGF"@<&sj W,NZW9:,(2& ԃYqY#Iq !S4~Ӱ5d Ԗ%5tL_L8X5H*V[jiٯ ikR"ID{11'dyY#øocS&Κ IDATffw6$K 1֌+ a@qH%Ziɷ*cE;(y\~&Z⽘V鈌s 3)O$_lX?K:Vh,bgP["v%69i a ےvJX -L5O2q3GL]Jv+R#{By\2m2m5U2hT*3M+'#/_}'g~pvJ++"opXѫ ^z*RHIjT"Tr/?+R*⪌\u}fr/yj@3)K$EJ^x_[T9Qgb.:ĸ)7/Q 6g(őRf"=1) IUqjbDcWkL}bGc |AB+D1C_ w *'}e 9YsҟH1*rܙwu]xH"t { bB'ɷ7(6+c܊,=3+L_%nW}ZK-7llgʗټuBd9Sw%(dgd2J2CNco?B]!>$ s]Ǎn:<`VTC$i@klLJt?h`3Vge #X? %~~4UDt@/@:w: C!8|/qdxxQP[~D΋ubD%"裋-jD ,ǫzetk{""+z%` K!Ѩ >26c3}2%#olam]ΑŇWҋXč%/B ժD ֨6Do9!Ug$+lː7=E [tL秹xJ]l t~+{^cJc;Z%DA܈ SGO|̢`< -Y#*TȰLˡz S ha$-~쬊|} Qy?hζhԦ ^=ا|D7 bV,n13ИϨk!趤U?,YsJÝ}\:KS%d:3u[WU02Fu/ݗ"nӯ^hy_9VI*:o b^4` *9N,r7D)̌cB]?M!5a&}ٞꉥ1"+Rp*1aʴU% XML(U`epijnsqom*x8^Kp/u5ʑ%fOooi~c_W{3N9^c!h~RHumodoZ{[km.hѢO`!Yi/rne,d!}\8R #*a0JX!*$^4M$ބM7xh875 a$ an<稄c̴iK(wZ͍Y'œ$NV$C&1w*(Өk8.䨖`^Wv0BS{mW&v1TmR~kDt8#xBC*F9e?c{BW\b;3JAeV\NW "^Y.4 S!]7{'ݯiEjGeB܎$ h8 TBwGq4TfUJ'i)qS HMX3 H FUՏ ji FA-e~$sη{feVeuUW:gz8%٤ [  ˀ_׾ЕB ql (Mr{zܷ#_DQ`=!aXD"ķeV 3ry |EQ͔$^_ d#C?zZҜ*u~5qm+<4dj&m>i:\4˽e@>+;ϢsiOπ2 F? OÂ|d543aA4b}I:Q|3Zӊ#Nf5B%E0㽡3XdK:RR9F"źX?F8_;9Fdc^DppbxIX*r&eY*#9˪u3gf# IW.[!޽k]]Qt2|p\F=|A΀ ]&e^x>&J2ý3)[_PtSU>>ÒB%>5L\jOPCpZ!CCAeH-|i}RN+q:5 䫊ʉƶgg%_vwB~Ab5oP_Gјx]AXl!13݊ Ekj o2X>:Q3`~Vmsه ;岸f3>4r^XeDHeteL7JrtNRSZчigxw{<^?V`#kqu2%Hf/~~[3JgٽWQ UC>1.[N9dǴ֣ѸPzT9L gg;L7{لG:Lw-ZEXzfѤOs-px) Bah£;4.M{N Oeo7;#V~p֭ 6Kvb弤Z2m3w9)S>8;{ݔɩSwkv꼲[ 6y~k/^YK;vV6[:vK+&+Kͮ-[M^~gonW/Wʁ5bcdEA<ƫJJ҈vfU^sθ9' Vfu\Eèz0&8ʲ$SW4aكI0yJA=ɻXMۧŃU]iQw(51AfFxo1 1aCL ^7t?LyNz'Tt#ܼTuw@T#G<3U|E-ܖX`kr8r=es5.ڼXui> & g\Ifجd:7ZJ.l~5t=8JCj!W1*W9ۄqΒ[,ힳw83BÕW'C0Y2$ 6K/EeR9C7 2)䯿 [4bZ;cUFhv!ȳrJ} k!EsoB>Ynh9y %>caG#BrK<><!6rzE}{̀$zq qG].L։u`TQAU1'sc0t [4ЫXr:86(}0(>iZ <,Hk-.qaCq$ى1k r"oV2+5|lo = R#(6fAzxUx2S*7?pY?)FL?!ǟT||>?}rmk$7Sٯ2ڿ|Km}cyZp+q8nFbax`ݢ6a<3KJE;x%TJb0X|460+ bOAOu|Zqw9Үxp2чk~V6֋ ˩zX#c[(ΏTMs7>3+9βm:Ϳvx%$Err^'{\ 5t>~[|x')4b0tD'3#&=MƇtrs3g+C+E'D)X,QZR͂4|-4QKD{s5+a~'[7]D VᗙFG޺)M-rƚE@E JSb彗y$R' ri2G-3FeRz,|ٓ H="kdkTfR|ESyZA2#Â~[j0D`@6tp5E^a4{ IppTKOcbz{ ]'*};ޑ|} A& URHs4~$+0.㠅P=(lӮ{AUGCV51dZV*hR>)MK֘JO#)U*rac*ZYB<_MTVI.fsk(j1> y5S煵U7~fDi7Nۜ<ȯT.?wЋ8Y|PQr\R`u8akk37l6}lO?Nof?iI~֟l|̗$>4ϟ\Yg1SP, z7Wr|ʵ_2;;]ڛ+[\%d,g'eT9QPRTW譴h@4h٣5[%* $I*Jyy,F^OWq(V+ TP.N2e1hوޞC%ŞLre\heˊp+ 0JBKC-X:|>٘Pfsز"réT:>z~rCG?%m-˙N'Ϭ5VO18`8/i.~8 gjxQ>+O&l|kI+/$j]FOC\||>QNDpףO==ߞј#G'<7NU{98.QŽ❰pN΁y,؈*| V0~HjQp>-/L(t0K]m=9]#gZ){Pl>Vrs?N@`B v"PHH+ZbbGϫ*~wo>Z\4dF5Qy[$iQ¨JR4l1x$d%y 苗#()pN>JJQd6T)R 056d1!tŬFfHq"M U[ĚB6y/y?te·cۧ%5c=_ZT$8:UKӱ`Bkp"3Mx9Wɩʋ\ +$:bW(3O$Fw^+{6 U+ *Bs8,_Œt!땷{Nlw?nFIftitn,SO3y1']}O&}5,'ewr V{sLA(O>TQd<+kLfUI=٨ZԈeeTSB'|Z1UB*&ɛOC2bT*RN(c0 0!'P!{l* .\Aj6.=<2{aqC7FUL>0rB\nq\+\u{KOMP^4մpr`*nndQh\R?nno4X群Շ#gZjv:whϖثjcw}-_zТ21g6o-'tVm9Y|'?N;P4[FKJZb|Jx>C1HeI;pNmWpw+[=2D$w{9]qe#^ [}VsRyp9vw9Yo2U+s!T5DYH+CNV[ܾ1ꄔ8j0Md'%~~8$cwpA :oY X޲ŕ7}so.i[K\%,BQBs7аբѡfhkjLԡp#|}G/q_a#a':|EFplhQ(bvuV{_`^3%S?c_7E|7H3c֚tܔ[,g\p 
=tvIs|czG?-~.\s g`}om|:]f#_+lDV?rZlɒ"2^d*Vy*7hV0`!J3^"ZHoBd4j:rSR>]Э V&\'ÀVe"9A^C\h! 8О[[Zʻ?{#mw랽(%EḤbsRB0-q#/ѠГ)qh*E۵EփV!M{BaVZ[*Oy۫-x/J/.h/D Wk}ތGufɯZjk?KDgyOf^ÍҟNTf+vNBp ; GxGg%VOOR݋n;3%T5^q#I_dI:K S9kLR6ok;/& % I>9Kq>鈠B0((\lʬrtsr>21S#Z;jgVןX9~Ч^KM?r`mPY3`{YQ^1m&>KBu#~tH h}$K 8#SjGs5cZj%s mibv\7\gjF,]olCK6z,?H1VUO]n ;a"<(fZc؛nBӑ7 _#k!<$RF1#_S`=\U"kd%zu9:B9)jeGT>TEr\!cxNP:OY9y(| IK_-} lW~[}X;#Mp-J$SQtNRs~õjq dCp5ٝsr)?C!w(iHޑz Kљ.bY9 (*߿`} >?!|DU6 V6%)#2:7]NlyKX cfr ¯9CSUq̳"$%-IL*g$>U=&Xb⸎j~@ӥ$Uȫrkr^O]xTVV G< O D%q:}wzKG'5; >gQsTnT~9}#-q&nˆ}q)yDhFs %XK>k_yI^ڸCOpVtNˍ$x'W)D(b #*.^ʑDHOTj.Hd*C`*T0.UI8JaU{Oi*Y..b.^rھt#~7.x KJw.`o\fXSt@4ΆlGi|vG5>YJtԎ5C.ZfnCfDJjWVU+A5URAǠAMke$YX2q@#(r8r*-K7L%U0&שXy`xP 5k|v[ t+ynU^oGZ ( )Xo<eEٹ*N {E$)eyYY$*FUepi"5H%֣cjC]Rgf:Z/X V?{d.\&i$n$I<[kr#xz0 FeNTdUvN-1&U '0 *VM)”z[$?Q!2kUT9_0 Vy<'DyWVI,XSVpm h!';l~aǤg"֑,* $A^9ވ"d<:{FΎ|teWֹѹy-ZǏO kWTc}h [v38%>(hkO"&Tetڛ*5j9cPȱuض|V 1y*b:;N5/U xx1T ll{Fnǐ'XN}6%?FzwQD- x;H_$?'84>gK_{ cbP1+_>WΪ.HO ]y& a;WBfHIO 8i] å!NBasߠso.wFA}Hg'3|k/1hnVƑ,K% z89Qu&X*| ˜uA뜢v70g윞 W/yZC;MW۷>>G'l{\?gwݽ yD`L9)%[s?*1<:WϸKC.Z5˅q/dk=ZrI^A 8"IsVFtS6hgك/5 Jǰ$.J:G=8zf[WYUD|w~'Kq?qvŃsߙ#ޞpDoO;S=y{Stk*ڽV(F&\ЂedcaBKY%Wggc?=+ ~vwg>~}]cQw0{)ˌ><"d;L$aV4qn pS"O GUu>"W9BPT 庯LE#-/|_1&!cZQsxU%Kc\H:Iӯ4jW'T WJj7I>|mEJ8N573Uj?A5ծjdYAMe1,] J#g}}#ocSqAm3d]Ϳz-Cň!Whbj3fs2o\ލ)6̱(/DXL>ՒqAN,PI ǸpZ|9)KeIAQ0è΃ߤ7+I`UU|$y I,NbQ[A|/by&na fOqstFaǐQ@pN;XLpl妹'M/NY 2' [pj53lXug΀|΀|΀7E6>c0EF=i iX#]5i%"JU*P8+cJRhJΨϗho#aʎ*QI Z%40v^M;0hw ɖlϝ鏯_`K4m3 iyIֿtwl}uT3rIňħc DEʺ_8ʬ /J)K( @/*f_v~{ oޓr2=|Ӿԑŏ?RՆ.QBmKA4X7FʑG"6#z)JIA"JFő{}/7rjCq_(8W6[!T#t}K0C  g %#9S1KbRN%4:ʋl" -M9;>+[!9X$~u{~.q*FC-xސRN/DOXf[6SUO%(Aiš۝D罨VsvK4k!?SoI"lΟg剧(]IGZ8+OkʜƳֈ4DAtjN-֦TG%ko"Y##1! D$&DZ9iD`Rbkf@pN2›TԷ SɌD.]0__:OJF ? > x%KXìp)婚r8ݐU IDAT~6b͗e/qiyh#S]QJԖ"LA+ ֑zH TQ;'WIZ1֒9G-ճgsߧT`%v`LT e3BokarMzjPyܭ2YP$sICsÐu~mYKSy7%])I*.I~ ex|'I_sYgHoxfnr~r]\NJ7%á42W~n0RXtkLypGyګ1 ^>I&zm>6xN% -s:ˌscJ…,vxd8b KӞ9pq!]je",;_ۑY\rPT=3]!X[ $cdr雜-U`,%Qj&OpDj)KC骲])PJ` O&ri8E`iob L'݂ѐǷ;b3ۢ5Y_lk$"aR&F)F[{n郁(pY8 eƶ4}ZGQksagV² mW9?`m7aT})g`ş8A/^;1R^뒿ӥ֥6yu׺W{ț=ގ5G2GWϰǷsIA^KD )c!c#$4%u@J&11x[FlʆcAT`d5=JM*I]6 2{FIKΦ RC$, |r{tYccR`ՓCI8JSƒgW 7-W|AItqlFiI#u_:yJ)@DjQ/[)f[O_kwG~AT?FҒ{чOPм)_s0MJLBTKTRbIņV 0W!2*2bBaX C+8 '9YYVaeC'R lmǢEXC-aEf8i 3DQT,^oLP@mS9!Sek^U*8a"e/*>1 <}4q0r1}gvXkRL z=amYb#* gM*Q^Ь^\s+w+^q@G_Wt?lO~}:oN4_ҁk9c]TUG by._a+9_ؾ>;ʓO.PkFk,^Jx~ «=yhؗTa )lunhXDâ:AC/0 EQra$R`$!^ H \HXaTҤQ4ygMlߓ,'ϑےt)cz"5e[zb >##uFa 8U7c{9_~E ӕIʨlNaRqc| U$Ju>TQ\4a;7xlau뻤RYzoK|tAeyረQCft"֑z 嫜!+ |'x>]>󛄵HJXjXhzbj_ٜoIo`*bO>OE:Ͼ_ >Η/n7<|>Y° MҁrQ,|6g0CI yΏ;m 0Ǖ^1$|K`5rYO䦅`H#O/HۤALQР̰r$Ğ3wtz7(kU9c/̲6ՠ>JY3 ^t?j^AOy qݡ\=NDE|0*Kf=96B 7>DiNn1Fb-T 2D8usȓ[C͍HGY+ތ9_ePG.Tիx8#llCLzHFMIÑNmY-r{637.RX/,H#^([ڋE[pݻ$21Z#үI=-gt_OyU7U?vQA2ϙ>;MZ*D`&6&UEOMY*j X0\qq#!Wn+gYΫR=gNβ<|pab2 »I #ı*B^TDB! kUkDV/T*oNOK~Y[ qՏ;Y',Qŏ'k.?e]6Cѿ~b2+Pd<ѵ.2Q"Lʹh2scH,[{=a(°NК^ 9H\A:C\Ia'+Tb y D3GvP,CNڰA$Ht,۫ ֢DŽDQd*V 2KwaO}8 pkhk0f#p퍷ibkK$! ToPcJ3eFۻ\aT̐r2E+:EY,W79}3@(f9ܓ% sL]Rk֒AҟRM2Fs VȢJ"CA9f>LuT 1S#)9 0fsmg .mJDhYvזx[#Tojb '޺e y(@@'Lu8(- 96JLv@P:7 (V+T#vGY^  0a+Ѿ2?JJݯ6 6 RSxy6XCmI\`|(R$zGQ龶M-©1O$tTC~)ҩR{lXi,eR2h mO8p8?LB.+Cq4|4|6 ޒćaL@ExD^294ĪA]32#~2@.#OPF9N QQ%%F0 q@_(jdt]_H*3&J- 8[ >0X pRPH%`.|Ht[N͖&2$A&"8BQ59תBX%8([%GI%#kHCQQ;JOn;l]N3_ڠL8KB JaB ,c'4JeA! 
aG99Ox]jiXawb7ce"" ?8Id-bLxqM H*WnݼʼnfU6!IN/ HܔFOnn!BıJ˲OwkCG챴 /]dx-gC90M ØCß-`W'E#:֨~%cGg̞%徥w4k1&eqq_ Ֆ(J=ZON@vvW{@N4Qc֎+W=djN[)JC-*Xw./`ƃop.wkom[;f7r ܈%-e|٤J(Uj!RUC7wrtوczuz?<2BE4 j*A{CG Tb3-UZT, Ӆ`\Лi*2G8y8AfwXs飮(P~ IDATąCn^Z?$/;dD͓sl̲Y!ecCT.mw4ӂp(AJd$gI  +RV2T1"b%!TEAJ_ <'ILLP1~ݱg)(LW":X$ D"aȟJJ#|0,G:tNrGe9#w2t*ovI,?{ {zv{ gwV*mzݔq n3N1ď 3(0 ՕiY]&Sґ$(F![$w3o y1 Fe)UC) Cn%a:0$6+W2)E$C#\"6fw* #y; j@3@%>)v9ٚ !"gg{'.s1\s/m'W9 PO0S!bRj OĊQkU~r#ݎ×^KWx7r'_㷾G{ߝT?&|pT[㤔w,0)  'ZD6>9z[uD;6@bB3N>B7T XzyfFvșzDy6{8ϟ8RF&eũcyEU<>43t !NJR R qDcr(qIڌHFiTUp/9̄440*jeyeqII^x{]8BڱOhjJq9Q$# Z8E\50j[hlq^ߧí($H?8]\YU/ :O?5{g9n/p|G)=NG|sK\A_hG̞Rmc4FΣ@LD8?bCK-'ٸWC5 8L2^;q{7p1ՠ;np@U% ITs-f5N JIOTx|gWS"1</U>x0i5Fw)xHE L){Ct{REC1}4C dL0)oYΑZG 0KmX⍐EJ5bqXJJi8j1 Hw>!6b'f%z:~o@\EY;zd[vN0jVh3nqjAr٥28}ml{,Kh-SKǞ* MLnqCh#m7ŎwZ%UeN9ACT*[.hL`}jCx q*A1"NQDhۨJu|0:WHw /B+qԂQ+L% RҏP&)n2,f QcX/ǜ̚>ǵǔJ1wD9ZgWVC̙l-y|kb$b+м8Og& ^2zeBdž4=W{ %oR+* _NSFz3!qJYD 0J1(a{]) 40LRWTڣI bx1^as&'᱅7s_NKhM&3qB:ac>sZsֿbu"f)k]d !]#8Ueѐ%6QmvƊS &/'\%n|N91*us_arUy~|6_{?n?&073u_GeF-9 ?o>o“tְqDu?FX T":h恆GSZ@ iycKEy+k-{Xٲ5W{Dbs4iRdJ" H` yH8z ; $ǖG`ɤlf7ӽ}svaalZ쁒"}[k׮k>{qI+WS9;~HGxX"Z~Srh2n7*Z#Z$= nhڗ,3gW/iQ@Mz+%:UAZ-!P+T(vhbv~<$JnYnm`|͆,lhܖG9^ApQgmw;D#V ~vCpԑVsتtMYu|xa]N'>]'7_y[?m]Wmߑ_ >$kȆ'u!cnS1U ƨXj'-OrĀ6[JteINZmSgcZe2]~y!Ib1`{S?r?xA|/RB""/ZL'~h6~օ"tE~{_ @GW)XuӛOx1h:4>5+I2*h3etbZ1$緛2UXki;4AFeQUӡtʊ|y"w恵*1Q%RG(p-MR'Jvp~BKmH{R0TwaU\#&hG4h݌\Vnq6 n^Ycxs!A S{`ATT Fv6,UV h{VqGbKFc'P9#D%IR5 ƈ@j$ngGLXATT#0"΂7cHЍQIE赵#wJvznY+siK~sA~qjY,yp)mfㅾ;wd82Ħ >dx7_/( R]\+ˬ|zDyXtۿmHШn(3|&G_Iu{} X|X}tYܫFE;NX+,\5K }ATTW%IIWju4o&6(** Хl2MHTwn̫.b@]}n-+S~pDAGN6f?յ~-9ε6뇊jV۲ݮ DK˷ouyr/?4 (^LP uKU=CЂ˃*7e_ʧ>1\fhz,kW*- AŇE pfJb0R*`րi. j=qjae23~?rwWxU<$Y_ݯj^$P;cM!m] ![PW'Q]h@$w{Omlx/S":?[[Ar#uJC+ZB$guZ7yŤ9=N׈1ѣ:xg(Uqb0(io u`(Cb Ż9M-+T|upJe(Iٕ?3w6nRʘk$x0pN%S|+rWвgg7I,M XHRVƽ$+,.W?<ڌg=`jsnU;V|OG'>lCN8&` E w98ZbycS^se=03 }X[mqD]޿ 7vC6>Q[O~r-n2뱃v8u+\wώ1 o2TViJK1Ղ!1g Kcڝ `Z+-&WdR.P1oj~Hj?| ^O\'+Ǭdhv-pFmR4Is US?"ff?i*dL䄨M WͲk7WQgo|FN $$l9HLn_>ihN|A͘JMStU zGG.5P⨺{ B '7Gm Y;WwHL3x#ܝ#6vOxn❥*[Dv$bM#4Eb m,Iиa c`9͑XSeOS5YE!=B*Fj5H\!ŠEeNao6 mEX>r,9%*°YtjrMS:ASHg?/>{ nu68mHҦqQYۑ$) )ӝb󷨿?'`vBPkZ\#]%-GcꨤO WRC75XF"Dgn]0'LH R;yY=Ccٓ8qmlK̫rĶ82,0Wf,]1JgVia@6䰚8&s" /gь$Iج={ ~.2{LWMJ3el;)Q(LsBl˘%&~K՟SNcK_X?7I3alJ0[T_ӊvˡoϊ!yGqE^'3 "|/*BYGT'tFZKAe)E DRʓY#u잔/PNb=>Ac"DJB PGJu0$,̄*Ԋ5Ӻ;9сX>67k--S9ՒXΰfZظr[VlR8O޽E٬䲅*uI/҂RZpN#XKRq KΈVI%>Gqsr N=uUֈ3`jlc} > >4d6u=G[|S8͹5ԩpZ9>/̈́%+D/'P9^`h6!HNB(labJ~ak*{^LM68͙ېR_A(W9}^t*hq=.gh3I{g~]Qfs`\3􇴝Jw0 &m]p}^զNA5B!uotm;Alڪrsߧ7S[˵<ʋ7_?LgXAZ2.93rA\]nJ3c[Kq*PR B'*ADmKW$!4FҢ"-$)1z. McCJ"HܨXSW̫#UEm"O X =霛W䰛18.1,뙴ʀShDۗIjxFg^2:]0:IJT&4be`E`aKtҙZcbM5/BamTQ\,(JBb$EdX*D%FjbrHUE4m"ltZ SݲnKQUĶ:7}Ӓ`6oJdIY2+x=I'~WFv<=;m9Y1{Z1|wL}cL9%0n}lU+ZÖX+PxaJkRF0HڸDg εhDԀ /bTI,Ƀ$=ꝩ` Aiq3a`;1)Y$%|p8_dKJ)ěduGhV{bÄ,42qA^JFeWrxΰ9h:*Ezl?'ٳL'Ϡa{t9I.f^蟞RB{§.//+<|2[\^Fw[VZYio1guƵ.zn5W2a=%j_-S9D@LƆ \A!U`R "#~|ƨr8ި[O] fP=6et׮\ڈJvmS/}aVd I$M%!?р)րSX m_6ESٔCH0D jiS=ߚ`PzwJ'M|S~P3fT"&am FIˈRz)黄]^a>c533?x3V`.ݶaXZzJ=3C~rngÂo_b}U9׻7G/ѩWE#dI :քM0Ҕ(BD5, IYZ=>I| zZ6聃VcXoIQ@GUi$FB@ b3\}?'&=kĈK4?}VY]FCxJr0 1iEUO@*OHepV)FCVm.cS1.\FЊ6 y04c}ڱ3lA}0ꗟK)*yE"ц&(n#(as <{J^'?J=w#ҚI`gB/"qy1c`AX+# w &J].ʟs-D~t";9ɏɃ{sG(M >˻]' IDATvZa|.Z?kFXI;#j%h˸5kU^I"u EF:&".g?H$ G`ET "QN &/\QglL .fϽZ z! 
✪)[&y)&qRt3Xue%ݖcO䟽.Ari}֕YU\J;uϐ!ui*C:QD%,ʲ%]:&IkO.]f1)i}Iz瀞7p!tO4NEĠ9ޞ#YlH}d7&:k-Ik]=5AnGo:n'b.2.¿{9o룯jYs_W}u$؍kdUaM@PfR"~O-̅THQJ_}jE!Hi~f.oV;TXS%jT"͗OX&Ҙ4WRzH *]TD$D`\4>@ƩȢ#Z&GҲx,1utGvGb­wwڄY.jP]U9D:P9kI#pwQcƞ]yFz  ^XJ˩yF!BoA\0/gH+ʍ7_nUo=+ǪFeck ia֥EBL4a30QrD41P 1ۺ'8QDZH:Fl(>ODq*|/ ϕ/>vG:+ ksX^^2" {\ZIOUm4:Jަn~T^Evܻհ6AךFE!p y%y Ļ|P׻ѦCQ-8ڻ%v^o0O .HZEWVYm <ս$~g١'r/gZ3ܽ!2i w3pP0ee)eY:`{uLcMݮ1sw:Q\:~ϴzx^k/ :{W%"gl.e&RaQWEL3J%=8*،$r4_`HG ;AA4qG%2=: r_R=e`ix63WNd>2%^N˸ PR3 ŝş Ɗ')gQG`hS̕]b3&H [ F9 sK75[jx֖D`<\emuAopOč1uA2?Eb5DP`%+rZłv`y)"nǎzIJS赩E"Vdn~eHt͝{tfcI 2Ӝ*$)WmHSAG v bY>*K[tRbU4.;AAT+cLV}HHR0j K}WQׂ ^yL Po%1vXTu \\q hj܇[Hj 6)/]XanZiAF㛬3r LW/> K "VJ'#`xk{^swHo^(4X{J-FԷGMC²WYYĂiA?ݴi8ޓ_ܸ7ve꼼ƕw(wQj^;߽CGv]5IU Dn=x>˿b*f(FV҄3q*gw玴m?w>:ֺT|cz7 F)NSƺ{~OsbZc.gN[2J,dE鄯CEL>9Ã+VÄOl*M%=֣E$;J^?H*psrmBŎ<$یlt s\ۏÚ9zTʉ5yRԖ.$:Ti1 "j*!sh O|0!BN} X?~y Py iDƠVN f9R)[#zG秸au |_sCi=+e9@ ^8# MʽWXꡱ,|Ik<ړhT7Ō0RDƦUňHJgыQjbH@%~Yt_8սH{cKU+O5"Vj4Vŀ1DUu"$F",P C  1J3W5H>C5U_g( kU"T ݞi﯀*Z*v1GW*+:DuhIz&EсjBdbV64-$)>FSr(+1Yq\A){{׮5=He8?e+]ӽV!'\V'0O溺pJaXVeס7%wdKsʿs8 vՊi&K5j"AdiF*mRi짪JPUQHUFCTH#U )rsHԨZsfmfDE%*'IഥЮQ1)ԉ\^P{asRS+UD&hBO1K[\G^:k3IT+ՙ$?AkKt?3Xd"$?C`&O*QFlnSYܝKK Y'0UIʣ!"A5@NwgL](144]i7AujEAC֜ Ƴ;k:9*QKBc>"xk-:G7K؝chK+rpt,&q;>iqV籋2=c&Or7'"jAuhJ-dg:6|bXi#jҠu^i[%$Q=S 2k L+wfG)@ e5zDESWHTX ZYM2kO*> r5DOiJQW9AF/J'{w_啶)=:Yk_Y%2wQ߭qǚSa_?;owcjw (|LvnV`4zOv xp_3 1F?!ڼQECMc򘓹.c_LKKK7)LZHK?_HI7OCc-ˮ͇|g#ULM")q2ŠUi;ᥣwNLŝ#LBY,XobyhE'^".[ָk;}i*iK;"gbH=Uh{5n)5Ox/1P+iI)*Ϫ|>K:v)*d7H!1&],-RLWϵᕓS~23D+$`k8?"#4 b I viXb1P x%>IpmMmI!v|:&. sٯAǁWLe@HJH *?=՘3Y@_6W**w_Dw{h۾S|ˑPb&2Dz8 1`cLI'G =@q>eG)uXL ܣ7-JD 21az}Kt fe=ca%`P(;) tg%[7^gx@R7NTX/ʠ,$e h)A,ĊS7\>w}w59;+=bDŝMt$Bl>=[jLufp z8i$I!n2jOeb}&}eYZFHhdWlL.s({$53퓥FԞQ!ڏ-QF~Y؛>αܿ褦W71'?sVEP [9;6q8uu< Î̿Gq᧯`{) hqbdmK"k-*G7~xְm9 3..y+꥿Eyy"ĄfQydkUTD^~;G~7s2Gp}m>9$X>k|^0h gb/G! s~qÖ^ iDbb tAb3@8'Xp'UfU2KIVa2Ve^,&mZAy(N3ZxQcEbRp8P/c?\0{t;x PU?)GmzXoGǿ ڧ?#֕CiWrk^ȹ^_z Q4_71re+KXTw$Gb/4͎</]nWDDZɹVGr맻8\L>>\ϭlɓ5y!Vx"A nGU$J!EhIT$R+sJƕQ"F*Y^ڹ\n/dyc.z^;L#O\? rk,gS 1VWWKu+hk]V\U.KDdb1ADD1! *f,2 Bb8$I)B^-x!FGJ؋*N̢Bt~ 1FNFAQ ^ /Zd 2,њd+w$3)F])s9H@K·"X1KTs9=S9s9m&3KXHɩb" Ѫ F "֙FA"#|PΚjL$"gHqHQ4껿x*h_:]$n,TD""*: *缓+dFؓxQZ!i$uou8$:xD#!v[GkDrgd]_DʬOl2`J32vV,Q)$VAHr#sTK3܉[$=ӓڦJlbw~[he=פ*:I m?-(o(F?N$ƋW@L:!SL&/w%MчTSTo7)?/HN(,r8mZE-gAO2kŨ21rΫh m"(CDU|TRdd+9 $" qe&ׅ_=/Uߪf~~];`I"?8EwzggH4Yɜ/}P7U}[o0ApHVlޙcJzyw]}^<'lҌa$EiHX /zV$mOyJAo V8TT4E5-"QGbp: 4^.EJ@fn~W3:uq=&`A%∝xb*L~t[^B)Q% :c8%Du V.^Eb(jM86tߓUDbi#Xј<@@g[>o.^OoeUU)m6h"XS@0ۅq'شh}"aqؤc|uTKI&f &ETCJ:[p-텹XiLAP޿&ax.K}꓆ݣٛuo%+ va[O%#.&Do[+]NJ#Ƃd~Glyt'U ",۸OGQF2(*}NLIw[1[5*\9S5F|o610 XD #Gk3*Sr@L-ۯ_g`ˏ\ahU/Mojem1\_Z%"FqNr=E$NdPN8DPS;ڡV]kg1׹bHh$F1Fk*b&fYZ+"5F4TcHDdG'A[b8TXbFG85&N+cUq)c#&q -N DKlmϲkn1)+e}yAz#'%Ǔ# +uJJiйZJY IDATT2W%JRlz5TT+\z~~|wGj l 2=Vw~&E]qMhi!߃voWw={Tt=ޙ >^k4_}f6Ø%/"^*{_2b#]NDu2o;I0{zh>o7ϏVR rO>g6~_J|b*PcH]t5SRZd!c- F-*J錢ZդK-%Qj2 lchjOBBOacJ 2354aOk$bIiFN -VbA4cNN IգFo|#a^ Рp!Oabxa'}4垯fzk5/ `8R#t΂UiBz|LORk%ZqBtitFh""]HYu%)2.->bK7>WԈcC% :F0uJiyZ͖~b)a]1 "!# )YɏOX{unEvVrhjn:09F1s>s{L-Vƞ(κJU9ߖc(鎿!.bNOtdJ/]&֣;h׷.C5>U Q1"6I^qڹǩJq1Z#(m1T)([lS3ξ&&4ŀjA'cFc\ 19$Oƣ=85-|._8st3FQxV$ -i7[Z$ЊcLL0`=iÄ N!8oXVp̒nUc>;KCeNR 9d&wmV1R pLBZF#*F'X,YPa--8''EOnn}/_vOxrAI4NFOR.b5dޥ*/h j[1߼N~ |c\i8mZoyn߳'86_sy~QW6oE/ eq{rz$28z6gVOL̶QWɛwy;,;?w"Ӭ&~?E$Ե{+L#J/85~O5wni ?͇9?XuY6S%AxSm8.]Z3JoxV4#bT ur*,=Q>!͹[:"i@j&AO1i0 5 }0KԠf@ EH APFi{!1 ],! 
b5&2i)ꢑSTkAg0Oi)Ŋ̟p$-i&t2i=7O&j|m,[;'Vڹ>ϯӳ ζy54rjLmږGU~_b+ܺRS!er"f28胕΀>hL;%sUC*AD L6Dzy׍ ^e>+,%λ&TLؘb|Kzz b0{`k$l&boˮm/Y%^Ku6MlONX=8s42%sЃV Iv[ ʪÒW^#/HP+)AQ'5JaH~.bHzts^HTqIBpD7Bf12.I4_?#UUE_t~YFTR;새hP|5`&:dQ [J ”3L5%$)`U,d"9Gؿv{[ vh߸%lJ70m5ϾMu]fcsaا7>%ifi[elTl0kRI ssQKJWjM vRc2 Q ai%b9F2.Tw%H'Mx)J}Te?u/ҨWWԌr-lQ&s7dXNE$.-];(uӒ{V><]~-w&z\3h߫XՄs +X}}NY[4 NJv>fЌ'{VlwrâR*BFF);NKnj@UsljĂ,]rÊjPf/=fsH 0W$;c3h+7.{ZR'^D DӠdTYv__30k+48D'bPPT#Nj9F=EB$,Hw{~v]I$K,.8#ZFCOd˾Iq2%On.8 (iFNUɵ,q1Ō"$Ԙy~EPi vH[AO[>~!HM[ H9]2ZTshPOGr02Jfޓ5FٜE3gFbB5AQ͏U~s F ;}/KPB?@@~^! RHWWЮ*"gֳOu½Յt'1bcPqvՀ꒹ ]'1mQUgDDPR^hBg犔Uk:9g\"O}" =rI)0A A%.v< tL DQE>݌2-i#-*ʨ3gL)d]zmY_쐈% _k:D^9l_/2ϔ$u|t/Rգ҈X7§}bv/ЖiR$J'tmДѰ+nii[k Bu2ynzP >˻D#!ohɊ1((I t[Vk&.w 8u g^4|Lx{g6 U?{,<@:]( Dx"4(Eb$:f.!]nVtoFH% %T[OO[N.iG3D;c]rzU@c@TcgcSVɊF,.q\#h]\v;N>p, bJ:>Ljo0>ѥKTOzrk5H=oۊE09w;]Ta$g:G-%awGCXQRIF4 .Ć1jLg9P AH$0# dl;Bv5R{t#>+ #)uQp*FbxRwQ7oGCo}[+E -R{;d)a^S ?53WpУ*]_Ȗzj:(ML\Yn^7Cܾ9VⰇ]P KDvUb H҄EfUM[6S2ce)&D0iyG]VB_P,,`Q'8z_oŸ /{v'[솿E |1tEmTUZ6z+W#Шq4hTi Fl鸚RJC]XzUΧd+\Nxmy-PPVz0JpQ-SAUGֲs &k̤4cՐHkzs+d>gNeR?е5z(.O*mH@UTY =)NF̲GSgcJ b+<Y3ƴ: K/=Ozx@HUF!ּ~ uZO苧W{."X 1RhH_̈x<nKd),DQitȈОEM쾹^֠f٬`50,WeYF+'wm#QDAt懈JTQ4zԢAU*ǧ޼[UFc9:]4 G=&dY}r'7gO+e<ʄeb'UM& `[Q#Qa9?pDL+T٣oJ.־yGEH7ȃ~JP lI-ҡ^!5*M ؤqI17c i^ՖNGU@ QrP[$9=ĖS蜄/"R ۇ{ NN;r==bpr,s- RzxӍ3dR_?pR>׏dz8teƒ*<.?w J#cj6XWdqҲ}^ㅿ1Uơ29-U26~ Ҟ ϝ;cL\y?0%0l?H:eN)!_ Uk_̨orH;DPĄo'.2nEhDo[X>ȀENޞ_\pFPSY͟ѭy2ߐO6&$czYpe"ՠl^bz5\,%C4:Tl0&?݆KZUB^ܖY}-[L};D^[Wd&>*TtۯV\XRsm2kC#V8vO+tǬ*"Ty QZAqqpq M(& :HeV_e29PIam7?Wըߍ ~5෻?s]ԺIß 3D.l4gXQUsFrmC+~2ёgژMdP^^{y`gͺòIbHDYgk %ޢpk6؅;GTrѱz轼^yZe[c:n A6]pmufBJ&  L{d'Sɜ6J],I0Ci,8z{!g.?8DJiOpv|V%7Y~ʋ_<4G{nyff\!uև SGxԐpIZRoaz=5=0]|%Ф/5` R/-z@Yc>]1cx[+[|e2FޞebU| T[4*4|:c!;ߡGCN65?Mb}Mx2wpglqV~qF9ipIܒrgY")(eg]3+0OҬHo|yЮ$4i`w3DNpsOnخV2m$;h0&* bB"_$8lG47XXי>DxxJuyʯQ*Ki*RIGriΪXLGu1 sޢh.Ʌ I`2ߧ<9UAxk珎x@!b }і3UMUnTƀY2Q*UYj opa9%tbd;=)*KJ\ .g4m'p n]NĂP% (uxuLbf$v& D#&ZG/mV/߻E= :}}f'0k0}@$ BHmgɟXCrK*'f4wet-={3l3ʸYhmT_R5FeK)Daܴd`y Jdyl:ҫaMub611Y10ߟrOq}ҹ:X4yD}/yDDQ `XY;'&̧9Tgor9tOUR7b>-fȬ9@Ck+ܤu˅n'GݻwqIx%u}sU~?+#٧(ncLC uuۙ{)H}/ݜя[pN `k>J-뽚zrd mӶՄ{#RU)>>FBh'(%)f^$i՝bhYr:?`8ApvCj[*˟b=Ҁ} ȷlgwFu u{>dT,>4$e.d`2bPZKXU5,*b62C&yr_"M@{ɀTZRSYO.KBq6qry&y>tDpXdslSb›T_y K؛8.s^"/VZ{ɍ+rTOB0M$k Қą(-$:u$WQ#'#i6}mB낢VT+Л8󇺲Y˕QJzvp8-dx=me]edytA 0B@;E IDAT( Qj5D:rL;9B8PɒH3[$Pg),8DRÕ:7È`aZg5 HϏO? 
kԯs_a[78SlDD7lKwn΋ITE{SiƑO/YD^ %Fa'SDORe.*dcb9{tIg3mb%k:Q,̊ՠD|'% F!i_{PD"MY+<~D {Wryw'r{YI4 zoրBzݠ>鯯-4|r_WAxFƮ1N)uX~Y%>K/tSM 5ү*~nmW?u>ANYמu:JJE[ m~.}L !ǻ w}Nٷ>&ϷBMݿhqV8庥hflX^ШC2+UdKEoFQSOdsIBzȭTLVݙWr1&$4J|m dg#9RfէLj ݤ Md5p 2}au?|7^SϠU%&+褐&$DJvnIVi`Qw, oE+%wI o@DQ߹]2LYxPTWJRWIwa-^7nR/ƴpnssSOd/H6U\ޙkb_<ğ#"bUeBtyȲ/2*H:`܉$/,kriu:<3FrW{beL5%RT1TWXF:OK f_mv/QԢHBm-Rk 1V5ԡۀEU#3NDTՕ}bSs{SXkҶEhjtj鉑hKD*4((F;NMKx#1+G/XL~e,ƢD>LQ%y4CQIskƥQ36yۚ Vo0^{o?&~Q^y~;/ ?Eܟ~gӻ 1Dީםk+>J5DiҺ~{U#@mhFTIBxcLNç3L/=??2(AEqZdr}k\Z)i'b*mh[L3X.j\'C,1fÜꈶ]H:z@|NX+ŹBՂ-I&e#:?Kbh (X?`}/)XcZ~V rM|#]NAൣ}v7/rye~NNy~s:o|U޳vSNc˹R6ScQaΣ_vHd6u-Jn\WlLָ TԄ7 :o>snHKlɒ8RrUIr;W*N*R*q,'RiQ) Hh1|֗n@" "YaWEw}>{}}xxNgKޮkNl[we% tHy  O8f|a9W, 1P& ulEi"&ogڤ7( ֧dU8s_FmdM=;ƁE a`aFFNoŁ\>52?Qj()ƈi-8IBD?-4M7;Cɧ9̬GLk]' y래.8w;rq{{IBR7~)ˆ|qnnYB8wFl1w1b;F$.1ڜЊ]#T&d)nqu6Iŀ?`~=eK {p{7nUk h̞*{w- j r7L\eL@L s4v*9۔_CŐEa^%W!Q Qڮ_Y!|bD#ƿ<='ɻS75)[1~ VŊJ8ײ4{p@ V9p:S Y|zG_ C՛2H6 lΉXv/o<+ӎn  ;,/p\G{3^ɘJýϯ8)# 񃚍,e7d =.MiU0g{Q*E(/) ᔲ-)W$8li'7G4lU ԡ ͝Z1^:|7o(s aA3)q~XdC}!y5޼wJlnmDB)ixzcܦ3n.% 26GVm['OC>&Mfl'5WQF/K"u6zLPKŜR=u ȝOvC-Uju S(/J-h(HG Q(#7CͼbJW|hf"om.hy0ވoWYHѽ)n4A'^T`18U7q~.ɲ.iK(r  7͂xL0m4/fe1g1M1A&q+@ UYxY|%x|L˩û3CT7"Uf"%hPBf1aMJ KaZI#!mbfj!Op16E.6] AVӠLqi~YHny*$9`L_-=Co9'!HGK c@ WزVU%X6A1 .,N_ٔgaQњ@jdlȼς(+f`( [yIC<ro bsyjh. 0׊iLN˶ZDŽT{(ZU,ڶ#[L~#c3L&7Z4Mp9T42B%#6ӑJ6꣭R5vN6N4tR<0 Z%|)&q)1+6z]LkIQ0FH\2*nVФ8btێ#n Q;8LV4x~M)^I{E*HGX% Md5Ak8A-+\\9\\% Fii:-gBl875MtQQ7."XTO o:k\;/sЀPkMO~K\\\j.GF:{ &)Z~ӯ29>HA ЭC YחFV /΃j"Gr]bQyvhGw})VPlr1ɓ#/'j*+W˔&YFkU;4t+JgրzQDCi TG3=\)>5_ɤ3̀LP|ȵw :MT*Q:1ŨAUi4Jњ((]U*(mpFM-9̌/^'ԣAnE)=m>j@$Q Wф5Vm1Aj;ڙ_!TDQ#*Iw/UFB).hZ.'t#HO ImOy-vG|b+r}'('֓391T\eՌo^'coiyO;~SџlE@Jmi&#m)%~\$c9okZjs𩭔ͼ2=%Ъ4@cM[ ˘%.ŚȲ-iIǚgiZT#  ôAc-USچK0NW X):X)}72|{?`k.r#~"a̖y"=NG3^ͅz??2&E}}Z;Y =_pu OpiK%PPqV(Z@bC>ѓzi"';j3W<n<䞬Za9f"+ŨU L_l܊:;IEQf {;j |b$g)bl!# %JԀ$ݓۧx i% /6V;"tkhbm7L{K'2 &A)5%4%H/j&ҟAGVy&>sQ@Fe';#ru8"YIۀNC 3oy5;]Q0"bj]>ԗJEL^, d7xwC!)WLɌQiY$Vd ,mӠB> {j ےRh-^×ށ9Cbwi(|6=YoC"$7RĨWGIJ&륢j'j/M)5i ;L2_N-;G{%5 V4҄ p0GHXң;۔416:61Do^'Dh0-6=F$C T8E"x6 bpѰWeVX4sU*uRL+!2j.pIeg5@TFrJmMt+X6Ԓa%pJkFzVKzns|/Y% *Sܰx(#{t!FwHiK'ę2:>, hrq,#7*~e=*Oyd_N4_z/|AzUN.cI`szB!]V:7S֐RfG PFEO^e2e"UW2=pc8QOen8VQ|/L&jLbLmW:a\[T:u@QCC )QmK)[.Dʖl! 
גj9굔ab4\<^_ѣ z.AY䧞yp|opxO<-;W<>)ȹ Gw-_ >c}^ɊJ狂$=Jӈ1S bOwy)`lMk<#/cG\o;Cn=G6pM"W>GЈ3̋ \ V"*E[ \T|K nEJg <(z&PéoZeF0&"zRQpsi;|櫷˯~Cmנi߫ihJhji4C?1|?w:rQϺM~# k9mJ;|x;KCCjiټQEE]pssn ;b'˚ OGJ60Jc67߼+v~'xosB'h5{޴dR 9*$q  `l`uAmo yܯP5E j _R_sJe{[% )2n_a>IBdxQ97/)2Z5I !t6V6D\А3 $`7pG2GʲQ_Rs|I[J v2 mZ)`P?GlqgE^GBj~Yx]~ELYqRĮthH!QH׆6DA`7-x~s&tOζ!@%wGjжǨp NZbAA#;);aѐVj'gu>X4d iɦ &vo6( U]҆~rCȊLZ0Xjtzj>.I秘X} WOQ̦ 8 R y*#2%JE!iE(8z҈EP#ڗopizc£Iie;Qqr;ʅsr+vq/|4b*m;C[t7SX_\cjYpRz\٨,UT @\>M0*("FށⓂbl86hhAЯ-1%"hCʳ\9%^ OXA6 |'oCL $I<ϴMWbq4PAbvBIDGNe|dR7+$Cڡq޺Ƚ{E7p:Dĭ75+Z霰 (I7D]йq6%`I .4D]ȃEB$:/jHk3c#h-n^ @d6d8'>9fë]PN Jq(X*Խ-&ĮG]W>Y!"m`O1{i#丢dR^Q˨*Hq$Y"K0gܢZP /OH,鱗cʸĸt2X8ؿl2yh [HW rl4❛ g 4EޢըFDLYuL#\}7o]zplL( IIF֊~]OL0Aɚ#Uu*B"+Řt`q(]Qi}@/s +>MZStP }|,^4GkK;߰λ"B,T|"_͆ g3LkSkxm)-9~$-F>pVW#/Ns`eM/*kNXwޢjFScv_\G'<{!rlg_/4ʏ\0gyɭ~pr{vzW"5h,Wpt$;):WJ7"#(>x\ʎ1r'-=giaXAqn%i0ri64nrz89SnMBM/4d/v*JD1DSs} f"qpdW7ra^"M|ZRzl5JmD\$ZjiVQWESַ+W_~1k!&GO[e)Fa4rH˦{^vCkeOwSd#jJac4PL!'Mu):n$c/T>)A{lƊgTFyES7ozc.ږQo~j{{~뼯z޼knQs@˫Eȃ}TXeI'M|_Ni "*L4f vMU)FR|5" D( $ӤZgyDB6yYFr/+gV( ;/ǫV'7MV/ML6J&Fy&d8L5lfIzTvz|zh-!qk=qhrPҪ+QB"c|w8GB$TBDňu5Y6M%iyUI k8IN1>"LGUZ;]S~JݒTw}MW#f)ty$'%n!N:}Y/)[KjY%łP:\RJb i,XʌVb.ϹضlNʽ(==}o\۔ 7Ox)wkQx` jyUuu脛^ҝ}d\td髨ADhň\]YpLr[ޭP' aZ;G0^Rd;<{^VEQQQҽ?z͟yS}}nGE̒cGFBOuUH{(Æ Mi杊ٳA  G ?Q|1VKlր ^S;p%Ľ̵K;{' je#6uN}z_i}*EkDqҎ|餦 =# N*}X_ \ڪ8ze&ΓۤdJ:MOdR̈́:QhtHcFZ'6ÒW+On-E9NY2VI]LR-Y0[E'[m`I b3ZkQ3H ʶQxyzEy̯_`F񦀾}ǭewI p\+^,1,@'||7*bq XCzTc4Eh.!d֞M1jUg"{QgZ,ǂVzo3oYK=4'lC"a)cm$7Ѽ L Tz}{9#Ɵ} ʊ,I/0X MMĶU6(+3ܺ2ҡ-& JЄO * FK*iu56*vխe0!M1mC:?3>U?N;' ^NYXE*>:琧aDשB*o;PwT35Y&NN'$ (] 9[%9.Qof,.Y^,$;mɪ,*=I>$8hO)Q.έJfYlGŞOuE=y 'WKҦE ^% wxA6.Gfrnl%vg>Z%$]&Юq&Bvf2>;l^٠x h1]X*V:in = N>kOa{ vwg.L Rj#! \4L"FЪZY4+^KR$3~16`[ToqHR1/z|+\HPk ]O!Wf=XXh<4/rP` bpMNg-6Rܾ+79:'W-kl挲 lHS9+\[vCBiI-<,:.z4c W9vwV8xyK,&.dDWQx!e[Szg |kGIYvýw#['sW_Y&䄖 u$0y@&DBl*!` %6m)BCN'7 9#^**0'G])8y|T*7%3FQUOKՐ"h8Rg~Ǩ:E9KMk1h` /K0퓹;٧?$L!JZԦgvK 5 bdl!֨8c PUC oXLPdԙ2QDr}QqA5jYJRג/ZMbMSAF˜6gnhAKIr~axp(U-R晉I! GJǬi$""ŏR $Vy%2J0B^ei8b[Ϫr6lttu_L٨: Q R'4 bCT ]'NId E[QUH-teُE1* Y'x?0j$81bD-Rڪ%uFa:*hhZVnQ5 b1"&&c&H1'=(iri72>9$Y)+.%-=oEۨmiWsXSWH$Z65&:B] I\{Z׆ Y*RNcS\і>Yrxvp>'[@Rk7t0Lj;Z,2a{-vuPFU)ŪTJEU@!k?QU:ZDy!"b!^D*ID1jTS=JzaވߊZe`͆q"oB6v HDQV%Zŧov@͙ߢ5hlOկ[ N\.]NeXUy_Y@E5f~J1b9ceVzgVeΠ6hPN38՞ymo-tI:7 ,io̪B!2%Dɐ4O:Ӽ'^U!5ŢFebֲ$͹:A+Cl4!CF(HBc1b#יmi$p'4gtlqO!rJ unJ#Q4Ҡݖ,&*e2[e2_DeZZV\0XjlQK:!| Tq_E}!AQ:Ol*Q !u0〗Gc^N Q9nSmD?.D^BID|n0Ar kӒm&&CrIuKCK$Em8K"ݽŃǸ0UL+)S_t=WO2IBm gY`)i-'AH]=cل]rr#f#k]eWާa2c%=B`p^f rk/>!![rCOvyoѿG*"\YY)'N4R}т/"<@&,jKQ=9=~Hx̙Cͼ`:7D9UD 4NT!s5HPPPv/GO_rN]3ɏtm,UemiM xrs?J^;w\.ڬ@suqGz/PT M 7 iw8> Q_.1QztN'<[TE?Mp |ON˅5lvHR\ء>i< U%uNqRLg9M@1"wWFglwH+484JDW&S٘eͮT(bbR"VVD앤b).P kimߥןG?{sO/Qt=6n*a'!t n3#bd%->.<A#Tf!JCdy\+g&Rpқĥ# hd譵)!y5+Q hbMND9^FdS S!CM7bjkB%1BA%NA`b8jZ4lDofTiusBA,؅x2k=& gz"> ΓIT+ s1͝VL,=!E딬J^#IDQGak*I ~DI9쑎NH&2/f, tF'MFWIO yFk66 ֒-A K{{x#fp, g{ j2 ^bm2*!&:I^BIPHG?bԞ' 1/#t4N-4钢A/%O\̓TMOau; 0/QW%㫬\_%c@]yg"qz,=½)+/41m96ʨq"Ȕo=)Tnhd8@ 5f3֥֩k2J7@8win:eVKO/(y>qvP@eذ+Ɍ1 e3dQĒL~+"cZӈHHʚZ`zu $UI3q!nh$ .ZR.<ALGe|Q_y.?Wx#=h\]k'p `"|b\Ήn =GW#! 
8wRL*95T 2_5,b#*ZT(^ eݍ%R!M- SL 2w>!sF)6n`d;n*4KE}"# FMNM36;'Vw '6+-7G&5͈Qa0&&K.nmckKqdv]LX}E}ݿuߊֻ UD(2iKFB9@DjW_ƇK?a֞BLD]}1wvοл*mItnԶ囸+;=NhrFuXeܖ)n~UzO,QU" k+`HlD L4]åV"G``ys㵖ǯzybK/s,dpʝC:$[$<;lS@c͛!z-UWTsvڦ%9k5WfZ^D0K kLZKk%G="u/zܴÐAhlV $x'& ʲ̖:dF#ijnR.EzLFo9b_>AݢXnH 1U| "M XԒR4pV}bRU7畸nJz\Pdvu].`Ki$tJHIє[@uy;KL3^fD/&Kԉe0aHZ2Ev1Tq"(x;Xx~I6s$:+.J5Y$ dт431ZuAՋZ06ƊQQX)R)RՒ֊Rn6_o\Fs{F+`cT?xyHI\ BpXkE5h3T:3Jbfx|J?&ㄪ$[^eXӑe-!gx60xHfs\,)(](s9Z]ͷi*Y흰;ŃDkfk8yrPфf$Њ5rLR%DѢS==%ͅ9t.CcJ=_ƕ^(2c)!uC(/z̍$QExfk̢{ _:`!bLt*Eds)>pI gR^ycKgo~^s%I PJ4yj=Ƈr6;>}rJUVD<C~Eh>|kY8"Xz] $RS:唼$yg889ԁC漵SsKJqT2)7RHc KWN)iȥ47_ř^B444ezV/>ar_+ ^3Dnھ\0'Dnj9gIj-tf拖xUzǞ 5B :'"]B`!3%VM#ATF k+RADC0UefVD'ʼ7D+TbSm8ZIMH 9:9+X֚W=YF\I{TU`Ĉ,D$ZhnǛ}Ͼz(KwХbٟ<{D XNG+"|yŏTDtć情 V81 EUrW9ƫr/ʹ>Q8}.d~;t<{=W'>U}DD E 1.Aibԫ.K"I0?:@'c阜k"4Ҋ0WsS"1[mI#ܸCz$u%ҵ 뾭2:ToJ< IÒVe ':p|DV\jqxbY,?l |=pjhkC.D9ȕ YƿE|gM K-,pP0{,1j!>9j|q. yUgYP#rq>roʡ(x=܌\ j6M+b5X,2UQaz# FqyUls$DDɼc8ѓ FGMQٙIlr9ER? 0/Q"! 7n< *1{r߈QA=urVOKY}jb#tR&-$k /t=dU9qɝ!>NO&C0*jLbj"hm;tEPIm6**@ޮ. zhpOȳrb+/6 5؉,Qli+Yl.LKE8 0›kR%A%=͙\PqI1̋ĂIшxUsN5!ĨV-%&&i-P@cLKbdw6;EP/]5">A;46TA0 .N¼b" o斸(Pֹw ZT:0V(P? *alDu2cр'\E+Ά01p &Jpe`=C=b8W_z6*&pqhP Ĩ"P_~{A+WKh1ݏ\$^o'M.2 )͟4똆zF5nKYÉѣZtjtVCl²OSlZD*X#r{:ֽm1b6$ /gATg3?M~o1 E7bߡ̰Q$ }[?N+KVwU_C=݁?qV[YYTLC֮]RG gfR&j Y^`D8R],9#C?yΘ&)2pߞx Di248c|,plW+~OD2cPjL,kuIGE$;ʼn ꊽ=cޖ *zFIUI-#+K4܊V6!$lE|0Oy!IHL—{w$( L#4CB'56S ao.\_Z^de=u_i0{K$MoD;_JEqr'6 v{P"(45Wy$>P V Ѹ u"!~ 0ƖS^ݥ=L/v]P7bR{u;WRn_\{3MRե&4"kF4+dm. I7ԗŵZ /t( CwoS7/êUgŝl o?3u9hS6%1k 5Tb3#YiJUJ]vxi"Eʑ:EFI=@ǫHE#B,FBX ap_^~z+晋Z ޴bi5Ѝ[CS[+PI+]kraam4QkYq1D`jD I\jvfr!]!Ju]A*H"5RfMH &tt ˇ?KkpU>KONirs\r k=TLDuqQjc&=L <( .JNkfxr1 GNjhBN%Nð4f.q_k9Ӝ"!AUn>GDūGg+B#0Alf$d!IeX,|W>wHL+ICveP뇖L, Ҍ)j&8H#/yj|5~[v*Tn̼֗' q!4?K{mܸ>{{}Gy]>jFţ5/?uyFJCUWL@=J|F46iZ,_{q+[q#j(Pur&8dHa}We`jGPׂԓ9Co_.؋1p Gfv:+ &ҎXT>Sp.@UP (РlzGQ]r5dgijo(^P\f5ฮcKM1Yq$9Bx6jebvUTK>Isf?1`-\T*xY.؈4w$m!•1:&^,')/n.D7e͔|?揳X-C(&/[;|W_$ڜtN6p_WkĪת-|H5cNew

!Mť T欱-w*fc ;mH7q)I/\AX]1j9'w?q)^P!p:2jxUaSƑHم0[)*09k{'"NTjE*5Br[Bjim4;*%W 1"+YTڬAZ!a\hŢ:N taR,ڄBqOlQ]UgmkGrr$DgN7@yX'KT1qN,9EU۝W(J]95FIsZ,%MuW!bVh"]o~r/.[]S^<x7hsĢIM ̈́'D@%Je~4 LjJھL}F.`LP{Tx\{SVk]GY]߬`ҒB>噛UI?54𳮅Bk v5)˯: #(~ѸUoti)뀫ʋ1݇M%{}B8ZOrƔ{=uVWuXesC9N䊎,Z>T,.Ef)yj{j-dQunqo|Eٯ|P2Z<\Yc}YD|yJyA6bbKj(hq.ƒmpء-{\'PHA4[dI*ɲPɨ:F@YUU/(K[FvT+u#̂" E3(*t#JlVY?[ֈ󪪈 .\Ug@[J $\W_?7"O?7-'tPK\m{g|嵿R .'7_r0p9M(wka?>WFN߯69ޭ Qx3_LA\0 IDATDnG?juR9|SOrt>Hy!eOHnPڣBcg + ;76tz}M ZF̟8/UҸsL^OI,ѐ:VrF'vX4y:bnIJh(恆8ݦ1UmUY+ ?V+7Rԥx_ѻ'}{fSg3tp;$UH0w oKZOj9aN1k2$jb*喎 /aΑ"0BZIӈmRXXbM@  ZŔA~!QL.5eѾ3!?(YPoAb ޏϧTS$˛jL VYtZYQ 4K˘\⃝o Axxm*DsD$DZ)Uww_w$6O-$kVh>T5NDl ?=LP}F1b<x%^ S2+jZ%./IcYbti%Mj{x_.h3)%XU4bS;e(%5FGFja:B>{"O^[,7h6FDgNDN|ωWQr|3-ݟcc0h1WZRiҖ֌%Rd< )3B0: Z*UD*Ir盻 =oqɗX Dj]Th4Bl=The ONPFg" [ȥ+b'%iNNϿ O_巾#q>}ȗƱaN"eXZ#_~#z%UV֍Ɓ/=mkeVj}ˤ/ߟs|G<qFV]`qzg7P ' ;L8w}g2t4e+prlg.IHYs?39֟7_Tgؖ{wqebl[\t>C9=M$JdɱdKNIlsĹ #0|N؉ȑ㈲dQ&%I6uΩj׮=Z_.dKljw}g0);bWlbZR`#$n c%ܬ - Gy+;Fi$di@/HFRKj2aP8<ɕ6"Uβ[Ul1Q)%uwk\Ǘ(7Yf:.sZqpeɹ^VHzq1hɂ6@l-wE9('' ED#ZNgڜnČS~q{GgKX֥wn?m3n3*'l_ސޯS U|{@)F-E$ 2Bƒ]F, cތEp$ܶS327duhO'E)|6s?DVjQGRω${Jۢ Ӎ&E0}Q>FS/V_4&t;Y'XZ̃;Hᨛ!N䤕Rf(e$f0n*:3"BBδfu*AŤ0&}HZYjuLFAsVN).l6/&. UvEUdX+x}yY~0WT` 3ct'|t>Mէˊ\CP$s(t1a(I#KlDq6XÒ|3-\L-;xVK6_g3~FKTeS{]yvIu%`?r!şƂV<pZ29 IÐ2dg*la.Vw_Tj% AN^IH Djf 1*1"btk ~W}VZDk0C !⊒5Jr9f 5ݩ,%gOY0 xA?$ʂ|!ȷ7R.C5ӳ<>~SNYꈟ9(h=i5NZÀ܆yRCj/|a0> "đ~_oڂ|L[־\ևW_epO1^Yy*N}U39qU9_|Bw߹!;{LyuG}.bT\]Sp_x_yN߽?t?"n,T%Ki/SٜziϿx箲p 7 14ʌn6WZ8܈vc|)Qkd*LzQeh+=Њ ElÀi5ej᪊XOl +q2SjLTW\MxeߎCiI*6%#r9<u]19dLz˗ #PJTvZCiUgݾ_~xG^>ꓗkҪ-zPC.]:Y)׿}ʲqBx Z'NuJ/'=U;Hƙw4VHih*RU;R40+hT^ow/VųGG|R]Kzwk5]Yzh'" T1C&j(D4fVjT :-c̼N,e>59hl>Dc8 Q3)DEll/9.Q^ZU>{{MrrKT.N.VTIG"2LwoP3ܣ쒈 VPT=ѱ*ʽw9b>!OLh}#;B8Λ[Ap6 #XS4֔y s`f#a/\C{>)6Ry\b57N֘Xĵf+"sཨDrjL Y՝~ɫoW>c2i֞s8NyT]<]ԼVJOuT2q" #¦ק/uWNΖ1ZzϷh~-nzļk١+M;Qv|$>qg?-w75}, ÖLʍ`Փ FR.N"A赝XR$P ńԻj"0$~dw f)?<_yϴGͰ" bҏeG(G?{hoX@>'/c( 7;4W%jspe5> ˚ƫ^3D8tS8ɧrt\n}&ϽtExgCgPHYG u퇒Z;_gx!_~xs69tpȃ9Գe =sjl Z~XJߪL1/eHkp`O{}QSSRg*UT u<ŏNFU[vw^ NND7CDעN"QH{wQSaҡRzfSj$(_Q4/_eaMK01-,Wvۂ֪5d)'h^}M K`+uI)Zt~鳫صl}MV~n@hJk>\Ӻu^NŎZKF^Uy~wfw4Hիɍ%/^*II&#^\w v?{2g`Wy_|ՀV]zہ|Џ&ܣwtEmv$+dɘtJ-Azi,dV)^iKWA{gg(G5l8wA+7')[Odc4*mKOqlCjMN_MԎKW|߿^Ką'Z<^vɪXj2MHFHL$U}!w#W,t>xRZUňGq#҈ >]g#)~dv, G? H}>'|(? G\?Dq[9\嵺JE|h<~F(YRy[w~-$jkpkߦg?,k\!j5[ƄML5Ic) YJ{tf%@ : Xf?'t[4Ȣ) GM#C+1AS*@J5{ńcC5 3̜ z 9S{VC?RjRN@\*8e-Yy,6r]Y= qx/&t|+Ձ=b59!rؐT‚ijDI!E9u,-۲98I" DZk85 ҙoFcy7$jTp{KMMI2kt_W4ZCa AksZ>qq@OuWQfSIJ0ǫyuEΞ rzu,h!.X>e ޱ^+F8%>y&DžtaElΑ/dDӂ-1.n騌L#T*Y hwI9OŸ~W6)ޚj_{\L/z:=$s@'cV%CKL-wD+G{DOӺHz}HF$WRfXF%oj*{Rj $ "\k|J8Ћnvbkͯ'[2ټ-Lѱ` ci喤D'p]됖Fľ4p:=aa8ъQTvW3\mk8߂n{2| (Թs)7:W^ը'ǦG;Y辔 P#`Dĥ!z8кtZj\z#S1*`Z}0`%RʅKf pFBEk bBu5dZLEmH6IQSaUy8 IDATNSq2K_r'.uԢN4KYR$!8LHhbTj|Sx1N 36"·839X_ fxrweuK$&6j4mٔwdai_a9k0zmhM.V8=-uJ5M>kD>]@0jYQ1HT7ߤ"Gg3,'6DGoNT M=+s8~O>qU.C67 S3P0- $0F ޫG<'rTݐ*AŦf2Pd*LU}r" Kziy咦V$%YOuK|$>وd[ƮQ5C,[v|Q0Ft<*ebah"xEv'D6QapAl-*HXmSVkE۞fKFTGK0GģnZ[>gT][Z]o[+r{5zvEOxWTQG썌~<+"=R/է.v;mmWD\NȰVXigzn Y,=v%3>58/JF c @tC{F>j6?$Qy~b?la8>1sO^|N겚J\(J0iV?ZU7ޠM(}:o7(9jxuQ^:ȻߺK{`)CJKe*UQxO&8A\E5)O&TQDjgG`ȱU#a?/Bc}@>RO)"&#E\+ub QJǿ=[mhxKiMD+Ř׳؇ږe`УZUH3`ƴYleamqAS: K4o'UDLm9nE8Mx} >wَ? 
v92=#ڌhS%0a7^%,k"04bټZOA3cI$._<{$*z2GFߘ K[~tg'{7ۥix2PcQc ay ~E(kXw 6^leyaO{)5V_YPgwBpq{ LH=&IGs˒du{`/sy RҤ5x_ІH̤ZQޘŽ6.1/i}_,W:e#Lw,lcC *b|G:,lEEloP`H} Y?%?e߁U)_.SzYXYMN*QkOv-U؝YuA­g­']{m=qYu:_1,$uͲ|=N|ʼ&P Mg޻6'sWyTm}8 Hw ѣ45iSMOOq?42=Ŕ M#y|r'Z4|@uY}ZqT&FG?28 xvuCʾ`b'A)G'X!sE5O$ZMS ^kh1eZf=i'|"Q6>!mQ8XokjDU"YѬh$isqP6ňd a(D`-bI:+"Fr QLǏnKZ.1}G!|D'{\z_?s_Q\x\c})kDROb&79y1Ӄ/\K4@yl/Qcp[1\FhZ|5Qbj M<1 wU?#Yz]LV˻Z'cYdc0:|}G~_>V9AQ-$ܒf1ze< Afz}@RDI4 _)~2QYV$Ϟ;9gR7[e8iөi=LbXN'J81 ے9.^l/f2֢CĘT7S 8)d+643ٟ oOu$jk(&KyW_sdid η-w.q=S<SE&M%<@8cXݺ(i^#}myzUxm CEDBgd9;I3%$X$I=ДSB<?_ȠݑUiYChHӴW5Us,>, V;oE Ddiju"J41TĦ'Ŷ-u$s$߽`i7m#mLu.QFb6H0yv\S$¬ 6Xq5ûD^"_VM9yC(x^QZm|X5r'Ӕ%t޽GuRPwzW~b4%'_ޯ?Z,x- mWF_ *;0ݟrgη>/ן6\GTN< +Җ ώX ĨVKa;~z"]C cZ Qp\BϩjKL7޽3v!IH$⦞d֒GBZz'FΑ48on)'"_RK~vd}X׿ג,mw*?2f֩~M ء Ҟil HKIVPلUw &ʰ7)"rNM+\rgT4ʋ^Y8pC*[##U_9-\MM;X ~2}A>ق#Qfbcj,AF[&*H՛r%#5#0F@N%mCrz$&sj2==.ge[WȎhoJ;\!hIr{.1i0y4Cl+zhhfY,I>5:IjH˺RɓCc*hݖNJ)#Q15_} 4n=)c=_1-vW責[ɽ.uA4fF"K㧧Iב佄d:?x|  -z| ?g2;Z 4>LG2w.=3 0n yLċ-[~a+C(B~l.1e:tg\ܺ5n-ټ֘"'??Ӈڨru :Xٔ?OOYYW~}$S9՚\K71X]7W O|D H.]9UgF`ZHSOi)7}DDy5Y G,iΦ|d3ޡZhKa&)1&UvO$yl?ƥMLS۴ Yy̢@,&n!޷z4?rN̳\=*[v 45U$b|$I۾FWVT#Q$xLݥNqj3U=+$Uq]5XUbtqGU1bc$9a_d"-kɢ#E%y^B@3fQ n<`lf> FAfP2RXUqjOY ~`{CH֗|C[jSv8䮦\F] ZO4 *f.$=ZEmɣTa)f3.8[k6Z\zjn/ӳ.Y,DA]AAhg񘭞kqI`Ye2HH9z:mfRj!o@bK#foFOA<>?NgmJ:Ieei%?$kq.iwKTm$x4 iU;sV۹o#F:BNh Zn\gĥD 4!V5x|`uo^~& M cdz ۯ^d)'Ow<6鼼rᢌV'\qC'Lǟ':?=+cBZ 74kzѯqgwQE&ˏ?M-=<Ѿ81րOO%ȃ oF:ɨ %=*)bˀfFX N(RGwbj)G-g;QdCĦ@1&%:8ROP+15T3̧ 3l#dkz-|4?s(\(JdDċe Ha;ǸM'6_b沸ʐm1%y!\QuijXHtEWG?8& + Ү]Fz;S9ou/^v^huPQ`[̑ʐO>6"nѩ44!}qclt}C2=ܡMIB:XW1DCRjL$7jhDϻKǗԫGċB4`Őȕ -NMpb1&Dqbh"3IsN1|Ǐ{b)LBYf[Z2axYz.%[zKcȃݦWy5 {EֵD2"a=kW NHQ4[.HyzBx2Kš(M1Vϱ*RV/|s ͂qXcn-wmu4o9L+rrq1N-}([㶛f]֞|\\,R$E,[ [v'35'!<%H^1hmlK$),YŚ;gZ뗇}nV{ ORtԫP⍃KܨYXR_`Z'gE-4HJvW .NPE\T5$ ,}x1"i YQ2eA*^C1#Yhg$YĢDLlLMZ ;YFa#*JimѴP; nsuc-2+i\OAZA Hd$jAMCߦ5&ʩ&V찋Y쩵FdVcT\ X7'Ado ٠ZHd6R;7?'RU;_| _[7)$mNJ2R4874Zz-%O{O}Q}YY".uՋRӪeMLr6FƬu; '6Ȳ=ݭG. f`D[z H5vlΠ;éFbĶ3MODäbXFq"NWaO$+aضjs2͓7)lQI]B@A*B [aZzFO6pEb$C~ZzTN'^KxHzx( A1# 4h۹I8KhP"2QuWA1^Da^7׿zyg93t@D[j3˔$Ǐ1C``I+<5x*! z<"f )j (A D,_3<7H}QѴB:`@kG}A|@k`[ PlHM&>ӧ淘JAҀ.&șT.tk_460rb$$L67Vxc˩fA! $ث  BLL R-4?'Խ6R#4jQ"DhFO>ڹ %n=BB#]#j_BtwQ$EL#Y*;_30.Ա⇺$mtDKT a1ɺ(&)Q􉒌$O>!"c.6Go7!A)F'"D2 RCk$t0&!k]a}-͢6W ^KGGYyy;Χl (փӺkl)imiMՠ1కzdT7$P*S,%YR /t2Qo=".-2xwٲ]6CK~_V.nl]Y'6BZ=)YsNG )oD|=]SoUP5,%\#~xW#QB3Bv"<3%ε?$]:+KOS]=;o|GS>*?]FQ2/sWڛgqNw8 9쳜|;7mn~!gU H oMc %93"Hbd0ĉ(be'qu4/ i,R[zy4f]b1v֐ع`c=v2vJ(*͇Ѫ&ΈL'6F06{'e"W,c*| { 'wɖZj##} E)Z 4(I7#R K9Icp>55$.Õ. 
„s|mo Oa∛6'=BFl_੖EӚӍ*³xxJkiçH[ɘ;ԹîbQڱ$+l&l0(6ǔc5GAjM!JoGRW$e J'F@'- VB-kԉ?y=͚/ /.- Y&<F &#m1*0q< D+Z4fne(4Nyejh6Q()5_l<._Hè!964(8$`nge/;`3Rз=XL+D\-lou;D(isz \쎩Vq9?pu'J:\=}}C{3)GޚQJ IDAT,Rd6prX+_~Qr=^NF?;GTԊ*f+M!ۃ|bDi RR z4cvKH-F"r;qMKH̉cD<ߓyoTnԼq~jqJpEPbYXVb'Gl7bI+ڂ`ShHI>G~#ħQTDCAQQ ZϜwbOމ0&YUCs}F71sƸ96!(ş>Z&}Hk2SW J8 ^%x"Mcm{/DcHMm!ɑ*Ѹ9E {whF؂*R=MVn"n:zbpYl};?N(T;Zu/;wsy]zŻАQ4B wr.:0*ZeAڳDXY+J"^9EW `,XD{ecӇ,=_Ȥ25>} "Ӣ%;\9e&mHEz2=n/E=^j'X4.%ZDuϩ6Z9IO(ܾzrԯpzp㟒+?tt{kK_j.c/\[|_g<rm奜~~>+[;#1FqK|WĹHnޗowvr]&l)^:3 4!F1IQOh]bܠ,ة$NG(fopx󖘹m3_~QZ.$ J ]E5mjj/Ruy]1qDk^FjPi%P{g/c )v0}G,%諌uB`v@X "RX<5DhBWL$)KB;F*+LaHJvB75y8r3D 'mLBSSSbbY`bDt6u!ģ[JۨI)Cd-kR;u^|ZLLS7: (:E,j1)6qptd%&v$fz*)9"bL("ꃠ$ókYJJg}ݻ%ԲGU}zUywl6b=du#.Z]w%vuҋ NWu1*vt-Y2}o"tO"ZЍ$I$iV"ߝJP%t:#Z둬tvc-@‹k]Z,5YA(49㬡>{>泟hZsf)V_D/v~ H ݺYh#j&0s'SΨ7^@pR na$҅a&'ԥLBPSDTT _%zkYpڜ&(u˂mE "J'bs_9- Kdk(ƈ8jGnaÝ9^H>ߐ{A(@,g~Y81/7w$&</>_=&ܙ 8*fͻd6i||8ԊH6;:iZܩF ͡7F2(+o7lzF1qPWR S-?עPҽ9fryoFFwDRq%#{z8+ ?69o/ J>i,Y 3aeQTKisp )H-X2E4=q!kt$ʵ*%5J+/(V1uQD*5;['ŭ"(SլMC q&=l&(b暘eE+'!JpqJ< G Aiʸ7dD㪔xN>>ƽc0<.Vi# Z(ZJ 4bTPABՇR"0XJ,N#Fm=ػ)Uoh:0 LZU̒tϞcWdnPy(I+<]pq$h*.;BCNKL -TYDj` YZt0{zx^U?Z9*^*m˛דerZcwX%HLW͊tIl |-=UDQ_@r҅XԷ#>~ !62N᪙oO dA}Eܻ#j,G.oߣ5 ^bi!+rW8vo\7Σww)gNm*HzVaZtd 4ƦD K'XT>u=U3QN>u$I:]󒽫9^-ɷfFS`;+'p0(ΡyBek%b,s֓$!W/nnLM鈿.hsIx2Zfrеlj1:RasGhho҉$G+"icFWZԊb&#F;[9i F-j2)fޢ~ζF2X| Pת!5HRo(IM$2XVh/vڙIEݙV2X O)pc aD^іn=R;`-a&kNrazoI,t*(}*dݫ[b4|T$Mr ԓ/vCа3$VܰEQLm@!%6QAqUuFO&VNʚϼʧdg2Ny[N^(LΟ(*.4 7HiR*FAFVAA0ꬊ7M <(9鸌{9ou_w6˯?rgEUۆ+hQ}d!NitthȜd ̼w=^b'mS+i&o>]$E|GWor_ w{ [.eE< O /_gM;^ ;>Л_&o=zrAǧ?f=ؚtcocGw=d41Y+m-tNZ130;(ADirԝF: F*bDǧ!AJF"+^Z~iQ<(SQ K+Ie);陼 uG49$cSBl{EL*uMJUTFw?"uaLҔ=N[)i>VUrFCJwY\_,(v$/zE{!잶!cNd+oD]!W7Z~OJ>H' GŴWio%%]yf]~ۏ>>l`&)?J&X30/ U O`T*l;(ce [Pt{kb7Rn&!v,{K^H1t3$K܊)% >^KJxeUr}iqV#OJ"*YNU '!]ccĨv3⮬ ̅f}`|5o=mq1JM>N4Izy֟+?Õ~v$;|닿lsSW_J|q"D^C"ܱu>K#_Wp2ee6 ĭS_".`Dx+"׾׸-WB4 Ti֔iIT <2 *i"E:ӮPⵦh=Xt|C*7d2ftݺ#>8 T#1b%"F Qb=휸 Ï|Z}%2y hC`ˑ $M [9ZjH 'inf ;j](2lAK^Gg6chKQ(ep鰫ߺ*PkbXN.lܺ^_Dme)h*A0ޙ18@}zpgUҩd!n'v'N]ɶyE2lk|zK#eᒙGQծU%awڍQVTyA%J0"!_HduAxF?r"}6z g._9S/|K.Ե>3< 7/|HV9Y=| 1 7ߓoɴӎicՠzk킜9>Yh'Vj#g ͔YA4'Dihq-Qݨ3Gg.kyHw7]obտK')ˬdTRxeidFE6vk#{6Lj͛l:@d0#^;dVehbCѱd}㛌WJV0xF{T q}#wcrЉH_}ȣ}V*CyI2n\?I•!5dK͏h!7#|}?^9JYYFĮ0Ny0E5'VX\%qDZ JF`l^(f>!tF!&5 . 2&f 2Oŧj2Oso$F{e<>Y GP,O"ouއ&1ۦ}&;Dv1}t$\+-2)&IBME*5Jkp%QKc%b \X>Gu8&{O,5)?e߿Nl|t,7A4\$]XBGcl=@y.-2ݞFow;f .M%3fk^k<4P:!ݶ9a9>7=%qgsgm:RsލA,qILg]LLL$|?YeǶYv3κ.V;$pQI*N0d3;LY͸M)PQ,Nc'NQm}旑Go23g~+, O{'7nյx<_`5dVs~loHlBP VOg/rgoׯ9v4#1c맸/r\&6>s$'uA*Q"rLN|]h\c,$`b<g Uq%/&qڱS,_U߾+Kl4Em?"ǎ;)r:K9Wj8,{br5L'&d4.0:QJ4mLhx-0=,9ܚ0 M}5ysGrGs# 5=! G-Ujbq)Q*htK^'hj"B+& *16ȧLbTURU9F b-6Iɺ Y #xIS!p\ydB}p@~KZ$Q KlR"q_&;6WVLo3إ`[Qʽ=|Yb]$12&X,m!M#7x:YXDq #,?{]*ξxDFn?"<ڣZX.?IcP9tgFK]hSnP9ʩFޔ:2NFp)jZtZp[[G3(paV-$,b%lŤA.Xc ϝMH";|1g2g)~KY&>q\DQ~iڜ̧u\YaU,/Y?ojF i=xd-?x)YBWUl{"W7Pa(§ . 칌{~+i+!<s|ۏ;A$HkOoȽ=Wh-wx?!ߝiqi V'WWi-v[ܚu:|V+#->b>g<V,n)!.mߏHj V9X b::=/ eLj!^(MY=)@:fE|F4D6eIdT$CH5Dӟkp;_`]N~}~lpk:xE++6Ҫj"- 5ijݨZجl)&fŘai8 H]!Qk,Q`[q3! 
Dt#Ęҝ=1P;{|k1"V͜* >W^f1QZ@MP9M׃EM7!LkTvgȪ\@BHGLJn X=JP&jwct>F *]D U/":C nwI "FR;  &'.+h^?x@YΨqMQJ0+gR%LDZ2}&$,"pXj XMIԈJЦۦ0ר|+o&0D|zZtP뀌,kYyc7n9VzKD(+d#鑄ݤæ$g}ynd:3{MJ(y3ⱼe{ªd^6FUGtӺrlp|%rnKvKg~^+_?7Te?󟲘K{<9bR#m-|^ե_ ޹y BxRRŧ__i]?~ZZNG{Lrݷuwg4n%&-+QH!F5yA$f2/8%VOe.^7\N&h5UOAUVKfd?*"!>//|%N QdD4x\'ǜ"h5|""R찅QP9 ^Tч6Q" Ē1IbԪU7馬:&5Qо**Ue ̨1b@dTtHqU8G&M716ЩDC4XL<H5{suQLDx*hL!#fhe;+o>׵N$c ;>j^u{X˲3sukꪞ"ŦH")YD*,Ƕ  k-CC`v@d˓)LG{`5ϸ֗}nu%Q!Ps׷?DI|@;9wZ$+K:jDvTB x/ycpcTtGΤ<arpcS5x^7/GcZǏ*nWN<*e ǖi=.k;!vDԲmD1i;_{S?/fx(RDH邚ѹf~%EJڙzܟxCyUL|췼)[7 {ӧ[WN<[S'R '_kU8_cu}?8 9:4k\yQCMY4X#FS>Op9}Ut:vhW;/}>KCFS.*,JW5tDOiU %{0BT+&Z4z)bhJu<q^mM\<"aW;-+wOf)7DZ{Oௐ_SM$\Ӻt\+׿n~oq}7x~V߸|UR12_X.{Y߹ucuWQ]c{A^|Q^wL'#޺6E! VfoK bKaL}HtX40!)zJ9mK/> ]DQ , ~""H$RU=/&NMz?6Au&pHb_͟B3v$1ԒŖvi77!+yKd Br5zj&. !:DQ^se]iK8TѮWr0Fp-HdJ.К&ʠ+:9M򉘥>SՃ]}lGb4zBi2ۚDytk_lГbw[FD&+K91b4Ѱ3:VVֆ)n4*IQc:Փ%m W;l-Q`xbz9!Vjk7ZLjXyԜdf'CrE;ت-SPؔ#+yK<S=7_ݸJ뉧~9g6ƥo<ں?`{6_/3[o'5X۸Up0K/}#e;kWH-V`>!+A!Cw9⹟5}&c.Kou;6c\tS_.^Z9̰iD̂&GS =VpϰI9r3gc9F4{b]u#錬=l* +јo}0D="7#jm,6vR1I{WFv7Da6 "B7 Z` ȐƖ*,WhyF;SdZc) ; KYB2$/)S Iqn;="c|xF{ },dsbhP9DU'{I4 #]>v7!.dJ&. 8ơvt>uTtLĎ:H㵁$K].$'ڥMg9&M>;qle@(bkyrp.xF? (…M1A=ոct 1VQ'%]ܽ@cf9i'c i7ûwoݛPenbP"՞tc+t; }uW: [[DAѼӽ)ռdFB)m@r y'RVcY[I՟]'Tʿ{\Qprk/=޵ׯi3kݑ.\$g66HT& -ְ%%E8e~.A3խtd!~N"U]i-jItn1-u H&dRB4CWs͙`"؈j3`4P?%W/+z}%p?(0zS+աع9edv$B^/h5y:㟜.Q =RkU+~G"hHau9*Fp!A4(ґ`:D:bh9E]"7>"C֠H5*BM "GTk?9#<$(!IJW1K6gGX9+N4*ڮlڲq/܃r5\Kb{ދjը5_TE[W>t~lEһ}Sm^)Bp_f2&8V#Na|{aw64.!tW8R)sDpDSI5§)IkOG@ju>1Y -.|KYYzMR/]{OnGdЦ2ڋ:21@$F"cT j /j aߩ!~Հ(Qr"VĒpoxu ߟdymx~[L Xs.qEMi:+ل:ji39R87ՁejϨl@i˾+c-8\1gs.|IO;"nT_Uw "{h{s2?ϫ矸O=\ ˽U>e&];#P;I {}>Rq&}#xhi)j$X9ug?s<,a>s\޹}UU#V DT4U,xU QQalL\tK' +v2HBvgiISH,B-!PVB`>SJ=!lYdzcDClZoPJd^B\tU/$hDLfH+.sMVDAU՗M1/TЪɜz>oQR IRTj I.x+kZIs@ky@2c\MZTQJ֎XpLcټi+vЦ yE]ג2 {_꩹/aҝ1#A xzv[)B[޸T4CDhijug/^gT~TUE&+ j>|Y`QKZǖZlSSi5et_6;|Wl[~eog+ f<(ߥ(e)Oy8?7Ki%F &"Ê4Z>,G[vcI#*)R0U(AxHqX*M$CX *UZK/mYMLy]jYWA,lŰH\iSYàIQ8T=TAW&AMP,fC\kk'tr=!,C!U,yṣK㓒 lj&ȩ#Ֆ\nm,d'VqԕvcEQg~A2q]MD>;IRd=3ra|UAժݗQYxW2g`#h%L]+󪣩D.`T16MTvPaR5DηHCM)x_+D>jсo(?PO#9?9ym$HRC0v9⧜pC$@z> &Sb VgUQsazOWZeg&y#Z;HGݮ"0/kVc1_Գ'!;R]}jOn~MW+m/?o&|zt+?my5a[~?v"$3x\!9~'7Np+L&z׸~lnVSXiL!ò"Y]4UTJ)V3+MT`}Yv1V%Tq.AyTgy7[NՊC@bjG[Wtb'RzD +OHzQ{~ddbM1CQk&rZ z IDATilFbT W*6| +m$ H*RdžoΈQkF#WB=V%&`6WZ&|8l[Odl[aFܘ -1xQ,Ժ mI8( NKkGo[Ed 0ۥ<*<7k&HH׎R1`E&L&RJ*쎈ǔ牬ŶA,PԍjOZ ߑ~ǝe3 3s"/9W1pGA*QzF<`ty+>X6țٔwe3Ps5`C\M$<'3(.e'eNʲ~+c&rS$g=ż:ǷoQ\Sg_`w=:.O=<_™rjc#+\I`Q%]?ǎXc+ $%#Te-Οb#K+<>ߑW^}MncLua˦ԉcCo KfɌD5tFmLZHUAZQT.*Js8l-FrRU}g* xzh~QSEL -0ڗhDlc#C0 z0-i1+QNyLlɁi25Z5b(㈺$U$˫h1' mH6BFq#qFajqNtzEZEEks0s{K$- J1"$g:%ɔ.ߞFkܧ]FVŷ_ǻ\<ƉuoLJ"}ګB\0AB ;NfsOnl9T]s樗,1Y{k>IZϟJ[]6gɬp? $su߽߽Oc8HO7N lRLu?ʽqr"_f. N"hӓMSb-i0  AEbiK2bNpN DAM!(CB"l1(^܃ZTJQD͝#=2 ܥFXVM<^DDDpFި/󒵼_90t7weuoQE]IDZP)kI&Y~S>wk:b&|aLf^ǵFhx[wPqѿ2kSəiS%2yAZV@:iY35d T|%=b(nD=$؆c|`a诩6BF%FwE>Wn𵝠*H՟Zjw4 (6h _y"/~^h#V-:'5 Oj^i/q NJ'HsU*Wͽ]#cz_3KeUWJ)_}Ys_ꙿos魯1:50 6N#tۄXl+qOI͡ a>G˪Qr?aHf{H|_~+"V;4_*$'ܝĢT⢅&RhTz+m o$VA5<4Sj+/+;U~B&;3b] Kk8X#;͝ζ ήJ#$nwex$ 9ɽtUDzzg>~-%D7&A:Be~(vVm5)-&ܒ;hd8V2Ih,dtRv鸀@XI>xNajrdî-ACӚW2ct4d{LTZL!I"ֈXldž,.~5"Yֈ vkゥ[vs'ή681@U8Vdg`{s8Zc*q1KD^)q\gFTUA)t!9\mN@b@ȤOdEMR1j|5,6eLV-fyN@ۣ(KLJ61,:4cȋÞaf~ zQf&P,0o[d$ľ[˱YPH}U;wuȒ:%8i!@Z9#C&mZZ"Ƥ K¬a>v~* \\d-6w`Gie=_C_a.o:'.߼kG{Om7(q.U#nötVI¹P9upKLEQ >&/1͈-ԍ`Lc~jw6/- ?}4c |c!?bp:vxcAxKpf5uzlz|/OV{-ᕲtQ0[(:wˌ]6[D0[39W0f|obsf1|:':uߤmr,7{&٧<|8.W޿ ʷ"D]XC~piX\Ow ^ܾr>,wx껼pq Nwxؾ/zx[S>čڠ>DeM\쏩"%+Bjڛڎ! 
544꭯Fg(i&q+6r.1 61YLK v\3XN"6u?Lj)jE'~ kE @U N8T @ {|dc$!(|ά7ɖ Bq8$@2S8۫x[.uS(X fcL:(UĠYu}b U8H3t8d3*XC$yVT{pOKtZhU7.iLY @GE].&`k0iܐ;S'H[- {א,ؙBlJf86ҼV5f^"V!ߠ,iO#DII1)Ÿ0]F zw҉>G]]}.J#<tMF3bE∴鷉 GEV!Ɋzg]U\홗CZ1Dq`=g$AT`a TzФAMy_'2ɧNo5fMrNYd꩝Clw:`:Q^te 50KXe>{ N(V3Cjя_aQ'#?*@N*ݵ.:KHO%\Ej^ή&z;%G{3+V{X˲3;׭yIH%ː  C-K y<P$(sjsw쮮p|7r 7P(g̎ FNؽ{'&<뫧-VU*VkF%UX.TdSLu**.Pj,S9F & #V,"6&NSX+8*R@?NtD'wt<\yurN+Fb1Qᣊ6PcQl#=RTf,H8x8i!hږb4*QUL)RѶ@E:t2 zĨg%ٜXUZ΋3s+&<;Dڕ+D^.IuݑG lgotG"/ʵkR}xz Et7os&AI R;++с||3ec' YOf‘Xvcy0ߕݨKkI(KZIKt5 *FUSen352YdDJ]8A,SuZ9ЈOb'AW}c|OySUF6Wh3h!7X46`NUi8;mnqя<_{ن|uxM6fc&\ˏ׏c>Î>WYYsXp3R/U6Ta^y DG.Ho sEI<§{rcy"JEnk_~{xcS/?P P'ډj+b Z%,r,EHڳW&jD;?rkss2;;57t-jXBmB Ur"^*"E)6UZN@dF<ӤY)v[2 R#D&# )Q<Yպ0aDZkCDVBos'Jbtb-."j/x`2t")TnܣTIiPDvnG49bH U#Fpދx>4Ey>/$@JʜѳѫQiL4cnmy=fFW)4IiXDɱV$+d=P;Q R= 8gUOKJ("(M}ЦIb.K#֟3,Aw/u%wpRv*D$zEwf,__\wOO]ʱL"bґ^ur0zDډiǜtZdnFȲDh#K:Md4u>// Tƅv x-5"V /D|R\HrupoY^o|Gri}I?]kݖÊ$('Z.]L֖-gRm2ԛ^qZf.R̂TRAFhXMh>(och&o:h,DM?ᐿFH(X?HJ+}dۤ`V]Ɠpcݿ8Եq|O|φb<⬙AtI+!J  Q-tFXZ}}eyfyEgdBcl/uW֨/@pє(~u?[ӝ|Ps_ۤ%LgGc}T5e:a UW$%Gc*SQ8'NR}9sQ#- ԀHaCтE;IXK|¼IrJwH% եU*t@ ~%QDABk \40[N2~@^UOv|̡6Mդ%M3Cg'EG8*#b L2AC$>IsBǝX kPURM[0761/3*ׇx 9<Ѷo >z|Qi>}%+\fu0H IDATַ=}x.y$@^ҵuTF+IE^9G[iqє+MUvi2 Z}mR{P3)$eYi1j$ѼZ D.4 Y0Uzm.[[MψEpV i+Rk/SՁk+#: Iq >`NrUU,:Ǽ1b4ʬT~xG uU5b$'`Tt7D{)Kpo P!mNTC懘⋚`U$AAk:#RLimJzF4"$gq;Yj*:0RBp(a:CJlГikb@;+Jo}EhsAt{cpŒշsZ"ҽNδ:0p-kY,`]բcTu *%/<(:IK' j4[b* Yi'Nu?'tH-R\4ct4%<zq.0)|sa2}s47 uFA|~YyAת'̎g2NI굴i F Y;ٜxI=Te@U/kaUJW@]8vxUxj뛼tEuHd I"_ ]HCPJIipA󪒢rF 4`G"mN#BU{_~yD;[<؝rU?E/[D?϶K~1u3xJ'bh4 @Y``+bw @LT'ĜL*`'H9-kݺ{>$KYpbUv&vy;یvy~Wݒ ٕp>7aҹx?_?eQ,׊]ۻ@u䥗7I}(gg1pN2IC*m;'Ta0\L˷PR &N!p ^I;"ٚő5}qcб n$ysqapo韋.ul1H*-vOK lm}[b/JT{rd%]골IّXg챜/wP LzΞs(JЊV:} c389Ic,0 %Z󂺬1cs"ӈ^9m#k]{z/-4ȇFsk-fb p1TFb@]͓nOd VNgzr0]X=y>LJ*+h[pDI8"xl`$|yDsBܸ9&1OYY˗:x݃Ե\*LEmhjFhLK|dbz+e Erz[#t8 L}ݑ'D-%tXű>G0&joiAQWBpx[ t!튋Tm*R%^TPuXbm,QD2X2 R$20C#ҍȃy~%oM{1/0atXU*labF1 Sʱ+r C3Li-4FM"F>kDqUA\U9D&2BH>MO{NsƝwkj$GHC Th:#fB4m*d}aO gEvliZV*XB^4K$f`Ib9epƎB^?#ҿ~^$ۥ>Y!DYh0-YJ锖tIPɌZ'3\rFmNCWTREЀm:NKdhY\zB;0 h(9j$NvdJXV7 uI{{/}~w"G6.4:V;p.5iOJ\0od(,:ĝ ֛mPќ}e}P&kL(!4;Qy@YsWԥڕ$kU/><Ԯ9[׆ HFKˋz6P!H,*@р`bYc- d^$ ON;a{ko'{P^~g_I-FO`2ߑ^SLO T"k/P9K.uƚvF=dm&POڊQk*؟xk_$|cԠ<=\QYs7)C{$f 69ec/zs}iQRaּ:2_\}LcMYaMK -=p!D"ĥej, lLmQ8r,±]GʭRQl⪊$,Iɒ+u>D-"v(1ٜ@8aXu=̈́}M(Z T()LCߖniy}HT i2T̝cVT76_%u'*KG_BƓLF5|APɧ^xU7$:Ф"Q sQeCZGۤєek92G2cnkDE)ŀPV []6I50SϑsrȌZ;e+s IFΔTQ A"kAj/f;~ģ'5o ýV̝YN0d0VcyKbcۑv;U=W ;ngCak{vGKqkmX*s,Gz%y[)!2s$oWҍ+݊dmHqxt v}}w{[Zիʋ/de?:%B b6Gj>}RMh 6O_D @1C3ZIDUMvMkĎ4AIP@`bbk!EKS(IԤ(02HLȊRQ)H.!=!K̂TjHZgDUNl"cvbvB7Zh;4ZM56B\8Og@:F(:1e+„Fa/d9ǣ9>0YZ s%*B5MТ1Q㣈Zi2IgIs3S N`@j'>9d2&:&4M0,StI\ j_9đa|4saTXy­zkj}/ Ԫ6R:v*Ui S$m%s*Ԥx"7+5exeE,D"lcA#Wb-2<L! JUcΦ,\T^[̰{°O]U!4'V$"8H$݌,xTƅKr3KuZhk̕ 9j~PZRI;U+m'DPPUF9ռĔA'E s$3ɲXKw&нXsM^83 0bhj4vH䞆(}*[GdB'b}LfsDϺ2*FHhŪawU%$Q'F3PZ^/b|P;7_9E|.jqƆ+ p|\c &2hPOTggܜAgg_mՌNiI]͋>}7+Xå-*SsA+7q#v՗fg-:op=q~鳼<|76v^htn,Y )׈U=$' FܰwӘGNs) ׉dH7>d:tV /Qo&3 *RUDb8fY痕veƖ'5.Y>1\k) {@Y 3mHަɄ(6Ia.#>3ȴ[]F<^~L7їmq[VW9ψ{>y݈^1awb<9/ޚ Q^%>mjwᔖ0bn#y4']^X CWg:IXǷ6J{S m맨neU46jA0*HjZ!^-鷗&g OI"ECfBfb!r1(pLd1:Qe122-r#l$ϵou r1MYmZrXS+YM䔸LI`&.tZ\gy<*(NIڧrWΨF Xnbi~rf?q3N{gunN PTX`i 0yABqpК-d}ucn;asc@Pn8l;eI2tL:7?Oװ;ߦxK*80[{ƪOB|/64'f0ï%n+єPEZJ11DјP9O JL3na26ldezqoUBosjZk7 v^B=-9W'm$W;Qrf #ul's+i {Y۝%(dEU1:xre{_hWOQWmqδ/m쬃b$`  ݷ`P' agz^;/>q:m2@r˿،E?}P H-f3 H:)''9dS:Ǜ]tiaBEj]O!i{U!TH5X3t(0#gJZYPZ+ gz4Iie)m1"}zIrE5`˂<3M,9y|pH; ?uX`s2LHUs'0 ަ/cyn_^e- ho_q$\gY2W}0pw/]k`ovCF{Q+$bXZx;|ݿʽ;<^ޭmXu=nv>͓_9bcDrʍq7|N^/w>|v2O3aBԦq2WY^{9=#[.OpH 敝}W3t]huDKHؔ͘VdyPJ.yvrcYQ*XһsD|ۃ!Hp)7҄o6;|hco?vRg*ge ┢n `K"Qc5voÛ|t"c1σhUx}aK)JL. 
FOUZޟۥE) G}c|Lg: i  '&#-q78Qsӊ i C h,%%]Y©,CSP.q !էMjQ e~OZx #qOR:'9uaT,!j#L=:lt֎^ 59)ǤbZ 9Bu N䯝3.5h퐓 i{c'y9<h.RNipU b!mΉ&S*PвӢL$hF2ܓcqNqVzc$YLm QZKmNF'0PE b KVKa:63 ʼ"YY2mtG``TC1UZ;}DijÄ/oAQc2R-J~-=-<D4 sGAEű1Vd ΐcrRؐ,vA+ dݔ:!MpH $nG[^*jŠ YP Ѩ =u`H ic4/Z,f4S'0, ,[–/5X4oUb㵋&DSgQ7͈7'1,O0whfѥEc(NrnAyy.WkƊeQ2HOtzaO>bX% J,'GZD !"@>k,LEУ{VMEn& ͕du9\PK[- ;6Ml:'4ii>g4:z=Ke!7d %( |d{|U d>9=ɏwBoֵI(Պoѵ^a/q%(:ey80%6q6*(5s[frDnf 2* BIo`L!tyxc=8(G|w'f ߹b3z)?; C4v1CvEId yGH ҾAy1v,= FS&Dp}P֘)f<8h Q46("t2Ac ZVpsLCfvY^޺KUg2t&i|r_[ X+>.Ǵc|OHHKF-?T'hp A!„@zmsW#sHqrI qBG,UE:wDѭwy.ɸT{_&D6II{W[/v^iپAf 2տőcΛX_Ѻ2Д*=Ell8{8BeQO G+9.HZ僮rTY0zuLB zk=ETưJ8jW͏&=:Cۼ`!{#)C,P)&i8$݌qhы`ϸhWc3CE[Z&\2pX+wGUa+u۲iDb7kxأ0Ϟc w.Ҥ% >uk?wWW2v'qqw/M5V66fL]F*w3=]/P=lsqeє!i 42sFħ~b9}iQ9K@d LIQg?LYabW`i#dRΡG)u3OF}bb$NE뚸$в1q6ĠmǨ1Ma,`Z0_PjN;!!PUtQ%nc!6Κ)imb6A qjBQEޛʖ]gz{..[f(%Rd*yb0`ذ0 jh=0 XO .wKV*U,J$E1)f̗߻ўn<8^&"-X.8pŽ'DZ1cbUa' H TJHE`gԱ]Ґ0& Vb$-fÒ KKd\317Nҗ  uWw0c\ՐX `kyQ">:ƈe;3j ,PO:q㻩 `BI3RG > #HHSF",Z@Y}0iuR/J!S"sMm0'r `KO;z$1%)'?)*92Pch` [OL?t>t+6>ʪŎrIX|#|6ki7~r4-9Xf1YCdq9i9~k{3!sw-zޡq O񱠖N<]" '8&18f^TR ͊F U8na8sI'<ӇwX)Y*9MI:yq6p:)?7~L޻F|w)tbI"|n` /|vgᷖ5_7pl|bq .JW-h&CjP{Pw;B4#&OY0D$DłWLr_q\K%.5{oc `ֱ<:KISkôTacz֗Yw}R.,9Cy15"86>w1ijHHPT,r?'` H_]̖9:~Q *ƾ&kHD4ŐRvLCmP9iOmCt՝os8PU5fK~ZK9IpIzܩ\qgL̳ͅ1`iyR.e= c3Nd]DUjFBa (s[(d監Uma~vsC9eo8gU~pk]dxq?a Ӝ-+oşK wi%ΘǨ* 5xk]Q76e6nڈD}xE}D#(B )u57HhC?T+N!1z=:).8/s0!f|6~Y,] U8?Đ'ÔvTRDc U:iF&AywI_dy֭,kTXL]1|u "md!OWSnl],$ fd8PJr6 # ѓFZ1,m MVȌX3t[}?A#ՅaD5LZ 1P丣u]3*Dn ahGTcLF-#N+]Wqd ,i'^`M|BrșF0G%&OlX& 11QvJHlZ,l dp>Z?Fu&u6H)i؅JW&AJ;_&xֳhZf'S/Rlif%Mi$BUE|F̢COݴegdI_{2g; (/m'K4s('CCGL '-v#v peq:blLЋ*6pmRkC'$p CD h'~Ҙt#D vaY//o{-?F֟{̟"=0rsG1 v F,?1IJ..s<#i/^ Yg떣ق&t?~uFAl /ɕkCvw8kj|1=٧1g8E 7Qoi5tBAX͇+K8\m3H {4,}0;mSf<椚q,yM8Kz6'$A&'7vv6HCYCT6c 6Fh[oF̤f"5-e@ )-+5VX:tlΜ*+UkkhR?~-`ĮXgM _Ǥ91QL[(|ߠMnC4HQc@H5p:??&xIN& {sȸߙםETVeQ}Mc>zZK,h`Z/*]H`lڶFuȠ$ {,JltӦwS&AQh+gYd Å.ٮ:N@jJ-&D1x5VhsBUXm<nh'3dz!)sGc j*ё%Ʀ$0 )XϸѤ ԰@Ub89{LܿAn9* v™ّH|T1a6֘ѡVo1n5 eK1&saE#ԃɅMHنd|G#M"]P/"¸o)Âm)5&DE{FЩBVi?/i#ISG@Y6zԡs F/X6B@Xҵt e$E4TQ;H; 5hbQ!0U|nhʆO\ʋ.#g@:xcbРb@,p~]_C#MqBvRG5U__I͗+R_#Ea Tj *ѳyL5߽Ϣ|O9lf> ׸i[zɤZ}V=UhPTU:X*]X2k)bM j:Sy ~#^OS=bsNJN'ܛ g=G+ZKpcZ(4ZNC+IiB+vPTnIumOā:5l?+_{`յ2 ˄5 D3~w+Rio7hլl.R'8;'m^A%&Xԙc'ijowy~*#Cw2amY7dm/l]G3cr͔y]$>m5Y9kYU R9q&Vpdʔ?#m~On(n[ϵk̠X"`:?2>'ѺWI$U*'tc|oWt퉳l9&hL):?1` |)"/ ̀qV6I="ʊhZT;R/yMA3Z4[BwʝJxzgq>*{3Af^HHp+v(ȯSCNw d7NmUϸ迃]Fp+prm.jݛl3 /W7G,RN[ҋW=v 'q'BkRQQ| Fs&$,!R:q]Dҋ J%(CX>6jm2OzHڤ/䴶F-,Рg%ԡ2Jv5:-w Bӳ,Ώ^OlRbD![AⅰV]}tѪN]I &vl<"m@91vž]ŀ$)%6*RmS#פ!>>`%,O-`W03f-HmHQԉEBHj CHO&HĐvzhPQXNCF+R_dy&qjI6EA(,Sn{LS\Ű,J|L7bH$s\lOMo|d掜5Q ۓq>fP`4I6a,YqFa c-Ɉ 8dt,ߪL EgN!e^F"֊Io戃Ǵ?TJŒɔ˻kJKϚ3䩕E-,+^Eh4 ֞b :I#-$,={S"׳;J$'͢]fyͭ5w19V'WWGd^w#$dm{?=]h}D LGCx \'T^ ݅6᥽ .iC|tͫCm:G(2,*͇Ng o<>ǧ+ᬬs#V3}. 0<\۳ŽS{〱 .BnԱ]xMΠa88,eAź9.m}݈G?7gt_7_>}ۼߞi1Hq$YQx;˒7A:$b!6-mT P`hg^cЖM)M0>U3Jxޜ[9%NG8h iͿ>џȝ憶 *N&(`bQF *ą4`d[}dGxe7od22.-7&%~vIf\X/[LBj'̳q=x)a}|UמGwD*MKxh0"q5(I5+{Z{wNq^kmY,Z޾kecb0jNof{jMy%lX IDAT+$v:Txf+j#UxtAHa [#9K6&#`,A;"#*"b AЧI? &I:$Mp*(UT Jdi4:PW MjKe8:R3O1F$ryghAB j3'fJhns]D,!Lt%e̜gv3c0:e)RD F@"I踼ٹ4f1۠>1&1DuKX{":jF*5DCx:eBXS=?4Oln^DTDphg0OfhUE&d3ٸP T'g4:xASyeM2i!FӹƪK)2IKnT .scN[CǨb(|aPVb,2ŶKԌ)EFͷzRʃ1iEKIv3|$ltD%WAy/8$@\ V~H;H t+r`"bUW'jqH4jOW'5J*3RՁ'֫.vA [$1? ^¢tmC]|Կ19 WBI?&+N;CΏrw@/=l o~@9)Y;7QQB='#3h٫4!$v1R>}ƒɜ? 
>aSɲG>4ǤC.%\J_\]Rsg%>u`gLRq?'i y)ډ32!YZ6錹utH ϑT)<_gge1>U+֮pߒ=cmaPo&1OYFUkd) /h bld\&-ΔlB\λS%x,#'ЫP`1HDe ߴlŌsiO ɷj/XT{=f񖬅]-;ŘEi=a!29u tN([u)l7%C5rqKN0ȵqn/a#/9 ""]BТ]3 ",[z3ꓹ'u$sh){å[Xt<ݔTfZigM>;'St!F#cݑT{9I÷-_z6_ePۇchy9fYdApE.~2]#mbOVԂ$hD!r4!,jlA$11J,hs$wut}9t1POI@V%4ZˆHB9Zf!yM]ARURIxig Wj]@at!FfĮbHSWB>-i6˻CGtw**[\?iBW!4_g/p!ͼs\(W!O }du:Qi#|%qQȫ6'O b͹w:u8| sATrM,ɲ"kKZ"=$ @g@:k HtM<jĈ&& C>>l cݧh&/}koR݂yA0sTw"[ *Q^5/> ߋ ~ϙYQBD!j")Gb#u}]@]]n{nnFHY\;uDzĘ0[v;tI3K1qZ4ASzDuzy6=vd7-7y643,.6 YsKRs9+=ii~~d=^B)Ts29T&LȢ3G[{nHT?8d3IΣrz˅rn{q3/W6 w8,puK\*弹_:'u|s!)7(Hw}o&qT(yPMkحZ^LRW ߩ6h7s8PBς%-m," M٤WC5# K 4*%p4҅%]EQdQ,*9 f{h1,#_0Yp@C`a'M#kq2 b&5at_rt"6! {onՎč/{rv.XK=cdB=S3V/O'-/|_~w>&#U#a:C_a.[%XΦh31QɌ&*H|s~SWKdQddIjD]Պ =$RX tYW .1kEiqjlPsK[S0"i?,*80F*i@ebbh4bb`=(3>iUU$YO:\I<4ME|ɶPm!젗7y&[[F(ZfFْZ:$a8e!Wm1œ$ʑv=ޢ!:"PjKv͙jphj#kyO3>K^"'A!ۙ˚_SGֶ(ĺ.6>JP ˚lPr!X)=:PX! i:!*2M:x0m'Q%K5,bELto8dlt1 {׸X?FU͞FxYj9g#d=5|ՒYcVhۮa!,*ԘAyEi@V:ܴHDUT7!C9=%}Erzmi'o;̖-\XuA奫_5BUWߘۿrI<ó׾<7)įr1O]5qG~x~tYI֬U@3ϼvTqk.>]6CBV5lH u*rQRS4Z&+XC. ݊.5k(&hJf׏X#_{Ͽ~wxfqF퟿ĸlլ<6E4dDjNRF2fgy>?w Fbէ֭<5ga :  Z軷FmFhBUXNW᳋o'KuyoFf*3+ZQ;]h n<#Ǜ,Y]g<$8{,c07d+m|ˎ/#DX\=]PfLy5cÍ5`RN@5", 5d<;?Vy/_+W?`~e9>{YϬy˙Ձlb3qok]ι>>2+9a橡L dS4 n Tx zQs;#뇁 |\ƗRlxebqI 9ߤ鬎>D,_ϯ+5kO >K[fh,5)xA0` ~ͯT8W5p+͜g1oHhhYJ jjJ ln>c/ȔTJ (AA#kwd#y푦FDH% 8!ikO ;CҍnhsMC=-I+K L͐5+U E )@ dוOb~udE;K?r<Ƨ+DIZk"yA~:{_L1ݦWgn1֑%z%.y@dA)m%ͨG'36bJc'b85],1yU!ڶ&Jb2D? }xՋpk[3 #*$4FH* >EP%dU佯992Q,y,Xejf;(!T ܐGbSUBBJ x"sz{YLZ\;ODQ# )P#Ɩd6 3W 8@ pp$S"%'}vrqJ9<C@u}R T{C p+t-J @Yg*qIL.0]j>n@U/ET^`vA)J>%tޱ[E@c0L: VLP 6*ʓ`VM#QkMNU"ӽy u `*SC(EUQZ3&!yZ q$8+4B11㬦 8Ȣc5$B?؈ 疘-=yT 96]~7>.^ Nw3 {SԈ^R'e0yMgPUg$~K[*Smf<"8ܠpkqg ۋU鑊霙RH5:#f;d!XbpD8 Zbݜ&6(Jh۬C x^wыoR s[W>A텨qś?ߊbx/&P\1j9 }"_wR3֗ YYWDfWC;vƁBcowd_ f7 ,xlr'rTlDB 䃂Mv))==񫅣- C3#|j0iD|uʩAީ=G km]*;M7/h0"̺LOo0̧>a6,2*.NfD&-~θX1֮J4`c].o1Φ ?LGQyH5ʣmʕ3<);;;4N&lrsb\;9(B-xO̪ld$O2CA7}!R%GU܏*ڊ+k-v i a{%æ)& &Gl]>Ë3PCySxސ[6{K=jr!j=bXrÞD ϸۆYN産1~>'3qv I8|zɓ ?3aR&ḟ3钅Idsx}E~E\( z}E(6KI0cSh UNd:;Wd:uѐfq.Ɔ G Eذ(#4ҸA[b kE01 I?@o"$U qc ^#p@Ƃ_mҸIsfd;&xM֜ĵcLD)dNpi2Rt^GRthܢYCtL?\c[glo;BS"2Liln>} ܆1g}K\?ctq=_~bM=B9{}ޜ럳sLޣsԄ9n[6Td{ 9$NėƱQzU4đV픢CwT`ct2RkAj4(18KhP3By,[lVAV,QaB3䒄*#bC qH'GԳ1Ce &1FFY!P+<'*s zE.]"DqL;$]Eٜ-YN^y^:A[6BVl D E$ڔIl6i PN3ьsƍ:A̢iP>9mEN 2N(b63iXDppB \L5Qil0gu#ū0 #~1Dx^(0+aXjn8IŪbcGHA:+Q'=vR`jO%0->0/ʅOV)`R `ZBH Z;B}Ԣы O1x5,ZD(-AtA*"5c6ZA=AV{'b"Jjy5" BL"FEؑΜtk=U--y[[W_;ۤX|鳜.M4qĆ?y}m[*kkiQi'ڕs|G#/ IDAT4PD"E[K+EP-kOЀQPZG_&jݠ;7 ySZ ]:LByMfYmH\ݷ=vnB׾9˕OyؠqVk_Ks=MCdpo__%9wđWW5rQ+Bekw@uhJ ˩F6Y>&C-fR4cQ܌en& ALmc;m `z)^bNuKXzhD䔓r{T}>K6.Fk ;x5y"QOLUű(2XmjMiX$KDK =`5aᐌC "I6A-"x]Uk0ubK.K6BVX+jH>G5$&kRW5"(%n&VzR"h]րYוZkzUtZ%EoN3!H̠"_ĺ"Iŀ.{ǒIB\=Ð[n,-ڮ"Y }4^&22\k=mlZJ:dNv4V؆A5@faorI|0a{$Ϋ&VdA8.̱sQ:4{2?҃ozg7H;j.pFUua;#sE4h4ΝTT4I"l^/V A)xaR*K]T['Ti$ FR@FRyZ."f4DNb~!Ek4<0 !FJgO ki?xsա`?X{#RJ+*E|:5@,"TZ5,A8E&#P-} .8> lC7ì G՗:QL ]frkCƁMĈ\È=hBdWo_vkȂnqԪX#z+OmjDhhˊ8HP;S`CXZkfdhW> &mSNIR}$\Sj0T2*1RCjg!uĨňb: *Mv0 g IF&.Ձm|˿xc'۟_{S{w$raDC/]ϩ|juVxϣk_WIJc:QOl #Oj~2+ ɴ^d@(U8y#h} MvotaEuz*,M#xv|_y_R;Ǘb#5JUdքջrk3]|'|7e՚g=)^&KI[::d|L e/G?SަLƋT5g,1u#1A։^* mP9QrHJL$M޿f{SY-v-Tobwl;ْ1oOBMz aNtUؗO5Udur3XNdv;nS'0=eh$O%w-nHc`T{Mջ撖JG\ߪyt&W𤩉M$&uYa|%uۢ U)Lo^{2--z^p{f%FfXkY*s:uH,nizTՌfFdt7MdD\sbk룚SQz-7k9#KA-u%'IOT .IO\e EFSb9'gP /~b/6o _e6.H+KWʋå˗~ uz͒lJS.IզSW4ԆWQ#"!JDp3QBl[NJv0f{焺^bUX$332*q%鮊XTl`TeutRRįߒ#TӘ嘭ɏ7x_X>+~?{CG .%ocz},lpp,g?):uDPv_{l4b]8Ik/* Ώߤn|~@{}]VΟU)h@l0ϙo:I[]桖i,IT* ( J4pcU놌m}kH=&8j/.V"PlrYu͟^lQTW`ɘb|*GH/'Шk,fjU9nRbrS*3 lN0NX?*^D(NN٤<(zʎ)\ #٫3NDmjQǣٌ(JE +^e:.j#"jDB^"X+8Ąըbzao7T[:M]9E>;)+9{$DIl^%qK &g ĥ'L )CdLjmإR"g1 JF,3TV"j?H,b` ~Ē ĭJ-*RLy۷=w'f4m$"㙧հ\L;3~{_pi+ᓏu\K覆J (Z< Ņ 
*8ᘂUbW7o|Y^3`Nb7?ſ3/\'-o7ag/r ax_wsCC$oU@IFTyǃXguAC" bAbR{bO$'_9^"kDQ "'\XiJ?uccq,S'V{l6y擉UL_뽲wFԤcTK$%lyY0cNrB*@=|9žX{cxVvxB/no?t]w_H4\wItC.K:+3u%m7WF{ nWvٺr_r/uY} {G팴FvUi<$x] %;sx6m3S^LzrS"eW Ejh<ίU<~5]R=QÕu"?sy`?!9'dWB}&ր25YF?SLL2lWzo=\&9^Q4ȲVnHW_vP 193/ybt~G`G~9|Y4p"I}/AOW/- B?Vt*H µzoQ}KG;kpo=+K^ָtHˡ\2u,~4t695e6yƻ<=cq7d-|%Gk}FK]9UI8X] 04$߱\n}..*Ef^o ]A5 jO܎0 '2u^wG^l/3>F}RWdj?Ñ$7PknHP_c^z%W'%_[cXz14WOʹոK'LU"uq UL!2vV)fT)gs'c|YVV1iDIm?rNlUku{Uɬ 6,#-A cԒU95ӚPy`LO`lb:-}R2<2ީ$i[JO=d՘OdZZb\#Qc=qJdۢ 9^9Qo}W7|iC9D=޻x*׾O[zOPl>v7שl#8DQS$I[:.GpE Jj#\1u4')w}۷17|<K:MMnhQylZ&&Iz%dUUEOL?(p(Uՠǵ3k#D+Q]^׵ԕGbAbTr2,Ajyx<R4{vciS:#a0ɸ]4ekWc\j)XkR(H8=*t^ikgJn@q~`+X4KkHÛm\dwqټdEQ&2F;9h[W#zMC*Ip^ՊD݆ZLqF=Q$Dݦb*,ԃt_ˀY,v=pVʬ4uG.jFj%x3+215{% vI';y=k>edk虍$ub%j$a0 KMtzwWZH [w×vOKJ-6Z7q ]u.!}CvY3L(>yFGEJ{U9{Wf܊hlET"!A:fA"##;Crx}b\|@ډeH'YȥZ߈3=yzՖ\^oCk-yt͹6MNwcN%JRpDŽL l]W䘽;PIlK"4 )F [B})ED]",VDjј1 jZR}i6m+EOd!tŷb=?ąSm\[=qIz#ȩh(85`IE&5I\Xdinɥ]Dv}(˟ޥLX'-#/g& IF < [OI+.̅W_D՞uK"K0`+*zj67h9W0OPGGThj! $wX+/Oi-: ب/r*6(p9Ubl.4&HR>I0]ƫR%OZ:ҌړtƤH!sA2WX["|(GۄB<6BbkJYl/G1pgz]ݻցeil\anjebkl-EjjI%Dg%g0{{2gi [+2ҸuMa)LIc+\$jӏ]Kϲp+)es:UYV4JI{*TIBduͼFHT$)JbIV$Ne r7yk_֛oɭ^epw(?]J5Ht6OJ]D81u)*BQ)j83h 洆3I}-N!ރb"{_# k𵈯}}Jz8ql2lZ eQ8hE" % \l.Yn2/1i"۔X-`&6C$rFnFrhc/q yM܊VJ U-!*TAH!4SB% ^9#-3eR1hLDJ^SDXt/bׁV"c$q2ycE"ʊ徥5OK i'oA-S)&sl*;$!/$Y E%~c&(Ȩ*(eZµ#nS$H[ܗ52\:by)v"kqv3njۍz/-.mr,Ek3޼>;{L2!_6zh!ċXA8q։奼q}aSkƼ ~xmѤϫdR0}h`gtG3: +TQo7ݙk9 M=xkF7edLvӟh@آ"أrC ٕP \pT?.dZ;fԈxLO>uWV\\mqD[.toF1K GRtpL3lG<8Aַq;DJ5pPq ":7=6u a%,jS%8w D}Elb6L;7}zC{7r캾ym4KBc}B>3gVXJQ¹X~WhEj_~_S;#ξ;Wd$ҿ}U+ݽI0QJkD95elLc GPҙ$:ČxcEzh MFY1X9b˸etcґYSj۝ߩt}dsTQRώ3$-O>$UwBh8'*qٜrvɴ!q)Lu_znU;:}WZW.- eW~#C:15nB4ׇcE.DXX;&mDN6 cY7Fh?q]ЙLCR~_g;roTEHwٛk6X me)#U#q^A &˂tRٔ`=`煄 v:zKY)ӈ9#J( r2Ry(ҳDlWF*z,E9^AL#kN CO[~*Y<֫},C YgnR5qcYdY#gIL]Nvٖwlu{_dB!L /6W5q5tRZQ y&yk1I?7T=a5Fee}HYV2-fg;m1Ɖu}4[1ISs+ ckjb[Ǯb3aZH+'6{r?}KkD5 U4Ϡdz,gEoUR ]R> kn 獯|0W58.8?cS IDATqIë7trҨM4z!SGEYxV+Kz?w~ ᄃoKS|)YyzYN4r2 W6yRǯy|4"+=gN$\< I#Z߹5wn$2c+K-S#kĐĎGipwoʻy>{هոLZ*!͂@?UG_GqIg{q=gRh?t 3?+9Y#$Ӄ VW BIagsNfH:' ")6VC))2i)]lMm1JdX6&=NZlNoa`uVsjT|JƧąJ0`U:?b^E` p"ͬą&ƏXZw5rtV$g>IY<+U䵬.]f}ak[:P'v5SN؊-'r!*wfF /OʤH$kklUcQZgc&o/7dŅfN.g&:J>_R6KDDE=-iouXʼHeȬ):˙db@H?xm8+8N#FqA sz&vܩ-;t9vɭ_O9{c&&'7r"&1ey;o9"̪z Mɴ@e7Zxa0ʀ yldlD97+̘7&&YUVE{7}oU&4c'" f1ϩۚINˏRdI+S{u_}ΠcKʬZrvWxľ+2ɹH$ͻ8;m {?k6{?x9֞{Յ*~]֗_oZf{~;;<>d:?C~1wyÇy'Vo->\OQ񧭠6|._bK_^k)q2RaJG2 NɨCwN#幫=ei%#8&<<<}9Ow̕l2]8{G5*Y냌ռ[o6f<-Ưg^ʠut6g49g~',Q=~᧾׀|Vg2jt gz|{wjǵo\Uh='KȎ|/^>Yɶ4Vp:1dnҲV|:]lS.rhwRqRe@K>4[6^ǸrAtrNXtDeM4?)SIwuR6TP$E+Ղ$2i Z(d|XEB<:S ;3žGX3Ff!&FķL,z(t8/ uN̦Ϯ ogPՍ1쒻=RgvcZP#ގ,M@-`c1v5@N fgcNNڛsB#h {14-[Sۊ C\W qblļ.)Ѻ#2GDXk{t6q'S λ]&þd ]288Rn2W92+QaR.gˣ* 2,b9=3vA׍m+ǷTzi};gL٘Fr傗W/%ǔ +ʲNǕNY^)~^gT9 YL8+?x,?q+:T b_sw(gsf͂y5U=u%h-kEyoFkrt@H|l0kJd Nj%v7)!7ίΔ,{bA Xޅ@ Q*.>r=O]2{(W8{\0=Zpt*~kWbn~K z28]?|L>-+d-Jf9cŽč\ٯ7ڳOs7~Ʉ0=9/0?x״&- ;$.|YδhVYdQBT)Y"|Kg)Ƥ%ͣO9i)ɖ*,%V&IDDY7!&J$PEY!ɔu6/l( q ' N-MҘ8HpJ}C~;@ "E\,t j+-Ly?Ŧ"DYH[al@ؐ%F đB'G` eqYBҢ" UQq>-"8#Yp"K~W5"I,!xh[GQ7H<(*F0I;[45Zh UmD!YaqѴ a^K^T_7/đQ"\0ce+Ôj.,:UN 8X(#8i*x.zy3Wr6<>~,?iv:w?'o};?#k4|s/l0< = ue.~vyk*$IO0BnY]c0.`K'{x%Ŭ`՗:93gf3m36B͗JQ`g[yVQ+Ya Oq?,]aOtTQ/= REXƅOB*-,"in)oS0:!fO=-V'ʿΫp}G7>Z~zQjW/ҫ-A{e) SQ<Wc~m]ZK'?:k3>qpX+&ں69_3]%,nל@R&4!i^PwTRޛAoן5M6*螑QZkm>Z?p6w q_k~2e k=u*x@r(8@f.i[YB|UUܧ5\(9Q,c21Q:#V9y8 IZ>YVU*IGvWzڞtxHZ{ [\3tK'/LM Rr5HIagyDmI'y)6$m!K3~*Qt~,Tw&]F|!ۿ/wQJZtϵZ k8YEu#S+u@fDZ!gyfA9&^jDvY[jpO9vv]?H9x=ל4 }bbuűNNVVDmbS.ƾ~WZ1YfrEIQ1.K4"17=Śzby7^'Y͎$*G!Jc QR,gfzVؠyX0#u.<:ʐenR _&a8urw:"ts,'w}=lczV;ӌk55CUC6}<&!!5 :QNW\Z{|)3r)jL)OF^n$,& ]ikh\xBf֮swJ;ߟBޯd &5d"$8km{̕ov s\Yz:+&)o7ouL jJ)W^e8}VڲOp5;dÁPs.&3?(rm56vi 
Ǭ(My/3Y(jR#Y⤐炪IEy0Qu<:]:]|zdtA}Py4%`dVK“ KJؼ'1uj8@ Kb}@Ds~4׼"qԅXn=,ڻ֬kQ-tx~nu^?d_7?< _;/عx}}k@My~ ?اOkŔhJFPİCMƬ\R.>{?DE,%7<ׇgz>T>m:0,bvqXG`va!'pmm\VI;jy]*qA#HSNix!&D""ӖxN t \ F >X+0rjcMټOnOٿzL4cb_`}apK\zn]'oҩ7ѧ侣'}3R; 1~ H_|K7 r5vm+n(1ȥ\p}D}Koe9WiSnXסbIp .JH9Y#mGؐLWsiM=E$"s6tzؼ'<8pm IC7!DKcjJt:c|M+M܆Q?R#B_8{9YԞw=E2Ĥ!NN"#@_R6|q28$z*u_tYC?+|bx</* ?$am{n)f9~v q'&<|+#uz<<p#SFe"1 AK隂")Nkƨ$O8N\\W]KGݐ_5F^P\}KaT֋6Xx|mWtq%bv6O9H7J~qc4IGz֗Vt^4/"#: ,t IDATzOmJs+QBc022=zo~{xMDaoc@l\HvO7BsVk#{KL6zluoQ9Wvдc:S1N-K0T!N{?Ƹc璟# hÜK*a*N4/4:CTI4!]54Y˰S$gEavNs!G &ƚ "*)qC'44PѢn#AIhRyb=<^2236Ⱥ$,a:7U4a$)ɯ>ҵ+شmHu⪦;]mnqmOϦ퇛}kq c*ؘrQTm(kGt~d461& !^LZ2+‘xob\v\£Z-uPryJ6Y(Qj7\*Mx}Ns(↫a*{n32]LK~pg¢lsa51C0٘)QZ6bTaZfi1| 8v(!zӉLu#oBp X'b~LHm!`=$ <8JB9j‏/}_A٨YK?ӆiFpxeX=e}.=S+7~=IɄՍu 79w?~\V/ ?pC<~􏦼>Gwjn~Yl@0?8"^_Ti7C7Cy._Hu6i\<%VzdyM,&tB<5 J &^饨2L/ O3̗6V8p.W).О/T)g լ{BSIn~Jwa3$fD yQ(KHvpx08ϙw;(mKLl. E%c*hzXkEO=TKtXVqS,ߴiѓh$y0a.PNE xjzi!A^7l |4 _p!bx?F@ y%XO;ev2_>k77x`ɣsMf;ZTk9|t8fA G|0E{5A;Sz^/+)$,MCDpG]:uE<===UF/idj3L傓 9֜F3 1 gJm6:l ۄhqAh4pѩm5V_s[\~u7}W}A__wwi]d]WWN_8Տ7xUُP/jMTwh:'۴G}* ; z4; }MrK$zInD^(dܹy˪6nhfg@on0b_g8vq1sֵ8LQi)˫dD`Ǿ? Х{Zb3BlMOw7fik'3mM*\V,]6L;+D3(&kJ4$XV&jo+I C[Yڢ"]C 4YQzbnqse!a$[v_62UWk;OVՎT58Ӗ$;nVJi|l֨5YV"Xoʯ5^~<+5xto&q^qh/hI7=&,Vi*:3+`Ѣ*Vf^ᛊ&j:RLd7=]P-EicCG:#&'fj=7|8t zg8rP=N<˵]N1;}3͵IGMMCm^޹ic[۔۸OޗB3utn_@EQ^S7D.tPU[GB6QՍZS]ܤLf{{ܑ,)̏0 ב!EAC|fqLѹ|ܥ!ΈYglXY \}yS^x*RB}v7ӥ"6$8茨ju{ʥS k/+lF?Ip_P[7ltk2ϴ3.e֥|IQjQdXAqA8ڍ\YE)LF_'M݌˷c`euR' ɇVEd` Ye4wQB+[##53PAj Xc<ņB3*bL^kXk I'C:H 0jQ/?5tf '|aRLo;g`wE'n1ftf=/T݂Ğq7 ?(x7-qyIƢZ+???<ƽԾ_NK:9S)-)K&2Saq5m|9`+9WvKc;SCO^g=v/yrrBt ] I3;1G9R+OW Q"]mA"&ʁ?ʦiY64be0&\|);ʮ`0OJ.~yK^Iܒ$^, o@)'IO nc1>u T+J|y Ja]ƴguWejC!v7~B7SAg#Ga;C[%]a֍x̻ mŬ)$bЋI3tp|\2לVjԘX\`_c^}ի_acsv/}o} ^Ig2+-iPF ˍC.̧9l֥N99KUj6SEAPfc(NPմg JI lYxS\HIQ?x7$}* Au0;;.9lNټv ڲyxfؔ"VÅիg%J.]ڠ & " O ĆXCݔTwa˒mSSꬠ3!ԴN_6Z̨Dep4*f\(OQAL~GW7dF9G3>SWalc*+[+l$oWl=cm&t7SE ruE,z7Ebc'S r뭷`kj~yÝ,\w_ptfv>+)~?->uK;D?+R~^c"kAYj:/)ytASTeʖP5YFaQVWs r-3 8AW27T5*+L݈ƁXc>ϳiZ(kTքɜx*7-h@}`>e\bQb ًRHp137q$KzYdQh+ڶA\֛wɩUODI*Ҫ_k{l>xʕ: pU/T3,,#)*LQa54'OE bu _!*r& FsBgN'-*,!l%킼Yu RJXx}13yVZ8@MLqv팮3 r:&KtUUmj,sȍ0}}>~ݖ78xg'<~b|ȕ_yW:J4I ۧϰg+7lp@0e)鲑8n60f&jwWwEqI(|bvYK޼95uUuWu7&-dJi?X6dJa !`~Od0 @DRgȮ3++=#fI62p.N;N}b_oYT<9+RQ iMb5sQOfZ%fJ:(h8=Q4EM()p.jU[r̤\LˀM8?;*&ʛN-wHє4Sأ D Sli܉>,exsH3, ";7V)Ϯ3n (议awJȎȖ։jǏQws|"l0 t28%aZዚxL cΛ,=~:);z8My7y=bf(#2C{QBp.! pҊÛy^Vԧ<` /S91y ~rP;:~{J1i8<,Ν!"d-ZZqLv;DAx*(YBk'Bغνw{xLQn2N1!a>gJ#yhf<B%,=;s0aœQQӜ0!%7/vu7RPO ``=wig,CwMKfrf*/$_\?M8̊y]1(LX QJb&gԑ[)q+J:&PG`=n<&^, :$6 P:LdUx-o7,v ̠FTžgb4ݜ,/,.X$^?CHfYAa=_9T~T;׷9l~Ċxi.%**ѱΓ9AJ"LKUYh˖-#$KxYMT)"XhN|3I5dHƟ#REˋHy"UV :β AilBq/XWuKQMuej]ldLݕXT٢MyNGoY*m?EPXAJdoí\E^f~-G3TVjﮒ62S"d}PCi8ѻ8Z{Ѯ6R'ZGZXMpL,Ke9u_K|g zWg֩⑞Zs{kYb&םFD9PݲZiu6vid$-j di8sD>(d@4kD^&,ta#s~)GSMǥJg|Bl5jwdXzQ4^/UX-B۽EDr^77SJ5]/" [!C):.0Ԫi4,rb);st֗eϬ1 êQ{;w)D+).N,.xQw:e׉E&SBLjem$Kщ}̵_]wt՗ïP9o\CfkwXŸVoq>R5#(n+霓lSW $mQ deSÖV_O?ꕧ]D[OTM_MίMZ{ku:z%|IU]Ztyn^{O|W.sf#HF2"[@Ayu:xz(Zzw7~K c6-}w[GC w:`^njDÙ4X#w'ĒAZ<.y8^%!cX)ki2CRaVΘl%ޡ֢^;;ʷGZqnTMCoMEp^qjhj|8S5Q-t$v\@ km#& hDq lbglȇ3X`T U4Qxy?Wo@jᬕjhvTHZVqrfZ cډ0Q<U^ mA,&DME⽜L]8eý[cfzj=a˶v!|;F + VNk5⠥8t4*5 !Ś^;6w+UN++纺z/t8jx㽱ߛi:ƒ FKmfw~^q~g{I} ?O$„ IDATk`c 1ٛIMѨ4e}T.AJbO?|3OZȒULo /5=4EAU5Sl4;UB+L#LY!$(b"S60j]bHy9"I r 㝟 Omȼ uڀ)!Nqm\h{e?ĝ.qѧCޜjxHR3Ҵb70tUzkT?6;.hGQ?`FF4L~ " U6|ìc%*bgNϞtgoukT% #U6ltoo% Y[ i΂$Ҋ!*T3u Y'\%su 7۠Mz+5&"QW y #k[S_$,DLQsx˖`ڱv  5 [1UiL4Hʸqx uʼnnJb(@x/ψ&9kk[gTL[ bjS/퐾l?cd0DT5hక^J]hhwZA}om͉ %5$c &qpDh #X.jHB$KݵcAa /9>^hSFߤ X'9Eo神 cA?tGyN=}B]mʖieˤ2eѽ3esr,){{cξܗ$8k@^7/6':q)ekOl'.cG9j;8G?_zolyGܱ3Q, 1ɄdB瘍'*IpB;%l! 
ݵm.zsKZ>%V.rN_ҩ4"KK,l,?9H-9&hSƪYcr}@~=(S0` p,h0Ѽ򻙖fy)ʆTYg.nw Y#Oئ15#U yngJJ4ȕhH+)l0>dEC232Ӷ-dSKpbhO e,i{ܚfԻye84~Ws eGrv[i7A5ȴɥN`o w5.YxEx.Np{Q,X^KS"r?ss/<Œ3<n.kϸ[\:Ĥ繸U;K{aW?W@.XsdZş6l(yWʏt h 1_6U|/sF)ܹ͋ۈRw61sݽx6cZدk-}~'!ѐibDY*& Y.sxC9&2 7/O3b\2=x?W6@fQbRѶ"댾 +?cQ"LdgpChNO]qa>Ma.tSWB6"JSbUk[b[Rb@ 0aCEہ"\iE"*ќԷHY0SbŠ)c#6jc9t1w|1tA!04^c0~S6;$RZզ1 aJt4Qd!T O̕=17 =etg{y޻Q;ӴNRy7S=HmN|<ՊfQWw06=EYn`oD$]MWuȳPE\s2'.2єiTY"Ocipm P#eBYOstlWQ%JG)ԡ<(ҚebBUјu l+x+hƱ2fNv ŗTKc1'j1gj :iԑUŘS{,"я4y:UݑSݠf1`B6c㑅` PM1#56e(Q&xoHhXlދ8n ";QM=ґÙh*=wwy'=F gх&qyDj!gy믽D5Nrv_KgwȢ{rp3;]a;'6$6+Uj~,DVVbCn=ވeD7PmU#c Xb 9)/8DBTҢM8oK8ة`yM}ZMɃm9l_8#:'9v^N'.wJu>7縓SJ5qv|?)ϡ4&2]IIm)kw3 [sGR)?wOLTSR1RQw,>Ƅ9g%X;Z1t޸>tFj%uϝ}s'lusw'}c_6oqb@]{玢V`In@dX(J^K}~s.SO $z K{}Z.Y[˫TonmPT.lkK˺|ڭ"ki>^./,=GS"xɴѼ`r>6) "o(|rܦ )8S93u+ԥ%,UJ]-^̈z-|,_wnEQ; 㯞di2?~Q8fe6<٨Sg99#EZܟL9yVKIO|4Ur 8g(m1 \i1?e,e*HENJoGWT%1k( 8RH,d&I!jLgDcXq;Nu`[1Q=q+I"j8R^JE%:IMN*;٥lggb]?)nKg, U,h{S߉e)>0tiMd ~ UCU6EEovTj]A^̸x:j[;AL,]`M?Y!ħϜ~nO2gnȇ>aßP" \XZ?8gay/Zd | ~F6P A`/j뜙 a7AΦ ~Dm 8B'"g?I/dp%6=]Z5\—<3C*Ru"m'u;_r.91|ǮOr֢*^[_:B8yc;6CpJO9qؾ5f`}5 \љ vW*v0gvuB:S/i-yBiFcg$EXZ*yK+!]YX#lVv*vqCrp7c[c`'M%9M%89s~5$o^y&t:'5; =FȚGA &qMCB(zCR2D(H:^ۭCM"8٢ !)g9^\f\WzU&&o%)tߠR2H<>d\x AMd˝k #tNg|ح5Z4QR8[jZƆY/5U؇tH >; Fe}2bY<>Ek?ʆS7a3z㯼JCaxqNdH͂Su%`}"˻gnwYcYh:,'򣰕)L #GmXVǙU >KnPT8)o'D6")lYS窐TGA6W*tCOp.v:8*_@ZBrЩ O¦Vʅ.aKY(g.zs4YGp<1 UFMLjCKd;'s*ڸ6na7}_vK"Bfe[mlm AF]Ɩ.$3O1턛ok8(F̏~$IoS:5YG>5V̆aw~s[o}ٰW_{#'w)g_~|z 8#b">U4{x?퍯?zG:USi֣~(lcέԉťpje."kKN'jY[O>CU5G$ILĊh^>)Dۊo] rs"5M,:-мP$YDƐ&Ș eg8( GuD`"lC34F!022>; *#L1lK͂VH$h\I#&为!UдIq'ak(1[յUᅫ}^ܻ=䩋}Ο[ׄJ 1T%S;BlCЊi)Q1ڑ Q+e kg9 ԵCN >dӠ[3&e67tryZ폆|;֨Z>Oi ч ~G $M 3 }~|/E)Ҫac0P42 B+BPUGkKPcT+Ԇ9f#Aj T>NqV}P !'C6Ɠh ZISh'z*A(!Z_h+F˵?\lxW귕 z\o6Jr[c[/B ?>r}- g׻f# fީߣ"Oyl^c&.z>ma\EV)h^j̜ף44MM8qCooKsOt_I\B`1ǵQ>3lAh-{?@X˒Rʗ4CqL8tXyZ@h$:WJ䏞*MG/|2QomocXH oWJatc^bPq]$\;`☚0ߎۧ|`-lT;]^,$`䒉FN^ɠvF/ꊦ'Te(qP3, fI6D!=Nj}pLv!d{3N-p*]V9dRJKRsc&|PIYMNMLgL$pX=[OeKN:x8s;QqФsI;C'yZH@]@HBhD!B\Bf, yY4؏Wg.嗟ۏ6|0wVoL7|<W_g_B6D&o xvN>ʎ .p{I˜][K\9{g_p: [{{~㺪"/KvL!feΜXW>EU@㽪#dM>LQs=/C4/}6:$OS>8ȘAaVC就 ,bЊd#!|3Ai F@#{9i?F#<9MOJ៤V\e!gO "/ۍЅV~Nּ%%!@dgz(ytYzQ`WTE(G꺦}HZBb1+2(кXuQ|Z4*4K5pLrlsYZ+?ZQ#_.8=|ݙphQtBS;B;i"I,ng`qkZ cєrV pTƻc^,!dTgɔ/\? 
IDAT_1 K•3x-n?~aC}J7|Oe?K6?^ X> `[TOb+קeh& )J"D( (sX3M QT 25qd5=A-,O4nKG̡Y69)FBޥzO;H #uH85mϟSrÛzosƭ-PeT̚v/L _yAE)\R*djnOXFœoDFkU#ۜ/i"G::h~zOKn8p~+b(/V+kv+ۊem9IըkxWT+*Wɒg5k;bfwjEfZf[ Mnoi7?4~編cUe.Ro$W*+iᢴtbpxZE5Il qE~"neQm+KZViTMPUc鮂3X9BJ` '4qkC[ՃPxBJqeárTP 8qOZ&D& d%~AV3={cϐDnnCWvm;VnJݐh\@#^4Q+cB 1w%_pp^TE[ JlrHrYrX1Aa#tLvtW+ȾPs5JM.><ԅ?HDQz)N\{:V3_YlLugG;JNҒw7M ڴsy !5V&u:I$N.DZVlKA*Db'N+虎QE2 +:i1TNoPm*Qi֡IU$֯^Vj~OaWٲ6x/I$ d:M43*,b nm)沲-NO-\e5ldJJΥo) IR'kG34Z|dz-lnύ[{zӝ],-QP =ѶXûSZ:m~p^3oyZ[׷5;hzPjrP2=VdvT}Os|ݬZyY֦޸yCGG#Y#ΜX`\i{u[-}Ux).o M@X+6UsG{˲;s7PYsqfqVЖvC ـZȀaȰK{#+ZY/,ȲD$sUd֐UcdDFċx;ŋ*v7ɢZ2y^|\հg,R#4d dSxt B8FI+ !PzV&kVyy(0)G !-WOc=;2 lIgAJpij&&R5YBe-FkyX,A+*LRSTՍVeL^H7Vd[&]FDi,,TY(PM`xc%XxŁQUCϰabPU֊WVZ!meF\I5[g:<-ޣHkN9R+U]9B4B7FqU ͼR/mQ,湜sB-sH/e0BUbcnxk:iK׷uicG'Z>0@/D\}Z>JG$'i2_y3 )޾,j9,s븺& :ƛG*)g_Wo;V1ۗ8.n6Ag]|7i#t 77Dyjt ԕcѤ|Cz sHCbI̱Gy_xKp;ӄ~ZP)wYecύ +ho];&`>`~qHɚU_&mm84s(*K#ѡ{|3>`;8&44^S̺"z5fv+'Ԋ1G#nnY>Vcvoy#:>o}Xy"TGs5\&2xJV{?}ct?BV)+# Fm17%j*j_ C(p44e E@^¼!W5c=5B8 0 qx ^Т:Oj8,35fa&k6$r3rx†E(2Wym/ݽsR弩j8~/J_T( M$vʒoG?Th F6k^YdmvR^kY8Q-J!|FT*$oyᴒg=}o7GGO",NAB;)uDžE5yFVB|6އxaHJhuRhӜnr.lJg &s†TWU7Uԥ6&pܐDmyp1 u~G|~zg?sf;B%1YSEnJ}N^8D|ScV8˜SeΨX0sǒoeẸ8~+ii4TyhnLZevMYh}iu5au@{}r8Up}\s&?3.POh ꈦx~Zhw~(R( K[zɏ5B2f(O5uPیxb(5}B"?U' -gi g֬X)wa&UձS;#݉٩DvH}WOxc0UL_=UT״iZy鉴u!/_;*ʆBJ5Ȥ룑e3$J† .c[ \SY@ D6PcШ-e~gN  W" YOS7z e0ҵёVgnZF崀)Km2pw7lt4#EQReGQa1N2_m-45}Ob(8d4ukLPS8UZ1tSqDĺƷ. \8zԍ)Rw*YJIzPVFO^KYhjg2S3]/-g}q;c6TH1'Kē gYX'qzx呚'%^z"CF60g J,):qI$VBg3ycjҊU8QVUW9$@9 8 ) t^#)UO+r5Zi$k9弐t9h -f͋[ޫt{wNƕ9\j\_ZTKy– 9QkOۑ䜗+&DD TO M!J.ux4z [w=\#nfZu圧MkBiTIYU5^qimmMkkkeIK&tXn(/a%k3U(S;n4*)m  |+Wg9<,a`ķaoˊVX:[0h_~J:\Ԩxyk YX;;mW={Eˇǻ?U+kmB8®5i\bCVn͹˰-wDK|+}pCk9`U.B!&[}?;>\ckԯgn-`5ԗuctkێ#ȷ7N maUԏv7TDt9[}pt}Cևz;rWm|1*rZdpS&NAʼpcfxORo??>xO)RF-IwJ\&u{n ˂PLJdīh_iiIGjbI6AXv $dpcoJ6323m3Wo饯]GԯssO-^1M$c6Ϸzz=pEGaݞqTPU=717:0%T7@F+mDs5/;.0"Wkf 7BkQɐ#gE?luq[kD>X8Z 6̦ҳh,΃S{oIMq#ch85|QjC_%Q/OMA4043jXP,]'kudW3cV[{}{R9CaVz9`N'[OhQLOzoq='_ɋ?ˊMϝxo_x5ӃC~#y˗?YS\ʯr {N= /R0 Ux>=.g.\V/ks~mg/]ƖzaLah tFsxGho8ul|Q|VPkjZG!6 U޻͆9{ᣩYC1?\3NgD8'_4sNCa$DY7>t?''N-߯* % gv%xRx;Eo-$—7WPS` M;IDJPݐOsg2bʆ^(k T|]TyQ #Q *h+_hey4RDx}o|4Zi$N),b}Y{ F%yŅͶ{G Q(9`KJV;wZ˓-mmp+qڥ]>%m>{4"lfX.O (qi4Tp%/ QDQJXY|R k-NkMs{%4qc)kQk"0` ጥ::ld[!{փI,2 5ַwyTy1׽w#+ȩ|N+WѨ 9JbZV$(pL6/Go?]8Јf>sc>8—^V$=?{o?(7>)vw:?f*{UAg\/3ɇe\GaB|FiP%4ŸoX+X?!ekh Lh1ņk `V:2$!a*lEf[I'Ab*`Fh䚎 b^S7?<\u~`7PIw~ %_%D-Ot OC͒fYFaэ8l9{j$2oS{N v, ܆8G]*a!ҲT7*Oa de7WPո0PiˆJ,;j$a[Øs^# , U4tZVF4.`W1)XJyx1?S-JN S\s ܹ?D ^+v?RfШF׾6A0͘NgpUvvvŋpoxﮫ/ZpN5"Pxj[ L -ܩY\v~zNsf8ŦFb&Z`’Kޗ v_|clRMئT OlB!5:j:_=~o>0鈽_|Ezpgh1lu<1mxy3ƇJ7O+>&o[onJeнw^>${ Uq/Iޢĝ2u&¬Y;J6Ve/sZa4>Tpqp!&Sr%;7Z]FՈfsW P;/Xo.LlsRxF}e>*4 O6KGVo){N' W$pQQ|t,ӥ9%"Y즌96)uE1P jhRWئܔ"}VrI p^L.n*kַeQf߸½^bC>oi\?_Լ&cS/߻?k[l}wӧNo|מּM~}7ahơ,vc-nBY)ɻԳژЄ$iWI ϩZuŜ.Y<,ۅea9DO \,#kGQժ=fEὗm(Kx/崲5,I)vߣ( ͔SdCl!]Ec厮xAltaHQ;-٠(=ڶJy|D^^#?X/hBΟ;2zXoǞ_xsׯk??w~,Nz?=&{Y~o⪚ǯ=r=ͨ)B1O48pC5:1 ,M#8TDQ@#XY;eOth|JNByvL2q,Ae^Nv?#%)aZ+a+q^vv pӓ>D,׼͇?zDuƖ|+XL}g$9I" OKyD]/iYaFA^b'?gK$( IdΨF~F+ 7 6[4e%?Q/UZ1dP8J@j[O9)T' &ilK`BCŒy"v_VV?^I鸱湾Ӣjg08:Xkhz>~Nb︡qӦ*|^6qh870\TVe^Qg Oa Ĩ<eZk2rx|噿f/&VBň_YO#pnC]4|n½?wp;v7lݺƺOՉC\a7 !{&dc XNT>'PME>!s!"CR|8 ㍚oCA2R4U{_Υwht<}c#a"v;ߢ0s_~'@o_b*)*zW-L}kMVJ.n$l؄F=Ê/8cȘQ5Sh =[n]q 㺡fѓyk~Nv0FXQ!xС]$շ!d n*欛@,w ^ `jysA*vv)ꬣq5ecNwCl]xA&lNyIlz-g O˟o~ZPSnSJ+XR6h3_+8~@ͺ 9[v7i||]V@|8CT/+:N ?nJ*3nbf&;.Iq*<-*Yz:oR,:>$^>,F;r37{o_7|-7oKm->WÔK L 8ħw@fϰ!vڼ/q?#t.7\̹3nKUե15' 5NJ"*[R' fʧsi w;I%Qj4a֢c6޹[o OWE.9WYPP7)enjGsv$Z+$T͈:v<((EE?k陋yeB0L5;3?]3],9uԑ'b:qT ՇA8\;eKe⼥=˗Z΃Nd'Ե[|diIQW5͆sv>%VBlƃe]` yIDW` \u^bdcr7_U215O98]8Ҋg/f4NINJC(ҹ,k?ilL4E|Sj\Z YO9ZX #4ӜXOu~j³7Dءpz{V& 
yM"o"y˜~/>O0OHZ-GG<|]{OiT[EJ .[4zr)5[G2-EEK3k/ŴW壆;H}Jp ;}&W ^۟ _ܖ~? 9- sÂmXλeURJ1DHRL@Ti-F^z^4uz;lvkk]qt!H>3݀G4لfso3?=w^.7[qd:OK0zWx+[)I6Jz%~$cY:)ތKx"7Wո8:FN5£ *DżET\[}N# 76BfdpHyo|^Nořg O Ů>+fG>ۈZpbD$XpriԯӪ&$#~J~p= C^ iGDp5ƄTFN89F&k6j?ocZOZfj4D{1 'ORIOaR&]smV!cʛxo0'a$Lq(V]DSKv,)~񘬞MU oSCW |tUauq2B>'$oI+F vڌjeDY,`UU,4%W{OT5*r\_aL-.Dm|LB\/%#,м@Ek g=y,ZZup4,f%e}/B2M>089L%6W)}M=YhjR5\,=#h%{[~4y#b<#|K޽kts}\A[͏t+l_we1~!cM=kܾCSbH OYɍ]%,LLp@bC5*+&lŴ$RG4RD,"D"frII 6XMbZ>.I ʹwt[կ~]?|unuxG?z^6[{2{!Ytxx0Q!~֓\t;^A@X_/g3'wZNOXyX.jL4ylu[Iٌ|0UU$qjhY ^\)eYڗk"-ueb@60Q/}%Kړįq^OҳzJ`uy)wg4+bУܿ?d IEXIzn%|FIh69wZOL)Fx*x M jgOb.BaB1 ZU667غx7'>Fwe Yؠ* >J /G=Y.bx9u~|~?P! }s ?/ G(Lޭެ3r'OrhƬk*Pފ@VFH'Xs2ak=׮*2n]m"v<'S~RE(44YNĄwP1˝x!1Bz>&f3J Eyãqq7[Eoz9&j'Ks2g 7+͂=xAQk;c%(yG^sG"¾'F~{>SW=M9_x@t J3Bn5{; 0Grt"9ٷl'1tz+5J9U1DпuQSQ}n!d;Wl rs I-Nٟ3/4uYS&tG't+廁!FuȨ48z>d﮲􄅮 WyXj.w#m]k[%ӤG)X{>L9z 5dŧhG/*8FB`_^G:RԤQd~]xw.qf^M 9cѶs q=?̷8-T :oN9uh$ RDQ=L旧ƖlW9H1vS3rdck,FE E$h t}o{{ǹ hH*iU{p֗}XV\LTLNQ- I4T4T%N都E>;,9~Av}Ua0WQj#̳وE'/fHhŎ \R(|ܔ dވu&>")xQj4Gؼ,ΐڶRʒPq7 Tzsm]_!~8b2A^-*sO?ٻt Od?},qŗ{͝n4;?kxtGDYJ("&v[=,3':yxɚD]}fAe{ܵz-[ݎ6'k5sc UL#L,&+uF+D$5VU,Zȋ_/E ((MEs=s#36'kvI/uB~' 2fo?KVCDD=F Z7*4 fqfLϵ4bavύe1)26Mv\!w15Hm"& 8_ۥe0,hիTOL%##*;JV&UfZ>*C.貼><9HYn'sd2S̷,37] Uڻ=*SU#-CFD2ZpquJiJ^^njJ. 9}JO=rY !kw&hx4*CޛFnyi﷊^tw3:{n];\r=8s#2_qW~>|O@1+K>lKe/H%"b ˭oLJ:RMXr\?==鴼`TFv* z|u3-T^Kto1t.hǢ&&HKd-JDL,QzZsfHHLƹiIj)_Jg=u~pUHrK74\OZ#KYj*~q%'$(abZ}H]2$I;K2H6X6K"ec2?*ijE e1elj.ti눉#Ih[% XA$0-=&#)PH"7 FTKG Zy_$bCUVgCM[T28F_BdbÀZ & 9-ekeK+HIciF.b|F+Z~Zv_ ّMDJLR,ݛfZL~Ly"(*]TD-Y)dЗfMH ?=DM+^P"![K$IE-aBOnLJ!5|\"Ifݣ7kҕs!WRѲ\G&/so< :ʉtGc{S~xP ?{|_d܁(IJ?YWT?hQMLَdHvG 5Zf!KJ`N ~_Y`L(UD^+ ^88-E#V3\H)DMZ*b lĈAͭa#f_8 6U*+clF""HBƤqB%NXQB)l.vKMEXܠ@K?V /8k04'"W_gmc}ۏu쐬|‡}hsOd?Z>I_x7ﻩlZ|x7z}O 1Q$E Q}2lgGyN$ ]63)IF}zR,18/FV ^D~>*>RרV+$i"!x1s 3Sy<|?=zF~ h LMNHِVAk] VC i4r߱c4*p[AeF77/!ls񎛸;}0w'A&I*DH0PؠQ$ZBPLOS4w+0Rbԓ=[PɂFF"FHI %iAP5C6oJ$Jأ1Œ82CH$ R #BS: Tu̦ŷ G cDŏJn#IkYp6^E{G}*J1JB"AV6qmb]Nj#/bkU1w& O3˕^6GC:Q|-':9X՘ G"w+o^!;294Ȩŕ9e GscƍY[~?wSs]׏: T$"!jNw,|\iD-x7RE+ax`1} ְ(:v; ?(A }#ȁHjyKD|o20hAh )1kՄxԓkCt6-Af.~_^h,mj[-"񺡉 Ƭ!KQDTŔsBUKdدit<؅3=!+,O~oLq&.'nN˨L7ePj_N(UAexL.%&Ze$٪DCYCjΞܒjM=F< y݁$)!YEW ƨ=bW/iFPJFv%©Jq`P#h T4I 䥎#h@EU2U#7VnD5)STNQ_:)G,^PN$ ^Vsyr' ^ FIk[[yCQ7!ȡ?9*11q,k_~x稵Z2wHn]'wݮrӧɱ>>67-{_}>CWs|(bWMnX@U9vNNMJPG8vF5A "ڈ^//|[+[:$wENMM-hiQҢy~9sA=sAΜyP<[ǀ7LV=*܆_ϭ&.Pd Q|U0.t`yB$ng|8 T%j:]K^1L8K(*EU#QTJ'6ID,"(QAYdXHh8Vb5d W3 ΫuNmƂ)2x7vD\IL.GZ.+\۫r6$6^4V=UuV΅ut2;I3TaT+ j$^"yaVu o,&u˫Cٕ/?wA.NiNɵYVe{m|YV\#twTR7,;.2vߩ!Kw}}7t6cۻUJ  Q"?Q@~Ory>{"7XnQs ^4P[Q f%zTO~O,̿ar8uJE)j:˿*yZ_kRJ [!QՋ5yhcgp!I˥u= ii)9mv8ۅI#adíD* :k@yЯS9ђD{Mտ(##> Q8QkB,R&י΄>&bTч\}WJi5j {TZbҥW4pQwX:)2 m7lFwtMad0͞4n]iîѬyfϴΤIU"д DBnz)Q]Vd,띍0־ /co"ǩtsT#RP#Tþ"֪[M E9 ƣQ8m7_ˡ,`ŢHT="5#jx>'kP$"A1X4Q5+W/|S]1 #ٟf?R橣<_ZUn\߾k!">_O.^O?1w;%s6QEN~Z=ٹvU%\yu_R ْ89p耊hP}ݦ}GAa8v֝ן8~TEq8E^py3 G#xh4l6T5w^CYBtlJS5l5јGksC#ȐT;^I1~F&vTy+)}jӬZMݞr PbE5o?:I&Kc\ح 1G4b^( ƌwy^sYѠ{!q5,<{./}-ǟf񞓼g?͡Dv%g_xU&\0w=N0k^+_[=fRsZ[?wO+Daw3,?!:w}n]uj>V{cOO}0bkrqSUJJWh x}@o"yA<8AQ ,T{sfboOiU%hzQT-{% Z1RMHT֙{.. ߒtTzl&'f1 c@/՞8i6.GUT&}z~!n=k74~ /fwOQ9$G ?FcSW?/([#{HZӕ! _6G>'0:.YPŇEk ( _.8^b ]%v5b0kF@1 ba-OAU`M%Z$EuIL4zq0UR9$  & } wqG>)0v!\jˀU$*jТȉ%UUcbOF bh"H} e!L). 
07D c ֒qC+-R F2uʐbM{H,9" O QtܓݖQ--CS4\,q0QI&GzTA%`cEܸerrif~k#C Y#Zz5mّbj~WMa[vrǠ2˝yYgfh"}>9Wq*q`h,l>?`0LNнZ_l=xz/WJYXX+͠F nUƑ; fc$8q{=9A< 0p:Sf1$UT"*2z *c͞fV5:KT/B!Dk^8D}/ +[iL"(ƁHI"8{"e dŘ$"cT1w'nG"yI;*# AYJ=mı4q)>HCEAjbfRYB6V/B%cTX{)5AD j{zNUvU G1 Ϩjvϕ#@bch&_8&U)Hj:'?Zr斞}UQM#'3@gKx縝x{on;A^>Vv'㟙¤E~S-25mԶ)&8F$e(,`0ysʍ>3lJ[[|Rb1G3 =GK0#wK=ހÿ?BA bM8!x9UK gzLm%޺|o=ˡIǾ [P9X|@:= fd!(+EM&7yqu=79\n*{:z: $QU]-ĵ0v @h0Z׿Fj쭽۽I"F \spP=LP(z q}7)F \@WW-$.H&Fΰ50,IƯ`הּ?C}:4o+ʛq ƽu =)ܝt4G&A|kRv0 ` #f??VUF+7Uw7y`F jT%'A-rb]I+!;s16)0[".qTB)$vHZRoyOv|̃t';'YkGlځ3}EjGy]beJ:Ƥ%5)3Au7lmޒ۰︇ZZ YZ[1]%^AvL!-rhroebw d3Œc*(ƹ K4*I:C)yf#nyΖhw Qh cC_tܷ۪`boX50,yַY \YC?tŝ< '~[^?˷A\w\JE_u.<7~~DENQ{xץ #J!=x7?wG܏tf8磒|qYfo/p.[w:aGTJ=8 VhO{˧q:Sm\ +!xpE hb('W`/ a* YJ^ɐ<.62OWK&P= lo| ,*E-æ1 OЃ8ҸRUXU8[C<*0ƌ[U<#ۄ*J'g ==ݗ0XCdSzC]pLOeWLS۔ѬJ'1{sЬI s@řqy/ I}E>#,F$K\ JR ALq0!-73:m cW1c&qu%x3^ S+@%PƘPtsN+g'h0A?L0O,fqr"bz RDoQ6"\3@ rL}ȉt(nó׉N^f[$EΠEhE[[WyiH#lvÇo}ws37/ܡjExFL H8=yb/r x/n}]ç7c~rʩѐUOZڙPq0FkiG,,H {~=n x:s[y!F$xVqb+>KDŽT'VG7{c !3v  ph،ߟ `e.|?:\tWn1u~#qIK|ݛ+\tIvoqϓ>CZ,mwv Z*Ya! Qp@m?K KaU$eG.B0vC:E3u6l l=A`o9j%u$LmPZ$>D%BԠQXY YBQ$qJ*hk+^#s"Fo+(BJ%{LyŖaϮJ0I) ,RxhuHJS]>=_zQjiQF诼 8"nDTi2w^3Km^eK6Θ:pR\SU DccFcu,coݯNW־mtcd*f Dta`a@# n%k ہt8{I(i69z<ވ 9[ 5/P3ҝ|&Nʱ" I蕼rD op =UVʈ:Sq?s15;y3]LjYhaP qdA5TΡ~R7q $&p-a"F[YB^Rhݾ)'ɎMآEqJ++")Q1b"p #Ri0VHF9pIF"/(2ifPq#e@z% -Y%7*ޠ 7VDDJ)]qEbkTfKY83 _z$͈hmr3ࡿ)# J\}[+Ȏq \:qm)?j巟Г}*̘:vțo?"# .i@/x5y t1EquXKW7iM69ڬP J?qk #PvdyD~K,u3R4Ze^(iyh*#ȫjL_LN[R1ԢsW]I)Ed:n!0fϪjfKLrA1UHmS<\WH4\\^9w3Sgns#s˓gsȋ⮟.~8px{1*K4dE9 tjGiJʸ_j{9piJdJĝ IDAT6ybzk/3[/6}ט"?Chk{wr!2W\p*a? Qd;,~Ho*gz$GDGiVAT([\`Ayz5A}%$9d`2Ri`|-hND[NlpO,- G)XqkXhnc̐2Xꗘ^%tk: G&`a$Q~.|isK|p 6,J@pUqDǦcZg>O Ug'X[{tV{78[4 cJFgzT=,Kt%gX(LJD&Qqcx(I ֠"|TdY(18sݼ7 #33&1IP%e4vX)N.R,Yy\PU/s z?1+e1(xkO:rXֶx?wzE>Ə2O>O=I(j:] Ucc[aUD o[yS;8/J$$a8GTJ5JW3&Ar̉Ռ|nv֑لS*TCZE#b80IH'dI^`0lWn2Saz2l*wfkI_S^8 h[G+XcU_Wk8K:I >pK#LWȝ% -Mބ׮9؊xiggu+kԙǙlNnUXI&=iB~?xl>y@orq*@P/A@3d>!E ``] 7/^m*.ll48)U(%h>*DB$ƺgw.R?sXŨ8OޅkD A5d"?ԍpDF drbn_+h:DVy䎢# NƤȞqZ=4c86)$kw%F2Qx~ihN&r߱$Ǩ0ֲ-pH25wUm\wC@YHf-G*{4wgHvt7h?P=r®F%OڑF;EEF-~k|S:i'f` dXMQ$haɆ`YY mST )RL#pgzzzzT*[u{t "m\SϾoR_Y3l~'7Kdh"7Zމif:D3m/-Idw?G67Gcd5&bVGFe&==)Nb.]Oj+8e|C\QI8A)CRV/ӿ2Ϲ/ L:H!.2XJ+jDG'إkޣЙ?WM2IN ,ס3867I_ QUvdaJseTY`[&yU))_ ӂI]7Q_JuDEFRPp'Jb\3=Jqw#&'v|B Dj]+id[BO­翩?~}ǁO>'O`k?7=cMp ΋|o¿Ϗ|73o^ =?uJN<cK7^d}~NaW_Ns}ot10#vs۪#7$Ys IJ% Giw"ѴݓzKia_*l4*Oп-qd}qhaosfa.p,.i6+IA_#,vٹ]G( KQ앹N^jz 5$UY)Dt$CQUi]Tc8V7$\(H䑫C?JQ^Hb|nwkB.UUHUΌw;7*P%wKR\:S"aVΥ t .߼­9t9t{WB:MG  h fym:ѪkG2̩喨TИR:2)^jސ2}iFX Z1=iTTL`T8LY$˿ ԴI/,h[/ HuxPH: $}u5B߫]&p/L}'8 Q( f H}hHDAAv@Ed[U%v5f bLc8Uxb 'bۛK)z !Qң~"~cB1ll\%|E q{AF$]T1,H,eIUZa}2J$Q9 پR@H@[H[,_^srD8Kx3P$/Z.UCIO\ZHc/b7pZmFWs;G0"=Ĩa2(Q+E-ɺ%ou(랞غƑLH7ũ,N84HiM 䣘hH|+ %;9Y۟xA2@ ## t+x8=%b$]ƷRRCJVV%` z㫈qG8[Bm[<Ɍ4_gd!a|1ܕSQls܎"4BsTXJDzF\lVJiJzm>ɬ͑%wjOw'[t%5ztM(]iF%W%H Iѐ8MS(5ЌdӃמ(u8Dqĩ0( [u"KtX/`H{K6Ú.5ŵ%&P4)kkT4b{M2]6ʇqe~+37"mF{e}Gm|Y>yL7.]Ǿ[Zc R ~V>Ms.oR:[[<D^p.=3OV|We.NTw`{w%zkr5nͲ­Y2W'KR,/rmٜl׃^~ץ H"(2T/4!Fգa,GQWsWo=y,$xy:{ڈ%R?q+g_^"lURns8Q1E"$/qwR2 ͕fnEF&ՄLk1B-.s8hNUD-]GΈMƼ+ܨ].fr/"/}LjaIzMK<|UY^QWsxHQ:2ajldk{['~7úwzNѻ2|Yzk=DV.5wsLϐՎjk\DY+9PYe-\ 9e8P_5,]NuDpe,&Z:no$f5^m3?BB9>~n\{zIW:ӏ6:P*,|1nR5yuA8h^"fJO߹L6xHu2 !(8ύ*-{FLvRD6#H2PERR ZQWP[ /˜ $g2QVqHk'^&/ke> Y*#C佞v}4>K>K:"!uqu|6pj}oOī7 2^5a>Ref'KW x< .X-ro\R\8zF'vo=lD]*EWz$G%Kt[wV[fm[^T#xU[lDF8JbO?A#J7Gw ɗiTVy 7(Ѽć=uψg̝kWbN vɸd7gKڻEƭ - jҠx}m:3+4IcjLQ:eSq[y&*a Z.YXÑa]9}!;48̦'{$bdlP\R4* H Y1⨈%PKP!nK{&' s,QO_G._R>0qr4#Wd%e^͡(סtR##Rף gn_ө"㵁Q-͋,47C5oN_⪕!T`tJ7_' IFdܸy[z+Wo|`!"jr.u醶.  vh>wcύ7WswjWxv.~W_Cn=-s/ڱӝKa& \LF9+X]e۔9oX!ymxW̔|G1ח&Xb7C:7. 
~*Kt<ނ +gjcu.CEO[.%xX̆ԕuq:V Ĺ'# ?"RTv08vDTe^ƍމp< RDI!$G]@+_Zё->@ Dn|uy}Ѡk~z)>"TB"I?p@P3~k<_L|=1wR~ >Ov{aqQƱcz7WoQwD@{&݋$gRGW[iS 4x8n"IRMMQ"WKU4D,8"–m`gNqe~j@7/1d,:v璕2<35ɥAkx;33bQysI1U<|ϐa](Z1.-SPR y&ϻV5٠3HR75Ӕыw|'/EX}% # OS\WcDQrO,uKu)1"jTNJDž՚m:q#Oy'-Sufs)rqTȕmLݗc'x*^'d1]i+IP#p;xZKJDpC+Ơ)HmW4V2RD6kbL$#FE18""b1"" (o19 R: K;-V8B4u)\iXps|0fpJ" 1q({b֊c nJ钮WJ04)^uXȞp4^KE^KH‘1)l޷-;nDPWGt \>"s\^Թ'Vyeus[C|AkLlHʾzEy~+YIy;$4_/A[VrHx#nڼupIb.u׭JC_lKɟ<8#"Y"enLrq\sݺJ~ey [3T Tڒ!;Gz]pnqo0r%"uqΞAVKw ϑ5)Ì<$;@IdEE2"*iY%+YUPg7%7~a[|,n'm\K&[qEKհY.~fCJcD}OB4p!Ć.*y˱{bBZ41lJǧ2Yi%,ġ֊oDd *bI-  &|S<\Vav#0.ϯ܃-waS=_'uSK]J *&qޖ[ yP$Dw *Wv%HvC-nii/GJkZAOrP=se a†:1ɜX{ZH`wt곸㏢2\AQDѢ,鮬"&~bQ[8l3R! mz~9Nfo1o1}݁'"rVAD֤ٷi?%"l'eeu%R-cH],9&2?%JKǺu2Oϑ&Mk21&d=+1CGlL˛J-WU11Nx4AݰCV0S )"R[4nv/&IՁ'#@f.yN73;y5 Yai ,2|--nB('u#cz)F%^.7xk7?Ln};cvP)p®:~}N8p]Gۘ+Zz`e7lg Yj 5R8hߓ5U`\K;"2ѓtx2GUfF'|o3?JlpZDYy).EL_]Iټ5M 5/tbFƨM`R l]eu\G[zȒ~Ge~vnn>:uL>yoLu_1eAnΟ0_o %' n_,oknqd:8 ~:v7xSxk/,=D ~܅}^s1zKUQ%YCf $}zIcCCҬumNEKDMY+ׯsg(>>#i7sx_y,޼ȧ0O:!&˴" '}Vs[7 c,M8="a70ԫ.+zdQDe*:9Đ67jb7[̣$mU˂$kSj!iNT<FN-?ܑzդpN]vb B)D͕^bN'/|M^ҭ{Ƚր|;ր܋xeN@G3{9rnz1۶wV7V-3=cWè~{W1t'6#e $6GebYwe -$ƞ}or0]B \QDĈzO[ ȇq/*zB ;\ep%1/p  nJD#2 xR"D"Zu\Ў[[((FqP\q d8IMQ&"EJo^Mвdm}i\ GrUJ)0jqoZvy()SKoyoQ#fI2`+K vٸ6&綵o?tT~QnGӫZX%\ҟlSl$ 5OwX8iDГ'f\-U?y%Sc>:է0^[g8ecu+ղ s"7;tIzDUin24!"|N.{m⺦o _v]~w?+釤w5->|xWzLtdV#giV'e``nw\#;M-LM6:1P֠ z QuM]4)G.?J$?)E\W@6.eTߥ6 ߦ 0^L'R85xo'/vEpצ3"O]rꦑW\JVkyzǵݵMy7@;}}>i8hkbzWf?wI]gӜ(Xۯ^fC #S"IRg3*#-vy/}q_AN(?FP4U~Ͽe~yO>x/r,yNOR)ǥr|ByAٕvg:k(܀TbCVui/מgmfN'*q)q%јfBIItD{,%+ BЫm1 V1f7 "ykgDT1{N}+EK& b7Hf&+/ 3Ì-0/Sz{~D𨣃R*}a?z"o潲`{wGn~o {5"@_VyV|~@W9y`uƇ5'uUʰu󱸢kDE1(hiK-NRgYj%P8ŎDaL/I^xPTYbIbX"Ȗ%l l;] '-&-%nu SnRh.?*m!6Geh N_Slk4a4đBdiRJNl^΋5B(ÈX@ib휨-pq@Cc 4rHbE(#olIZ#ɑYtS' ;5,EAԳ,YF~j(RXQ9^ԏ--c44ũKsr}gfj2%z_ [|>_#R#+uc. h,=z'{RLg Cn"#~+, kaɇIN.@WȪƇi7?L~F*ʷ% jz4OdS;G^"G :]|gWRFM{[!bͪ{[czÍ6˵B6}\ev*{hroJVJGRHr&vx8Ou7Ѕ00Rͨx9+[tm ǑFFSokJqYUK^5+qR8>eBqtZ_&\?O>\S:H鑶=Ew4iIk.eQlɀ,1P4z)vJ_۲V)GrNTc.0\ur74j3 4 uKm%sTQ "Tp|:ciFG趷vcY|Sʼ W%8&fG>!6K~ !O?,}u@^#H8d3(&5ed624/7~zϴCg2} Wq_`Es^s/\?}v̯a6;b?ks~§zlY]2:(qP#333fd|L^W|~!? _y;rҍca{SmR J^Z5%< ㉔]W)]ZznEڷs'w+(k}wH^Euۨ1{,Di4 E~7T#R͞b˽E avd:PTGcN2S(~mS1M`tHdCYD1  =u ڕ|EЂ]n)>c#j#J^I^ǖj dܭ;U^ALT UH L`q˄6HN4{M=q F9^Bmz"=-~AZBT=}8nZ|pGn\'N0ARt'O_xAAtz%B)j肷TATbs8(CX=-jFt -}jc^?bmqL5fT.(z@JL0% Ȑ~0QDtSC,ՖxiktkL2yTO4wC-mJG?7Wc[>;M1<@w N]k 71eB9HiU"6:nX8Z*'OwזΙ&EQVąUW4}jq2&&dOt[>8ִ؜)Qf閊_d ' ^t^p&s [ףcK|cqв$l NBwZ7-P%z6^%Gzaq#z֚ԆQU@4ckn -w3.NWwV!q(Eu|qcԆi`Y^~Oucw~(1u/5c'~/q[zI>5W =%;o}]bݦksdc?/?፯@Jw3ݳP{&LX `lxXO1<4َH3YXVon(~qg^Hsjt juZ30Cwr+{?;7Wp~> U|XN@j[JkA ""؎Q ۄ*;lDq:7,CFJ5GU( h~`LDIEew7˨qԛ{ܾsm*yRl6NY ^FQ2鐘-ϖ8Q06"a,: .L`[)\)K2(]t)r]B[P2J)pvC[߇ , 8N25W^럆FSK7J]Xx+8'qƦ(]JSbʒRA&(bG X`z'?MXη-g>ɖQ8+H7J@az[1FTq$DU]G4p(IT wQUZx9mJݦw@\O9;4O)7lRK:  >u,]!~zCiD0LT(ݑSdہKZ,rydI@ukP Gv׎xFRSV|?ϸA%}5޴n f0C$ d+H$RZbؠV 16"cIQdIa什zP@Pĉxu_wcZM9+K8~Z+1OVz{gWbTvl Q ]2Q?TWs tVLlc$5Tx=8Z :u p7fL&^G$'v[.{ԗh>>v?埽AZ:,fiPrr#C ٨ ]8רf/f$[Ru< ʾo±p(z7L-Eɛ#_Z$3Y!II_Wkĸߐɥy2zy8x"n!dhu)0ZޅSd[~`l:U06/=aE7oN$BgLwıl$=O硜h2{dPg}:pT$ ; *'W(L?lI_E'n9߆N^OIKI"vYǛ WLMɤDҎ¤\G Sw<95b\]T]* AS4.k3Q'K]6T)ZjB+@t8B6sR [E񋫼ZZ<'1vx#&tuLݏun(/au0o ~V/II IDAT<ﺗ?fG5Ķ][Yy9V6̸ohHs]4>{u2؎#C۶qt>g◹̳ҨV_)eB%K_[ʾ;kYb2 ^sxAMK:&qcr3!wcO>l۱l6k.OM-/X^džuc<Fă\;q,Wl,Wѓ%֚GO4>|9v2J)s] X%:+Lsc'16Ʊ <Ι9?36g/Io53lrW7NìuRL~mU6Vڬ/JT0N./i'%npA'Ft,&P"زi 1"lf 67ݜqL#^͌Wkl6F4|l3Ί2vekbsOcmvo͈33WϞcvy7_ֽWk?L6?M(у ]lZcI3lt"{bZ! vdQhH5H)0)&HT6 MƩ`5,p6Vu`ʹa11.qRWjR$v SvU 3|!b'ЦMi;nǧ;0 p>l{'B1fXivRAt&Kcmִe؈KNQ5tqlrT>ڒA4X&fb-p`9dqU7MrXD4rd> AvoID_aTzϢh LlL58W 01J,6V?3E57^~>Vi8h>|rG-` V*AG\c(QObcM k<|B=youl$؋=0NfI3HktVҕ! 
z*WblmO3u7jE8ML"I/G;^ ڄ&`U\:A͐kt1/[OoύZ|a:s9rCJ_ý+x0k ׅF,_Mp/)6V3DmS:3| )$ziv^&[&-J[VX٘TKk*\T$?j'n{&7?qv~V"`M|n~$I6R2Ů_|۾5*GY8;Ewv&vsyCǯ"3_|D1qO>=~y>}$Os?Fpٿ90H{.-{T~? 7A^+ z ۹Ӝ㛸Jkxb1 eaۊ,Aqy'iZ Z A*ӬϿXn\b}ضC JT6ؙ4Ǟx|gH.3>y*6M>C|fOk;nُ6/( l۱g_8Nzίޝ۹p}vyaeY./fQlcށQ޶ZqLXw吳-"q "B6h!l?@D!"\F /x)QqyFvt"m.o,6i>}Ivtؚ1Jt9rWoXJӧVu&{u5 ƶō>J46~3XHضd%(Bšg#Z\XX<IN+̖5c. ;J{I""F*%]O*AHmaJ8[Qse8@?cD k`cmzZ7!4UṔ9AX$cM79 p2-bioDMkJEHxӞ4ծ"6acSC\7F-/&$E#\@)G8׏b Fy6ÕA_C9BU"l,L|جU^Z4g< [Z,\ICXM0νl`c*Y?q&\ma&WwgV͘\ ' Lx{n<O:&d5Xqv{%,65'ʍ.Nt nqM6$M=̀CWI <4jZ<Ὃe~(i\>hX^ߣ7''ZI&hMo=1(Kk 0McW-\&;LA๯3yk]?hi9#ϡ9+U&IL =qbӵ*xC}յˍQ G(w1 m 9{m!|w6NE|Ghv#w5<@ʱhp_'2IN\HO_#a֭8u>)/Up[ߛ׮^errA8{$((KDGÉ9,O.# QU41X @2+욘,t# $ H0 6KR¡pt%ºԂԁ}EÃMڤhE }A>Y\F3`o&}!Ry=coEz'76 2lŚHX@&VRML>|,W rSHFmMk!nc63cI{W=6h䢈a%, ( [b%PF*"ePa5ma'&:)ڏlE;FE/cG !Zy(KXB$"leH:=i3,JG#RI:*OP iV+u0a `t(NvvbC^'hSDgE$ D cXF-&GdX!jYAbGg AD,ANM+kqi"ҳ<~/u4? T0dWz[~xwǏWH{q ]|׃:OoG@B߈5l-;vlw{SO7X7?hMN{`Ax:&f2@be-O<0RA }R#1&0 ?D?wqr /]$  93w]gv.OOldUߝ>l2=MWw1R|+%mRY]gbɤ9vk;MЌV87BLjLβӹ^@ē+}#_މj>0T 3ob$bl$4M%Bo;Z"h)9\4 -ٲeRY1;@)ce҄*OcnMv-Clvvڬ_$BU꧗6iDJD U6-\mo-W9~pƷ ?s"t6LdiL-Y2>B"QbIJM0m{xS Ws(ҖW& rA:-0U PD7u'Z)-9;=r859iaGB`DtlL M&KmiuRzt?\|TSȒR]Rimғ`@.f d͚-Dn,F}H;"inTb`5A'ӢU03 /c NZ$؆3Jr!XD 5, BMHq kSS4΁?aa.=O US8-エ/w.ܦmÏXN\^4,Gmsd[~esqveȷ?.}[t*waᅓfco~m #ǽcOxWn6р6˶7[}jf_On4Y-n0vһ%|3Dd7S[FD8xcCI: jyY{#ӅjcZ"sK@Ǜ,Lڌ IR53WR*箈6b&5Ztjesd3Xȳg|9ҒOgreKCq)VmrB>s3?4^&[m3GPa`,E˖S>#FqstaJl4bƈB[b֒ԱJ$ sRa0 iEL+&ju 6v%Mdmzf-Еd2JFZ7Lo[0>xGՍ^mc3٧n6[܏ Oy׭|x_0G\?Wߡ #|jL8פsoLK9nı1߁r8rj0AR &+L XPVSΗHl3.ŷ UK{ۗE_k\j.Uc*MZ(Ccg&D%]iťoOq%Bqp,x>٭#l,$.]E\dy bJ }ԽKM"/`C);B b2:!!PE"]ƨo&b60.T q:%|WR VUe\aEQ\ X#RX7Ŭ< )d*>E6"6C ks]mQh"+\̍U"mu荲ׇܭE'^3XQ nalR א(JK%EPq)l'&M#E5WiG/4 mQ?P +ccB'K!@K_?+[y6gyUn=_vGO^%疤,pQ/q3y>;?s5㬝Hlm5*Uy>̟i4/gt|[ztKL?]"54bdױ66€}{tM籲±ӧ_Jnzql8fuq=wm3Otò&v~- C|;Cs/@JkKK|ɧ;1I,.Q}3d*ɞ[8{ {ٳs'g_仩_tm>ws7|? }Ĭ4 ,`$1CC!RhePF9u<3gPJ1?@h(fU,s,-Qm%XJ# WmY0>α:Dc6(OS'c6NLϮ0R앿[nh'/)E^=T+=*^dvzQkC^of7S/Jk-| ߏd$z "FNwm9ͅg%dFZ+휖lJf;d M*4AHh$ێdK~)͎uJZJLwrsSȴ:A6< =s{%G&+;L{1xv߹RSN}"+t@[$ՃsrX$.zLFȖJAXD)07u:JIqHc(rb+%h1F$]Q~-:ŶED8Ћ0,4*G@ɘ1hlbKl6,Ϟ[&73nfm{9Z{ajEޢ  oG[mp*MyQivB[[w[b*O]‡=sIccTDҋm>Ӆ@C[V'NzC-OuQTe[$f.#BA:B=$߳w®C;1NQ}Uenx,^?=y>4&ɝEM#e/śY!9:w R3Hu4[f$=Ie"muDLn=7.CJ'1 %Q^hI&a'F񺻉J3ċ*\=!a)ǖ ;qRXmKWk3QqFQh,Mv vN ?ΖpBPu`_Oٸ5D[됬(dHɪB` )j^Nuޅ.`ݭUګtWn (7yci H'Z>4D._Nn;S+mm3ʧA̳R'* Kgnܘħ7`49uo niWt{[OGݣvە'n4 v;h'Y/ҐK&LDFFs}Zd9S?+0RƝ$)>rOLXT`cX58UJW߇QqDEC16ӭHvЮB g=Et||тd,&bEDֆ&|j|8M1gIY=z+=սoqyr˵c,V97B1bx/<8IYZope^git=q; h\ꥫt*[uocprK˯yd}{_E}c{wܨQ[]c}_= {ofEOO7+^e2 tw_,^AWevufd_R`Y4NA>af~^)LM_~xWja4Q>Lf̵z{z( $ye9RC8Ƅw7Sk$XTfBowf:2b A$V14 B*$RItkF\Il`)lUz ϾcaOJc7#oȫx@nf1}< ox{Fd˻} 7 qqn qߙKg%[رDT4P'[$ХTO, ~"rPB%Gflڬ9@&kn7p± ܽȐbnrGƍjgݬ{1-֧+L}2kK߾^ G1߾q*H;6&6&{d WY) drb|:h#xh ʣ#QcM 2v_FvA-#ZUw; u$AW g4O1gg˾yuYA/9#V;k6=Cwf|bJdmE_^hrh#:>4t]|8Hh>Zɍ N$X?"tv"u\ODTITjvgzLNvܖm1 )k<7y݂8 b\j{cR@OHP$f wb&:ԃ͉j;,{hM:iI~M~es.cǶmzh' V¢nY`ʈI ˦-u mKC_9];e|z#Ǐg?uM-UG߱G7kK,nmǏ(M>Nnh͖xA%hEzGGؿں&~$#;w0s#"g/}S?*sڼak[Jqo kаY٨XeeS%cn;pzI$\\f}3h[Jt* Ǭԩƿ.i0u^8uFjMkRow2e-˙,ݞ^4EKg)RIAnt#jcguk}`FrzM$ܳ7UB4*Q{klwűx 8Ode4IrHJn" vdBm>Q5눝qhVڄX3X[ ؅q`C6ۡ1ЊCa" V]Xt`o3'_êN9O+/j݀5du {~ũOc *q&fk:,_76hv؞:<yrrz%8'^=$pifO3G_4X?,-w,Ap] =}F6jҦxY WGg.ez15e8r)g RT*)Lh,fR՘t:Cl ~K6a\ñ,cf>vBN:EX`xlW؏;U @ _nzqHRX*Qwe-p`CKdfJt-cYRX<𴬬 g'jr.X'|ϮP7YQ9&/]cl@M:HX"=DŖr''f7@m ٠n#WO1X@=M$JRh"ݳ)d4Yd82# dcVG F-UZn4MXq-9PKSD/E%ΎYl DE!QdDCʒQcȡ^1`<=z^d)J!RouY^oj• .oO;Ķt%:xeI# o<$svǵXIm=mS:fh wHǡ% "v( v $MAAdľ[reL̇KTոF+nk$F1O\\uKŸ9ĠK/qf1oC{h0J{sdcfD#uЈ\[^P"}`Aܦ"%=b֍)VF k-. 6t 2I%l"ĀꁄU+E;UQ2!v;sCG9(, KW-N=~# JojD." 
Padӽ[kf n@j5aQ|Muw ek1s|ps0u{_qj0K;Ugp02rՊvZzPAHוE|R# ?!$#պR+7ͱ3mN 8J&%8o`tͭK.*O6vͻC=5^ڒ&7zѿEg}@J [=/+^'aBf+yWɬZ;m붵}%n `_m :q]A2 SUWnl?(Ư-tMn^B( M"j ~ޅy;;9UMXI+K(C@6 VN|3$26)i.ɉ bT<)$1Pa2+`"!/^Xc#N6Aqҧ<"HACʹ)rm _,\}*%C䞁#deYx wmS+XS%׻`[}P N,Ggvp#$nvz=5/~T{vZ{>?2jdc#A:+PhΙRXyE86-Hb8e`*#k5'p57)K 0L|B!M9P %[&}հ4QXDm$:o}ߔCh;Wzi fFT)mɕB3sAlsU'&S7ևY4&q\ ".y֣ek{)py uJF.J= @J]a:mCbU!B6d&q`AܞdQ?^<:K>TʱA}@֩00d#pZ#NU.{Y^w<TEҗ`TdxT`aCy%QȖ^asY}%j(zTISf>U2ڮ} цuTdԻw-M*zv>^DNɒd(<N%y)M9vq Y2ה8Rmǜ0lUYprHk2$kmIS뉁0f89T' E^1X-I{w'<өkqJead)ss(Uƈ圤p9$;|.QH"U癥a81<ծU'/W冁fJƫQ4 ՐحyVl!{Nf!(ORE 3QnPO8P2]2&g3j9h]deE!Z>@t}+h-]ϛJ)R,^ېC\fD/'oՓmFRgSReNԻn֑F 1$+O֎R*r3k ")Ȋ%Y M oZd8nڜQ(R2KAd*=.ƀ{K$ xJH߇Oi*WȲA r,K`3I?ҟ %>Iepv9!ˠ!I2  02@V+i|,$\xL8?| !ٙSi'DQ|mo\ :o6D Vu!:`FI'<~_1=LRwW]Jzb{x`cu ճ"6 ʞ SzOҰ=NջxO*Č XkL/iY-[7/f@'kْvI#b=B*2D ]43nY"yj`-wQ!E+/Rwa7\)Hq&ʰ_#@9s+(o Sk>E ;Yeޚ”$ѧ͸S#ԯ>BEko8(4F2ҽBf!C nõB>+dˊ5j5t}|D!ݘ@4RZpxGn*I-Fhۙi=Wg['5=E"m7`X>91iDIp}o IkSa;j gP kQ^!F^i$蝎ÉS!&J8RHn\". FV\$҂B"WV"` CE ]"FT'7Vb } - `J:o xN &YFydb84Qԏ by d)>!ӐBs}m`ٍ|V8BrڌF3Q$'伂cah߾)B [~vVA{4#B J☵0 iZfNzoߊ,10s))a]oc# !=K"9״"ίdl8X6 M%\ɷ:VŲf͐96yPɉ,Y Lw8qZ(_HV,hs(w2˾s0#*7FڙFb|X!:4P8DP|Ƥ{Ƅ8ʁ; oKۿ1TjLt?"d27x'sJ谌2,ojs'fLa];tlE5 E9Wg,jB3SK`o~^߂B65畎kӢ]K~ʻ;A7bȢjXl'Q1{a Av $ڇ2x+h8Qy29cOŃ3zЉ)ejYKBB.+]S6mu%y*'rWwUȼWw~PB&V72PNfw$8Y'=QBbXTϻP؝hR"}Ʒ`gn ÝU Sl?a ֒743,XX]vhUqt`d(7-`-b!s4Ӕjtp/D-gu𨐊j!:Yv5$|ȊsDɔzm80hCHX l)I$J=+g$^Հ \_x?(Ǣ,~ ѽ~-ЩB:9NOM5ܿ)ʒw6L[z=1*$սŻ/Tw(IywwQIi0c OyȜRV\S8lYR;8/^< ц,FY^O侒j kH'}49+Puam[5t[޷X` hORW4czsp^ B\ ռ#Ţ_r04n\bSUB,XaB8(_7 , ~78ḏx'hk.RsJW_bgBe~wQq#W(Ѕ*wdLaR9y^&!aPRΦ(a0k| ,t|!f} `5Ih}TW GAbƈ=y־ Ŋ`1|FE8\=[q^"( ,YAz@돜( Z,I"6#,@ 5NX!g3]Ϭ7e}F.gQSGYyiõ޳Ȟ];4dOH%*( KSE/m>)92ʜ1u"W#Q#%lB-ޤ;cW6IBN6[}YA߹Ǣ }A\}K[?M35Xٟ෠ojz^&Qf, y]g}H}Zǔbu OHJuS ^ښhR"qPyq/W* iz*L-uAr26,tY{jkv!dUe*- X9pySIS_.̶ G/rLSJQb')r7"''q$oJӫ7µ3_Q:}%\Sb& K!1裏] c>7rr5ɘEJcFO[p$LP{G#KX^O"*''r=cbv;SAy"~s5(=Kjc2[R?p|[($ sóO`07^nWag[_'92.5BcdQ{bqav``5Ant WS`DԼ>=>j2l]|Q1ʍဂLI_Km홼ik'h&D7AtG-M%M*,>`Su77xTLP/6.)pW]|DK%QJ6DGG! IxGR#;NAӺUN%AxU*^S9W~SنIaWm. <5\^@V.ml~dOq&B^RkTC[gx)FPLP:eG;_=px/*  |־UCu EeQJʖnT0DDxzmU*ŵ}}-)W?} HۙN0kzlIUkQj)cTi3~{+ h"t*2D1LɼKƻwblf"|{'Y`I Kd"7ps{ǸX \"Â,U2RWC R$.O&bnU8ҩza,g9ٙSSZg sAa9V=?h#1Q^YJBVJB1/zJݎH 3#>a1"Y/ gC/ՀrnE\#Pvxhí6FN^!$YXi:.k3VnZ_Ͻ{\„zm64Pc6:T[RϦ(c_ /lʫqbndVầ|Qw]~P߮Dh[Za^#y7H1t?QxgnZ Q,tH;q;zrvncfL&c L~0> $+Xdո-㗡6 ` IDATx4#ȸԅ/,h2gxǞDn/sqaj(iR3gbW Uk{:O,m,ȆyyXS}*y"$+odj.]EїJیc|5Yc]^J?u1c632/f)>xy).wɹ''r݇rǏ"p/SE.|pAZ9-#"[}@ƨ9IQ$ݝC*s'I0V,`Q0Sqq!YƮ jYH-)D 7frJ`G),8nXA]O2:U̳AD\8IgՀ yJ'{92O+s ~Rs,&s) |b|/U1ּǤ0 ˔sh?_3S}s$k$+sH6GdT9ۛC*6No#UmkŴ[>7*UN#eHTd!g)\7D̒(XbŰӧjTZf RLP,x8m|cm3w9.aZK§͉cHMU19V,ZN?irB:vkk<rz 52D\S-! )9ߨ[%)Bl`Q;.~أH8RrzX=vuW*u,Ru=F?9Pwz+}D8I3X;8O_S]lcr?:򾬔o)2LYsmwb!/b\}"#q(y)V$$5E1$R$2U8ϠKF|#>]7}"} k+ürp~nz5(pJA\\>$#jax B$DUo4a?M2=B /?:}Z)&c@F^z,ٽOT;" ! mgڀ\KK>{Ǘ]`emtS*s7%B1{-]Nҧk.QYr0icq04S--$ka>^Xm J}l8TӼ%-9X?N /z13еBdzRprLVX8x gv7p8(=e]`t=m \7u9 8r\Kz>Ҁ:${ĕ3JT*KB'`Jd=sH%Ruzn )yt']|0+ge݉ޯw%ӝ;78ͯg8 Wדw{r]K8rMV+c[9cw=\4I­V]mUu3 ]Fw>s ;W_$e-ot]r̕J]-G>wss(8A'=ݏH9(ȏq٣ \ u{.4+RLCUmڂ׮dA.\WŔ\fCV^sUn>ՓVw?'sʖ\ޒ \knSиm!sU.1߸uX`N-M+)c\2qɈo.5pUR5> ^[9.o9дu0}rqtznN]@ʚ~ux${םYR`ПAfWιvc;INhIZ֥lȽOK.غe"rEV.тw =̈́c׮Y? -Kw>;7c{аZߋ Z}$ߥ+\;W?]mgUc?{51QNyg &B3 R2MMts5z( dA%s^$I@,1,&mj][EۺnHT[tev~7 $XkQB>^ռ۰A|R_rJnW c"rP &nxvAuL3 8ȝLH@-\ 1\]-tnAG.a#cɈ />$:b]rz*1O\[RjX} 7R9:E謕*e>\b}K_hi\7z_֢Йa7_y}HRq+B66:ݣ#Ǝn H 3BęS_u{m]fٻcMޱ&#[Pd5ޤKڹnrᖥZ״%Z@[z۪-\H}zFHm cW#BtbrOkkЧUk]"8>NtZl-= KʻihK4bu|nmjBBV(TQFRV {e2*3M4Eޚ"ʢnrmvb&fO*I"AIjESBm"d5&;j &*&e+ABiA~-5I,67cjxdqYfjKtInw<n=o6Zەm`(ȶ950+"9jEMu@8:2s8|6 Rl624_0E$6I^SٵI frK! 
FVKƴ%ɡF<DiF04gnD5v*q^Jj5IJ n(WP  +.ĕyu:VIzMc DoA3ܭue69u+Nz2o҅΃{~>`|o ]N:gBt8 k$ɂፓ5 9㷍+iYf~}WK+i :MXڝloV~#FXZXRHќ,q/ @H8 ݟ'NA,R0SU7Z9|9L!"뺜u\; c&6Vs:/&t2x;BekD! r oOw|(m $t`yX:%zojR8eJJAa mIuM~Kx\Y@`V/Gp+4kGRY&U"4!ҁ1z:tyei&2QkETu$n|:[\΃AB%Dp.e( 2iqI%197&f|8t %=cAӿz`I`Av:Di*0=QTG.q($dLǩkBq{ .2`xmD|_KoɟJHz3_^&16 .GG&x3, 5|4m$oqrުğ&|-H:#)zx@n/T۞Y݉Pj Fy\]s^{%6mh~&!\PT3W:5]p hP2ȩKaN],Lo飨 \PkPU ۓ`BYH"/FvY Hvz4f3n'r)l<|#al.Yנ.Ʒ0?$A#kênjѠ\ tㆌ㥗C fS9qdI^%fs`){F,1KRctg!Me|Yq3v/޶R7|~1Ȱ^%vSeƃ0ҸqoRq`D,)=YRKBWd<&du+YB!;K%ɤC }' fٽc 2ɀ$]#U$q¬+`I9G$,A%=4As&SGJ?~6kbg_%v  L0>4]90!cTx]CZk2A L5m"Ξ=cۂVΧqhzQYXG 5ċB:vO9ju&"ߑ[M|0b\dJC%Q@T5 [q??ooFE͌x?Dnj3]rp(%)U\6J65|^6:ZZc8[ڿ24rKjV5jB^Σv\f^.y5XXZshgM:` j IDATJ]x88JF+KrYؼ?zF$ʦ֕5I8$BkiQЀ\af+sdG^Gnuѵ`YSJ4s@]veb vr[w+ro4Q'뽚2 5emx~{=>F`* UZJ K& 9=G4T<~;qz^}r65h:-hCBBǬu΅WX(\+MZ Bmz6:}\-W#;Q)z"zu@tʘ1|*sJ?p7R:A*W2.9=N$AS+IlPvDZA&p5PR$a< &QD%uVTU%eu({ 6unӔE\a5!J!7шZ S Of |X`bQY\Y $*Bt*ёc1j]^?> fܽ+nQphrssf e?̬A өzBN'=֪<+wCf3cnTW+9)p~.!{y <Zfۺ}6&Y2^Ok[,HvSXHb,Q y_1@>oT\^'o0#kYBAjVyZ1 W~ (" OE$1HL; .~ Bèؗ@n5RȋfwmFU[ln#iQWO]W Arpލ*tYj6`YT8^oIbmnda{*'ݨiN>r܆9 D=kOZðv}֗_hEY +=VIBK)svo%SD?p $~M2[HisP5[in<3›C3r jSmqA,SHn{dIWitYQ0[y?7.-pcm;sڥXzeo]ǺyZ:tdbE̴$|k[92:|ƛ29SIq*S=M/ * dk0듰̋%:-]㯰! u"=Uصs5wI)˜W]t=2JOϹ|[F{;_FA4Ϊ+Σ"=߃{F:15-]iqqIM~rMr#[ߑ Cw&E!hL1&Ssk:>㾪KUH&V!,m\63ZӉ[rʞC!p-s!~ @]st?GТVyZ @]רD8)K<,KSmdU#w֪FT~SO!:=ŎF߃mq]qyAQpƌ}\r"dQH2S y f#, 8wR͵ :=͂2Ӎ4xYB1IW$Ix)O3^|Q_B|[-}-ABB`6gg"C^Έa>B7,( 8;3wo9wwE0sj48/froů, v[֫j˥8|%(B1:ᡘ"W&neܑk>= w2u,0nͫ}ov~JX0FݱWYi[މmu:ڽЪ$E˒h` dK UjK0ʰ234f߇t]>׀"sQvVr2 &dJ7ME^#Ň8[R^AL4YkC`qoVr +o\r ;(R?0ݖ0l\rUdGmsj eP*Ѥ;w5T`&WjvڝAdov;؀i$֏ IKRsܮR޴W^Ez}eRLu"_>?<з0qvng ZAu^f7tϭa9#(Jٛs=F鮭s :A,6!|R&VH9@:zML[_z:? \6MӴ#$hJiPJ፺o`߬+d 3-$ :75!A 銺G=J03OO͛8?c3?]",եgl6e(?kL+t[lG^xJOsI>C!sEgtE ^#rn{v;B|}m\+\u]D2YhHv9b4me^}$ɼs2FjKUǢ-%Gz؝eBBt0c^dâ(P0s7fF(8*{9GG(~O1 U%F|EA1o:$(mXT8UUhm6L uK et:T-yB #F~#M=/SIϔ)`՚Lꂥ*|ܠ"1ky|NoDC| M/. סJ"񶖀ܕjȹ:&0?]v&CpvXRJSƱЄMDXd[ĔknβcXs:npZ|kO~riԩ:,zO+Krй^4Ҁ&`P^F;Ɯ^*.ݪe˖&Yk}f{{%vAuV[;חbw^ﻮXR\;Om6Q7a4GohGc.2^oW^wW{=hQ& Ɗ8;9K[8gʓJΒފљk d)#?#?A\_=G?#?BݺcÌ0@D9UI9xHkE^bK: A--uBQ\j*ZH}g8@OI2DZ ,(I DYTf]WB'Ҁ2zO+JOdsUJ!XdX#w!.%E6A*FH#1qb"qMaQUTEu(mЏ"znv3(,)kJvBҺɄ&Y,)y咪"fFTc2S^v s9mW+-0B3KZ-*V ^HǶZpH Y{ ڢ3DwOK-M~6MDŽ7g!0Gh{H# |uN'j`>eIxy]n :F斱~/PD Q2DBH+͗HtnGگ`ӑAC89! l&1F׮{Sъ09Ui^c9FQV iJ}A<&2L/JZ #YB%chaz~PCn(eXܤF}2~t*U"B.mHM,圻]97#o6rNG[}5=vZ( !aglND -^J]JznK TwW!SUP+e@bQm#FAq'RH8 Mf ,#v*RցBK-MZ+ I@$cK17(R;Ti幊$IU&Ey? A$ BW4/QHnl;ƫAwOkµ!IW>J$a2xk]U&Wi$ሤ~>Hb#`yӅcNBI-85Fr$Q3RpE$qM{G$BhKa@9%IR'Jkݏ$ m{UKHcDr'OEBؿG2 =ڧa)@U}׺{d״)O~d' H:vg&yFsn\o$ے$ Y5dۇ Ugiǿs:p.7VYIG.Յkc=d,S 6dXϳ ß%ɛD7}BW*-UQPJ+p)Z 90$nf]O{?q|kO);? ޥ$ep*o|^;`-+y MRW(*Rⵂc8ud牎ᶎy]D;<#jTlkiMekĖX~笲֙h %k h&ILy+kZȇWu?MBQX9ԌG8`檪pE8. Aj|DUOI,9jPnHs"Č4ːZ\!2Г'HS$yδX YsC\H󜍒?/Q!cnwxpz oD#=<lR6^u#og70^:˥ɔ1`R$vM72 oܐwǵ%<׮I\Cnd]; vHw:R)g% \ͧ!X`6KA$]&`o2q~.s وA񛲕l L2Bf.\7n zeN%7<1/FeeM737#+^I3|eP0fI&D[,&{Twt.c&pS)[0(h@;yzAWo*@ӊ:]F4~md$>F,Pq{oRb%#NqaJ UJ fius kpofW\Q)8etap0>~DQd3?G9 %:2xUv= VB!Rsfj ͵KmtQoqJj(hTrE[JV5U%~Dhi7fFEX]vt7IPTE\q#t\ߣjЊcd.Z=ZeVc\"L/ t7^@WeXDV?<_?ԧЎ"0l0.jX74ƳkRpwn%`N TZSܐEi-nfh`L[r!k`|R$ #ёtL-+K,َ#B&6u##!Q$_u|vˡ߯1=<|(i*9=F'.lfҺ}nTT@kj`Ppe a]iZOQMmLq%]I!%XvI[\"AUZDQ6{Ff@U~ZRUKR +ݒol}BIJ ujHVrk>K hԧiKOgrjJ 7,SkZib1 }tk&:p 4MnuáTw4`;p~Co:;?6aS=A]PC --yTmo7pcjgg`okNA&dk K<%C;@pΝV֒;CfODϙBdfwx\soX]B5Į4k}tM+Bv^ ڋMikߗ.=S,cn]qH}=ni+uVٹ$"SLp]3GJAN .5ҍքWv?^8fqJ 1d Ux $`ⷣ"Йx;Jox'*Roķxݔ"]fV 5)csM<@_ ?Cx}7~#U]P܇Th<ܲ8PTWVnQ4BMtASSҺPC1Cҽ}ߠLj\ "%LۣR *MjxmN<Dr*s'Ruxvb"/|jE8*w' ¡Q$JS`Drvu#\N&&kSg E (Jyp=> 9pTEHԌ:N~] q(!m6d Hp1#͛9.. 
T'@| 伄?gxbgId%F~)>pHwB|$ gS^15O #5]3Z,D(da37~D≱\5S-Hn,I G 0)eF]*1Ddlœ/\6UiȪ D)Vs *xmoKF7:ѩb)\2R%|#z4\mGńz+zկh9VG5X-^Ҁ] HZU}pXwWwT*% xlk%F6QMq8N(Mw?ǏϨj1СrY<0q{Z2{s8sMr1z*H Ak@i nv#cݍkׂ ӽ{{#1 #b\G$J:?ďC>kvlg9s~(q,z/TL2F%%j*z)%,Y $&uUkD] R׺=$M#^JU?}߮+ATWHW Ceiz9HϽ,|0=3ڐB(4_gҽSVf;#Qc:Ms7}->m.0ZrVRXW3Yv?wp7MnЩ ] 3a@!9v@\6B}1SĽS:{jZ7ś:rnu\4 CR/]9y|JD;8{d㼗{Ut GҎTZdeS#3 4؞#5fuYDkAB۶AH;%g.fSYc*wP nO gu}jjQSe+OA{iڵU- ]hvN]U]R;W"\A1.߮gS1?)'/?__Ï8weeML/J7: ׂpШj&{uBKrz^E=VSE}MG;aB2ZU@̦ХcйY;ءjWlx,m 5X}zβȽKsJ#)MD1̨T|Atʻ%@aP?:P_fZWڶ  ́fFENiS;q!Vsnz&e ~MJJBYTngx  8~Ue;rD)aPL\ 8u~@&Z= & @r'j"N{&sڝ*;.>':J^pyjP>ZmOȕ--4Z/]V>ECdv)C@m9ȵk^[$hFn7^ʪaO=o*9$G:[W6eWB2p[%ƥT%Œ Dws6ϑ_C7D`7>6fIhgnᙻQA#g7FϺ{b``Oߙ .9K78`rIrypєrgE?Oۿ/W^}e>q۷6"t5D]K8m 0p]&Tm6i([7 l4 W$sQa>IdKqZ-9'3 4b͸)0r<1"N5!P}=>zu-H.5m tjMz G S[W@<}qkg󑮬+Urd<+\ױ]z[AsVyg_'r8y bsg.C|]U"ATٛCn+ {Y+"{P챾-(\;y -}[N~ }v2] S@Ne#\w CK^=to3|jqU] P0 뎵{e>ҰE^E]H"ύ&} k%#t[wݩV. 86`4K\Ҡ#knBJ4V 'J^:~L×_n n!gP7迹nzcTKNCCܻ皇OtiM1DUkju#XaRZY^>qt`ЋS\V{ZFkێ6fњѕD5Y8fDRcke,E!BfaawNt֣GHSoK_mlwAs0X'iM{d\?WmUԑM)Qrνu,He:B "׼@0J"]ٕno8uGs{`wױA*7cWJVxضcCNk ͩU0e#1{Dnz#R{>vEqln'x+7Ǝ8v0Gu":̀N8@FQꅪ;<;['n8%X7)Xoŵp^}Pj7xF4-t+Zs֯, L:% :S5!@̵h3ij5;ױtEMKefp(VVyvA9bˌMVR% Z1yUӔ((kUtJc}z=$uvYUzTz1ܡGpI~G/"s|軿>.~@%8F7Ha6cXmۭ_ʾ>$7uP>H9̡uSHnc8 >&E!<B-5I3d$xOSxK᝹xF}ݮtRM&=5 %n53ϩ鞘˺̕ai*(K$,7@{9 @΄]薄x dvzx]!F@+UVw}[Bb`׺sw%A]W&)IhMM ̌{yzJ% J7JlʛT-+ѷY$o@(UHK릞? }vG?ߏkptto毡WKg_WؙcEs82 IܜԀ_AX/S[ޞ{N޾xeC*X$ϲh'1ƉPN@G9!!OX { ݲΝ%=iTP@$31)K\*&ΫwJAZ$Q]G/#QPTuLu7VBE)z/+!΁u0Nu7z|KKG^"9 I"HBLD] ]#D[RxF9k+:v*\'*R­7(zDyQp'oݿ(9_ #ݾ K/gQ:;8x0#sJ hfCQ:ў1K IB0}Rg੧g&\..$8e^Ϭv\Lpq9Tk69۠i#&w-8v-\t5;: k9U7w'kV29\K Q:'Q=pFL5a8MɰO8+_=/|c)3z?O9Lz$dma&\8 Vwnp.bMB]?b,d!AJi&W/w /cH$аsV#k]Q|#b| _ !?, t*2hmȼ%@ss Z=ICסe U2tG.V 2-unnIPl&Y}G|)hpĮ ץ%צ7&u kB*gF^L=/r%%ˍv6 #"f!13U_ajw ySВ=@nc?EQ`ew:8fFD}Px * $Qđm*:*8OݛZ_ub[m^UuwRv{3 0&ac0C& 3B(J$HA3R00`<$48 cvv]]]^[O8߷{vL"z}}9|6/ A{y׮?kַ|>q.;wcTHފ"Lk6P~}(4/]"?PiFl"8[C:PRe`4$ۜQ̋$Wϓ 4n5 D3vL,tPIt¨PDX\e9fH>-u iH j`<O `L&#`B1%!uۀ!f=_ W@OL 6(>U q dKwQD>>^$yuLpG&D=Q]h &3RRZ:_t2,LI" T9[vZ0Su{nN~ym[yݾ3? 
x}@i]ؿ́noj:w4 c@:o\le wjM(5Kg>,rcBN"QU4FԵDfS]X]T[}mNo#hڶтz8S)::U;m}3?DS+  ɨnL,T J)+-\n9jk1ռjmD*Evqkц=P#n,>jS/fáC/upe維X)R߈6k65 MTKz)5KD>cS~Z@f.=rz`v+H݇;=ׁxk3OyG p.m IDATع"hؑ@)Keέf uMhȧ<6kffw+u6ݮ20Mbϙ)YbKfe,_&l?EmP#4g=[ XdՂf;&F;sGtYFk-sƊcZH8{s~ؐ}4˛ Ovk:[P`6O&5,Mk+NkK*٬ k-F,B#)y JAA!iHQ^,2,fChi" EG(V(5 p f `  y>Pz@(AF1i:ݥ*.P:U20T:㟅3ʆyke9㧖> }ywDX (].:q-8 ;̱tFhSo[8hߏq|E\ ~|/~DjWE%inV !=A9 ƅs pDvR؎̜I)Rj&gV$j^mjQxKs Cbgp6w mҬݦ&$7M+y\[+Nrx4c3y{$ aC ̺ܩl[)ڷT̡YzDZQ4nýz*Jh$r (Mh5_2d:ߣ"28fcJ5{4K\[3gڈhusZtT!DAF(R*hP}~] yKJox՟Ɠ4řrhB~F>F*)u|_ww>:NtM^K=FimQRR'uޮ١E) 6+E,rXםôxPh oX7#Z(3UџTB)XzSա&y }T6VD MB-$ZZѬ R*WPzkYapAlf۽iiPU{Kʢ`)f/ҠpZ!.K4K> C>ZP$I5EEO???;qn&x[XhG,C#^03GA"c0ϙ^yC`>g030XL qF_"Sc5AEanT3M5SB%\_&BfMͦll&4-+t2RXZ0Q$E;BUjdA Ԭ$T%ܾ-a(ZMr)A|a2a-I)UA .k\|Y~;&vbpC RjZn,u(k!0aeIs2$އfu~Lc @1Xh24R4I"V+/„-=`y@~3BД8e)KS;Fm n!1R_8L24%P\JirMki6BϱZ]6X4CE1w{E/tTbgmKZT+ccfihD(Bn# CxDPܭ-Zll:Kaod\Hk^~)SѨV ; 0NOt$#CqR:=w a餜1NەYBX&)ϥ ''ٌ.HwH+Tp.e2Ik }iV+q>q4_֍XQHnA#%Gb;KhPHu"ֶﲔ4 I 1$EY榗%,it}rsZ'fbHO(FyJ5Ä $CT;/gsB'q3WZgo~">ؓFX1L#X#y@6P,MF$ʽeED:$:&QjqyCIYz~,0uF^1iʱY,nj=!欴qb"z5t}!eZ"il`-T7vmD@j15]PZLT цƜbC!86T&9v,$mHUP}~-  *_'h+!(*(V3 {_Esbx:g \>|,._Kb}]+;mNg:?G{o%q!KoO3ӵ:)+:!-9WFzU6co(WӍ2;FJOؿK!Z(BsRցu+E PD>ubuȦX׵tTf(_6f4tdUx`zN!q+dٿ\A>0\2>?2>|)O77EmmQZq$4aAdIcR6W+ iYeh|NO&ޘ6Pm0_|wnN]h x6cf #>:B62M.Bh4.Ayʲ]i)67Epv&MK9tq ^/BA|הL d=IB\l[- Ґs]wIvQ\Dߌ[D` 3.Nb<_'eohe""5jyb:: Iׂ/5eiYb4YcapdN_*]|F?,/ k׬8mܔ}4p -ˌfk]̀<#TjUH=~^*IAIn`9.-__GYz h,ڋhVsu"y 0 z%@`,Mah5)ub^O}Z:-CEC)v2Ұ5-&7ZŖC 5[ [hlBKMQH?'.>7=A,OhfWx4:7s Z0EZYYaߦ1C(]5bڛ4K-MK--esl~@- D6>'5A kCQ[Z5 a3:ǐ_o`1iTkTs=nf62A|ȵҚ&.\sjژ\Qmoޚ"ruu]jCX6Vw69u)=3G2ϭ Z'tUHOw=;J]׽,#\ڤj.qf/+lj*uZJr 7"EF^G_%ˋk1&8OJNKs`2pn _g2R5bi-_]kh-SXпZ~ TͿN)_jcJ򣎮Xcm*mΫ4%C)rYlQLl}OZ jhEpҼFꔵfںq#=(T֘5M$Jnr\Ur\(Ϥ w~XƄ4(b;MM 2 j) ٯ uheUq2Tn0S~`y.6\bu2Ximi;zI·o댐$1Lo#YJ3#:R9VOܜmv,Մ@i⁠<Z[nhiP1%v'#` ғ ?s h"a7`AÏ|v6t{yn0*Iml4j<.[Bt5hQrpYR-&.i'@EbWj?:-Y2Tm"d=׿>z&]_c:# |?BYl~ w~Q4uMI _ f6GfNCq-"dDq=G}wƋ/u۶dEdB??ӿ9qfBCZk1zO}@>>vl궙PܯAW] Nunuuly|)pޤ5\ˤBǿ-sǟP[[pNvV6 VIf,KWKTp5ȷO'Y@8 <8vwvA.)N|u:N1VDC%wv( \*]Ȉ,W lO4}t}|vT1m|W}]%Mͯ_k" g96LN7bsЕ+FsV+y! p)&tJ~ omI!oSu˚0zٙ/(??9Dvk K>XSneS0[K2)ww }vu/XgO uP^+_߾-<ʲeߟyF>h۲/2ͭk}JP,Ot&62% )t䙚rC9fE!f~F_J:"e]l(ߗϛ" yBͦ|GŒU@T4ݖ<&g7!@^MFr&@g@P8 <#DfS`#W\BBE*H#yнPNAs໾G~O~?Al\EŹT=5؄?x| U5O{<|X44+g#Azj9 Q"dTr5o-c8Omv" :wScn!zڮLX]UIL5ut!8#Cڀ=͢r7*Sx{صr6Pli#`nTX򡟹q*Al;:Ԗ-==jrGdn-/ H8J8naQi.ݗζ2;C͜s~!uTcO@Sn:F3m0?]k0eSz+i0@0,A0^i}e?{ 5+2ڊPԬ3i0 @w8=E/Kav;qx5:HM6sgjv5 1=;>u>\s"d9Pj2ڒ}]u(q"窳0 P }\bmH+/ӲcuR&#W=to!7~؆XGO:˱RBu7ڸІ)2Y̔I @D~%UE<ǢU"2Mq}t6 zوFGg<@;.\@{<4˼g:V79 t >h9@󢠽fTӔXU4XUh!Ve8^,bAW&퓇l5F>c/W(/b$%sJpZd{G-A& h-/_·٤tJyS,NOHUenF8CBO8>- _pp PpxH {{҈4B{j6 o;.tVs%en( نBI=4,;V#$ŢξuKЌCyu^ĕc8<,Hҳ}x!Y|\MzTh0<Βh6-Nt"&g4mfaYMbn6u|.NBU:yY&H$,8"4Z@@:#T 4#"FE"4#D-`9%DKմ^"!t"pJEB,a3`9.]v]d}I yD( BqHo|Q_(~_}~zfw%%В@ yq[_/d2ut{G IDATr Z(n*e&ǨkjhwJɲ˸W Q&Յ0ikXk hs/UL9:8ǻM xߋo􏾏~_wW/STWmhK:g_cԉrIܴt_sМ3/-@MS4dzܺfhD jkgFhaІ[1zi=TzLlB욮ht#=zd˲5;[u^3K-mG?6G-O,hǮV\'/-yQjjYT_*X JjTj'ڑ.'eGgqLfkM41 !ZrnoATx x92A6btF,ܯE7}MWL%)_@pTkOM<%v5vx  Oua;PP]P_cX0WZwd.D+@5+tK?"EM̾>t]D4PMG*%] ֙4qLw:aG}̄Z Xs"]%9cKigL$ŒBEmg rf 4,f@iVD@-$>.5ѣ%&Eׯ? v)=>Q|\U4CprqEeQU`#ьcUEZX,bA=D!n1Jbd>^hOvq:lz_ctsaynUsTz=G)SOfdifxGl|$'bjai:w")v])`Ls)MZSrE\Rx;gOKX%=MmђĊIWܙfM`"[@-hwj%#@ޖ@$@!Ў@(Z{#? 
ț_|ܿG?Q.18WZWS}bxfM]i.^M#QKcaӗy5VŊ5rhGSj /=L=ϖ;=R Y CR m>{ظ'"d1V.`yx(hs8ĪCw6C$bYRyV A3p} aRUp<%BP^ +!ʲj!* +xDrl۽SKJMܢլZL)xk lܑtIlJacf"iP!ZkB )- ;Mi@w|f>Mqbb4}КXFj47oҚZnj$ as5XTNk#ύn%sV4M(i7!t_ðp }[UXCi0U%hSYԨ<71:5fٻXrMZӨHqQNu{ui =Zgp yJh7YnX΁*lc4@ pb;D,;Sm6cF}Bad)aʁ(} Eh7|J*`z.|ÿKyy~>?n~k_ߢTZy )^LH,CT~*Hl'B0_|&[[jz"軴I VS]9va̴07ϋׄI}z:ׯ9eٽ˿lEo>_ˏ>_\[(8&YgL׮LFZG",ׂ?1)l:5 >/Th&ݡ:ٴC 04ꬅnDG*2((ba-ErMq9׷ؒjNJRkS,u&"z.t֠XClmjCaLu>jbT [5(}[KZjcD]QJnѼ(W)sIE-m[uKϕSislyY9~a'e[s;#oRs}SL5}E݌0b"jFPׯ3Q&?Wu>:M)eu>[O]4k"^, }ScWH }xAu 'ܔ˜XGE:އ>QJW5 !Ւ!Z@CҬ-Zu4e8֩t}E0,No/Rj6:ka.ng:Xϭԧ͚HjJ+Q @/ q%:OOSO!}?Qyp\D،#8FhEA&AQPU(¢(hc:Ec("QI% COS̚MtJ UUaFDGqp:3QY"*GoYx쨏?ոvSnq h: x(i{XP&hЊźvH|$*3 *u'Rg@IHC]8 P@ؔZ&@n 3a6~ = = hR¥~Q# HU R x=ASX<Тoԑ1_ p,JJYB 8z4WGOMw<# rF5եL7ًZo*E3׿{{h4"zq k>'||r,{ȱɌ{<j[R%i/tFj8=gm` TMo[ =͘GVrC)gș;T6.*9+JcQM1N+pJ# !PϨ5g$7AsLJE*NeN:u'K5Fkd)R>^c~UX豾Wǯ>S;.+}kӚTnZtZ_r&Ϲ~D fUӜj+-ܴ7chS{?7-KPmKVPﭦR)R-:4Dkk ζ¿G.ܟ^^N8U$G5[2rI ~5/n=8f4tz]VSDuZkandIy]Rq]O p){ |/KѐX}=G*ncȼFJY)"1w2ĎSiZxM.tl?j+kS1n8 iMm>[XvfׯmqEgM5禞[dfp iHhB3I@D"?O_2^#[?D{=-[wj4B0<.0Dg8z=4S$vY ^I`,,\!Dh4ܝPZ%EyHKZ9 ݆ł HʸK@1A*:wﵨ?H5Bo̳Q U[/&q167%{!MAAd(q2ŋbǂlJ}_D2Mp߾f/vbzX& S"T~4Y}PS ||Y˥|nbܸ!hh$(SyNն_r+dn7MAT,cZłwي״ٌpz&YhXa"JDޫY7AI:$ y|ZdЙ$BPq,EQrPJ ɫ*i|vI!(d1Pf@19] a h@1;%Њsʈ|L\e4.2-M`{R<03ҌI `ex]AL2% DqB GO? 0MN^{|c^<k\XL-#dsu[(}mQK{ w(>;4eU/+ щNt}5(zsߐ3‚bJ&_;("b_pgm{eMҟ,2[{Hǿ}/AGx6,DŽs:7T//tY/5䬞d!K-Q8NLQxJX9ShQ~VSqjڏ|g5r$c[^Uvw̱ufe=f83V&U  塇^|W~ G%:1 NൺzoFPU8Fh,8tXP$\=eI^8|g 4ElRhP1qLL9@7 2Il41Vy(MV4].Q4hCV\伃ov?ͯ=#kP}dLEgc@,B/ˀlvfj] EZ-)~}72MoyL! ]606B*vL3E1];N[MN0 бBd\ч>*aǐZ0ڃR܍'R)D7 ]ePMB7t_Xn9 .-m2A4`u9^=DǬM <-Ǎ)rlk MHBRX\Ц̮իC:c:묧%ow~6lΈ3BFk*txǎÔib:vZ"2 C/3L6HCj{a$7=:ަ^+PjW)jqK)py/eܚU%㫡WW|*W2)g5дMp't&| m0eeY|c@ )Ԝ# ODT7 "QaJ-qV?-x[EVʙ#źmIJ)"fY)rZnrT]T0Mq3|Ni/( qT9_{,>ߩE./iS=z|mmrG Meɯ+KlTʒ/!&6=[7y|ANVɼ23~5^a3O^1zey*pQ"9_j4v>:<ϳ0/[DV 9WEᐛU,"&8p>bGe#kiqކ?lc~?#<;{LGGfb*Kr`fL=63p `Z{nHۼ+1"P̈́m^GGmfj%֖á$lwyMdlmZkq~.kq-Y \ESkV+`>g>c_L># yC9?!e1^db2C k%`Oʕ/5y#۾Z%XiM(bMme=I"M` z<2Lf3 Fez%ീ,b MFFdEUA(թK6c 4V )ь-SF2d@co3z\&\~b$ex\bsn \`4.0ʒQ}D%M 5\.ڈx oŠ',o2AAFuYQ.Z070T`3QL2%õ2SUsTe^2 Ld9 j+߼1||9qgx4z#/z2[]qmP~,f^=:4H-y'p]׷8ƇX*q31I(8[> )/jH@[_ {5A u4)5eAbD&..=צ&OP 纃e6Z35b IDAT(ȉn{ k 2Th6z,g-]GbQHeikd|#t}צ3gyAGeoYн9A-Hk{.XMr\wuWy=Ŗnk_Mmt.6UnF ΈT]'tjzگ]AW& TxS gAzIT#ޭ9$8{t3)ywЁ҉YP=iݣS~]o-W/h^,׵_;3J&tK/Z8zG芓-an<H-gSuE@amHU@7us' Y*#ͷ\m”];@Ld"~ S:7zlMt?}nLCf Ai A0#:=z1qF(CM9*-J='moxO|gWPn~z9nr4I.ߧ1=h`#p4**f3EeI CjF("LM9 ZG8$(r 䙸FQD,J 4!QU.-r$mB! "4IZm ORx͖I@}BfLDa(! YK'$I5:]* @( %cbqdBy!%Ɏ"oڙ O@-uZ*@ϕj:uOִt<]jϔ'|PZ I'(v>Mxb-gܹ[U$fVKݭ,Yj`jXDZcQ<p`B8--%HV7lġI53{^yu"-R):^{%OS&y>' xF!V<sP9 1GIe%]BPc bߴRvdlrL+ g2sb(ґD53bs"'֜q7.Y6_rL3K'פ)JuAjݐ\$-@2JZ歷ejɷ2/W29u Y. a1>Ѿ%+uʽږzBm-lsuzG!ݏTO.YhaǪ{?Yy[{uO@>I{Dܕp&wbb=ru1;?׾BTTPǦ|1kg/8?;3ө}?[յg6SNxV+?oK+n'U&buGew#8YbVs}#g;0kSPX)| 5&93$>#",wO IQpsR(JJ8P5SSc5.*VjS+m]6˧swE(s`"Ya#;Pρ';؀Rq1Za2/}EV Z8g̴skWj oC-SZ).ԉ"N8`bMy ּ ~MW3)HhZUFȽGkwGh Ф<#ځ Σ-޽ȓ`[۲һ?hG#yB{`"H'Odx\O# 1hQ]RZ(72#Vu/'{ve9G8~o2^DoR Lρ=t-rYOߵӵ]/@x% ߄nFox 7|Ͼn*_лZí/cy~,fq G)V+@Zw1cyO/u?1ylr (@I\B\P(i'lV%V3JH:vMpՎJSNŦ]+8/v/pfSKIiK/{huiZswCw˚5\iވp1ډ}i%.]߇Z6,R|ǒI辌R:B-+NH.>3VdVbt2ڶ{a8K'1z5>}DH 0'om:Hej#WB8ZK+@Ro~ ?R25+WdMrESe w<'pClOY]l_ΆPπ"h R{MsD9/%ЉSG B޼z>_scT~$4m4(*z7 ) }yN*IЊc)(%BlRӪB_)"ץ,q? 
1sPQ CDyNAU-K8A?pA8YzY, l:p 3j}~ͩ;>)x(Beh!(R(HKFIŮ˪NDmP^cHQ+ s?TV"<|Hk@+<1\ tb>zŴ"KVhRVԅB,yTɩ?ָg-8AێP`E0DkCo>KX J㙻V1$fWp|^;".jPjwU(̤ʘይ+"YuS eLtP[3m{*AW'RV~ x'r8(|L >FZx;k"UL& Y4#(Za 9Kt %pWcwX>!cHq@)qS$rVH 0ZSCmosyߔ`_7Vrp,\ I[svlVgFK΄0D }2ktt+̖|)FĒhu 2eJ3BmHb!QRd0t2&~RD:`%GKքHxDڄϵ"] Lڕ\ dw1DRr͵*>PY2NG5j|<.Y `Vj,ׁ;@`2F[½2]^c#?]yf+޻_rz ^YU}+ՖKL'-067d[ +W&@-{tH:OiCLLe;:q%-TRW 44]IhxRK~WȣkZ+Y]YX]XrWRX7ֺsRˉKnKGq@E{Uei^STxמw =|Oʿk>^==ſ}EK8RB})޸ax9xzDns͏ S{k&yQt^G"BeZz-"ySfI F{eQ n6EYJϣEbv σ݅Wgg?~EY")KEUHT7fO{cKюLN{sh|":2"J:tcaOW9};EqݣY)>]w :xDj՜fӸ||բ8?;pM>z4r֞{Q::㜉faXljzVxbRy$;c~0ۜL؏$+ݿ5'(:L6@=& P*BYY=^mp.j$l Ի@pm^mH !V00@H"eFmp7xU@uh^H< >hAhx@v,@^ PT[E<[7OR-msLՀ*T@A"k7v)n[pRiY Hr"k9SFHjqJ5D~G.e:k(*vO<¥Kx(U~Bb:ИL0=#SٟY.pՂoihU.wPGBFeHå" 4޽%񺌝Y]y* $ڐJ~e qI0YTk$L^+˴DOKƒiyrksDcADGg:hP WCf]z%B8x㨉Td}9"+ք!Hk!p C 'cFJ|ʵd4м!ada^.dmv,TeS}f肄dXƻ-&yOK~v%y>(dj8TG@P_n|v&-#]s੧.ׁ`Rr,SѱbL$qOJr&Y&4h p:@ppltvD^ _e ̈́dVqIt%2V…v<]3. 3b9e! iu-]j'KN( `*+(UiU^Ua~?Wb$ o}ʗ]|7ᲢU w\6"$eeؠk!/O׿L}xc邾M lNqX\E"]PRc1qjVC=96 sB|NQD*,G#1q o4!\.uy)iUaYfUgI(B1?itRUG Ii؆`,5_eYD4˥̀˗Zqy} :XV)VAs1NN >(5Hw ;hgٿ&Os2uhy>zQ2< NzEkM^._6*m纴 LǪ8ܸ{{&aK'Uǡ6F}''4=B-`6bHŲ"D0NG@C*Ij+O s)@Oe;!CfGV,{ "kRCaZ;|8 9yMB3Jp<ҩ\]NX]UǷ^:>pppE٢=2C/w?)o$JaQؐi.~Ph`DҿKбY4֫dҪVC[ݖTX"#XBm /,ll;VeGpݽaa$ǣ+0y@,6$au!`@ڇVXgm@S%Ʌve\4LvJhMk{B.Eږds+$@tYuNej?-d;]#[J>纔I% #%ʈo}й G\B5p}ehO 1ޮ*("[h!(MQxLs@67QN&,l_,Q A[Mþj B&n]] a\ h]-H ]2磶ń JS|C/o|} e؁Z-[O೟~x@߄t* jPws c>JPj *^1T aS1[Y5 *XgV VЦRNfuH\=+JEI`!M33kQv`YNGys}oŎHV$ 5 n ҕ!%M '$W ^<ۥC Ӯ[21h!4֬]X8w-Uu䲒đ9bVOe=!+ŸlZ##$5$Е2:j] E5?HGe74qQ1=H#m-ұHO8_E5!N7-\bN+6҅[J"-?V5^$NN)VxdEbŽE(߱ڟ&ř2 weq|eU$`cU?wʉm=5 x<{o\+I`sx[-Q,ALp >o["׾v-MN*-=Y-5|SVW/*!im Q WS92u#,׎FP˱jD&]CuȿP{@:5ho)=tD?J_m¿tPUC8PAY`$f5a(}HiZC<}c.N_$<(f\p-tv/,]|}~bzj PCTXL-,XZ5HF#JjBQ ncuٞ*ːjtQY"܄P1T}F9|!mq*,C^UR2G{5N?xg˽9VESuY XCSn]ÚKlUGGuP̟->D@sZ-VAp]Z+Fe&?|锿[a-Z94 los8f:'8x]]x۝L"?>6Vc+.? .բt-4^}m>n[s+ʒI+SNi ^p4@$BۅrpH8I[X N:K!@9Py`̓&wHVL:I%K(@OX$[π@.m@`c]J׉(d9%~ŵS,-zTtU_ğ|})!?/;E ƛ q.,@m4`Yܦ|NsHTYd]kZTkjַ @X[ӕTVf ,n\~iln.pC\tEeABu2]K"jOXgItf)J>mN,c0^tj_+9JYzr9mZI 6&%cɪ|Nz_d~JM*in=Vd5":wmٗLzGlZղ+u}+1TCxccJ ?2HbօB27|yMsw *mOEme%XK9kuM6t)zeu2HGG%}׬!Iu"VӦ{o+κ<}J0_2C?ǧʼns _۷QdMi)I6hf~]zm`@f#960>?!,$CK|ļ T8&cSP${B.whe: "xSP&n0ʸL# 6# dAJ^k6dЅŏ-XK/!֛os"Ϭ:+yҚ}1Kzu,[ )= .eп_  @,i8 "TQF]"VÉc랇V('ӯw|ǖJ/nW]V t?}URǣAz l4@jVä,q= ߧ|( Ȩױ<8 ݆͔*K jjwu9= QR.CxyqÓFI(ˀ%ɣ0{<@+X)ޚᙽMLZ@ZLۡ\Q0Lg>w #1K7ovvcqsBlUn_*VYIjЫ kT0hNWЎ۳-kᾯ^ڐn"1oc0ٌ.حLZҰZ)ylm1l]=|aT*k װ#U+& NV@-DcdEpAàS;{sywyS OtjB%^`F`Ӱ]t<6OC1JJHi0re1rń*2V¨5FXVsgtgIE5$t&z).B[VT\ӶT(w {%cז)L=mZ&GOﶌ@I.5K|CҺ.}dƫfUwZ0ҺtWJwΑy!SXTKXXI֖Xp4뢁N.< Ji$qj͛$:"ҎceuG2%[bmGbXȀ*%yE8)7Yg5rOF,}ln<ǐ#Gmk{ ox&о/װDZwh _͢`#Tx%Gs݀}{׀WE:ϒmhEڮČO/3!~ AbK,OYyڦ0J7%Hڷ2&s ڰhsdCQ:ړeL}뇀@,p/+^zU#P(БZQaa3I@UCvb#'`VQwR4,nm/0n{Rߡ"m}<@wvWﴩ]xf>ǎt!)STa,áR"P C, QE,.ՂZ..a\ZϽd {@7G# ar f>~SsM$XaҸZF@[:1@1Y9PAh*- ^R*% '*ddXv*{E(i Dʔ#fM t @,[ЫvύAd/T) 2 If|SE,(-K`v*5IS b|DV-7NQ`|أTP5{Vȱ6XRC^B(bFK{;ݡRdN۬VÀfԥ0x.hf/6Z,ĊtFd wugXIFWauJT\+lduKzrvMuduaT|N;xi p$N+\`5! 
NbEݗyݵ+Vߴ桌i½ H[(U$L0/_2ӃjZzSFɹ \U2OWO1ο `P<<PEuEw2^w_r%ڳ4,Z(IMhioɭXr)2Oj1;:!HCiu% 褕F.!4)V`e%s$j5hX߱M+UfxG9:u\G45ɾe;B>?4gy|eK\E^b81u# #cj:e{x£x̖Ry$@ˠ,i>KL6qB ..(h呋'L(ZǏq*IP%ƝIB*ApAo>0% IZ@Av ÐX͞xe8@{{ 1M 4E b(m)8pvv;Eqc~iQQq_~qR}oAׯ3pK**II+WEW}rLau2ÁtkN@"  a>7CUeIIB|΄tz!b,DŽ}"<~lL W+s*դ "Vzm>>'8f,ZzK.z/ 8"Ke^ hGLv/ZHaRaTqDz;rr>f@T(O1C6![*u "|1yuµ;wΒ&@8Nu/ (xS&s%A!u6~KLdm{hʣ.]{'d|%0{egnTS&Cz2iQO/pɤ'ǥ+ϦQ`SOKXHPǡO92Q|R&yPR;#XˠЂ(JTw 4-.]W?TׁzT-zZu| }r Ρp"ȶoh R LSY\I$ %X `uջ @j) -6Z#K8/Ёz?ۅ IZMPM.qr BP' It1Hj*P!㥬sz޶Ԣ هb*$pi n_|Bq:.:r]̉VU蕥Re _):q]UQ@X$i?-KLyaUaRp[Muug?n]woNw l48F)m;^9 zmuK9Sww1J5C;D(Be*CS(Bp1#m4VXa@ex0yPj,XFM&*srX$% ݆@sW+r\nI0Rx`r+ OhԅB3BX0"KeF<`PA(s] a(FclNX@h9sLr`h MVZ=:v)TSV{ |rr ".PL (+B~"@M:bNjPe.) &0@44+df g&S$&VUudu+$SspSNsG/ 'uI*g5A@cR(p*y$K^ s(`P%!Z$/- gJ8bD8Һm t| ͥ ^EڕzwWO`T29 Y}.r_+#F^ @dTU8uHh`R% P(am4T $q-"L砙C_Hg!pgBK* oj@b:*kcJ:x|:A9滻Tp@-@yNiQ m4Mpu0!f`TUPeףlFj;^p0_.$P IDATǔS( j@PTvH=nUQUV+)Z>^ͦJ9 A|Jft=E_;4R{[D~s\^,I*`pt8vSY[o)\Ąٌ#bC0_},cVTzERAC;s0(L}>/iBӺr[+P1GC|,$uZ8a]">m. ׮1D%t661CN>vIAtZ-K洘b:夅sZø01wc&x+( 90sVB2VKB2rlmkA0>Q6B8M =7iFC|hx@z<8P-”඀l%yiR) +!m Qx'TDl?ŶBeBrpZj!3`щ VT$~[ 3b#]:S`_5Vu_I%w]1"x09:.|XK0Np֔x5 GxJTJq8l~`8ҺTKyA$k w$ i$dv^$ +Dx-w!O'Br-]k|M.К#Q:Яn8}*u8yJecV-(*E3UCp\WUr]yN(† s)(ۿTbsgU@͜j5AH,sʽw}.VS6eo V~AQU!&8}@8( DqeApH` KW,PXjp/.-Kt`@U#9:2 >jEs]xXmo#j4#ep*]vP:Ö4?HU%r\ƛLQ9}|=5SMK I a}] xT씽\w5rоrsN.t'>q[NNH+RU;EA%sVJS>6- X]2N|( Bp$t'jFDh9)29?hb.JO&&!V#|=cR!5?sJnkBUrw"+ ;s}ƙ:Mk;;!8[<W{{p>",+ qkv.΁T+`-`<MPHWQJt(IPСPYt9%Ca#eATJ%Zpi!R"Ѣ P/BNeUW-H;"GwKOA܁zk3uz@6x* }9䵲 ǎt_钥˹k(]<(uGr~ <_44;W3{8k~(JD:&Q A+ǡϩK mTUE*%E"@o%P]߅xsNRg'h? xQy#"r] ĎdT)EK$Rg&;HSNvY'kq$1&{.gm&HOg<%P=BO?,ز}{<91yYU*Jj5ݘ6ml^4fOO"mlL!h; v@C ͭjI%JyݫݭM3Ffsk^1C0/@ ^Os}eP &' !نiTZkn* 4,,b& v|$D3[1`\-?ڡ0 mQ=,yLd:ߖwlXS%}"& PGVQjnyʻlKgwDJZZ)4qmYz՞Gu&[Im榖=REdC鑩Yy:S&?;s')>D Cz׾*2 7)uNީV8&]NSl4P@#^wSlIH-!:Ls'jzr <ԛsi"׮ACshҠL`?^󥃸iLe܁iejFˊ y a{%B^G#vA^Q¨А:ڧ&PǷi: ^:{WQ4]q߅&.AxE¥|1CKl+y͎9:krO1aum8ݑ8xA -Տk'?uZ)kS0Zgc#NE9km[mS*QZq.F֚*zii z< #:DYIQu'£],IjUbT]}&~Rڞߙ ?֗Hti 2^]m?rGl=r5&Bg  _{;5>0*Ҳtp+ـTQDh`UA$b7[bE0iEF'Yƴ 73x7~rBYKO奤%m:A@; -/)IX cEU IB' tW+4I,P2lᐭ^3GPT/{$4..'OZ-iܺeVrIM,3$ D%`/6ڂ -WÙ9Wm{|eU|(D͛j$9B..lE_$\IƱkSr 8`ǰ^۹" y3|gnʃ6)zvV,)E=ޓ'hBeWU3deeiZVQʎـ$kvA;>y+Xo$4TmX X /- P G7` )؃YiKe zB|.}ì)еt X2oA7"w+gGU̮!QYx64uwSߣF9-~ɥlԻ@1g OR)@9 ȢrmQ.ކhc!]W:G)s)'8VB}Hm :='vVڭXy$_R8ɐ̶BUlJ1F3KKxd*`fny Y%yA;Џ#Oa(|W&HϴJƈ*in6d͆MQ0, ʒ(0$O^ #ID) ګ%w~W'LMK&vQU?e9rms;rՠU^Дlaxm^n'׹x7xvAi޻GhVbՊjVH+ILc vY^]AM.H.lkSInӜIr8Dssz-sHoݢ$4OOi>~Lj vIV+da' 1db qEc846P@+D xۭ)7垕-J[ݿq& >XrwYBQv[-(9;M-`IͦUrv4P՝29@gRwtdWUY_xv׷V+nqt/dsyiabjDgwvtos+7;؄k<ޫ ڪ{[-xy{)gh/rUfm6sܷO V*g91!jq;pcBUCH,ZƢ/.!lhоp3+ك| hЌ܌ LHz@A@؂!}O~C2M9Y5W+C-cd=ʦ,Y%n ۬}ٙ؉c[[h4`2a$4 VD&M~otJ) cMyμٔ2)lWM+mZQn6Ea]NNmv (Mzx鴠E}8iٓ-@V6 l@[F rI\Y _} ӧ$zmy VU+ΎM^{=ۭhlbQg]ۄ&Ibp~eiWi߷CX\\M&KqtK$-߷F Y/ {s$;~M8:JV˥Msun,CjT*l5`($-`[i٪ s޶>vÎaa*H&]Z68`r%=(ְ{.4o.K߳.EXJ!Bܲ;G@7cBqncYŖ`#>coWoL4O,3@7"eJ'axԺK/-?ڪIΜ\]!Z9}_:y Ot` r #O5)<̤NJ2Rv1*喝d~ vɻe4Ҳ |R;6/ 74Y|G%}>{N 7=ny\>tRQ9ҙ*Fxbr 51u݋DXL}Ak{󓶰}X~S ߴv 5^4 εz?}.ty#>zsHeQdV%n6 `kK$!j6 s Ȍa|uE!yKy|L<{dA@eTQDtJjo6A`!kq) L >5L&b`61q G'0h)1oՓi$ԫU3z+pp`߷ Kqqaqll?WW6vҬir)l\ð{_+',W3kdbn7}AnDsa%dK;bu=nܰc~}K^.k_ڎ=,w$MXmeWWv^ݼ7 C܁.lL=8ͭ߀䢵mU6Kdl|eQI5a IG##[O'sFz4E*,ʁkB'1琷U &LS&:IkkUWݒ"ܾl+h&;/:9XRʝ_\w,y֋?g_EVgg.z}nah@{ ms?_A3 v{ Sul9T",zŰʠ̠ 6k߅fb0ab'%c뤽AܲIU}SCyXM X'V} 9FO|F|eSƀkw0Jw3hϲ' \*#}{<ؑK004ٺPG׊KVG]j~Dvοd젅^9tuke*2bEKJę TCMmj\Tw#;~u=8΅.90HͿ`f8$Ta]:O 2(2 1QH:UŶ1(6F2 i6&n{Sn24{xżυ_LId&maob6[tr^9{;6v:C :qpcg|~Bt>aܺEceFC&2[Ulo6]YE6!t$ 6vpg׳wrgwyibaHGm驅HE"g}zj8ktp=z]þ='T̺iC 7oian )4rz 8jLU%o؎N0>f amW Z}-X_XixCX-,`Q$jWQ&@DؙX" @ZԞ#، ˡZ 
A WV$<}cןGZ _RX5v5 SB \+c!)/iuhSMT.IW6SpY=q&nuv#C#6wc굼ߕ!Ý \UM~go o5U:Ԓ$It* NvD#j"wiK8 ~y`.pb'_yKq!ؖuM,{/gw=:vg7x9;ɵ5!:˅W9%(c{u]'[? JsnG LO!E{& )'d[nEjJnAu!}t\&|)ƺa] s#ƛ®̨JۥK9`~VuQN:aq ~\_^*S+efj /`i{M =t .HJmj_S*`y X:v{9먁}T^^P"kB6;9 wn{pz?ֆZ5eDǽհ2=#6' MDxv\޻MT7v ily-_;-x3}TtWoDA@huefͪ$$ iU14aeax?SSx`(ؔ/Kb-.eCch4 aQJ4i0i4Hㄅ1d4ynݜ_٤䀜eL%55_9ypi>?88`X$$5"̇C C1 !Y9YEuxHxz R^BT!YQ yβ'"4NQlXm",ȢMMlR+̨tŢcF&;ߧ|>l5ri~.X n쓉MZ:y4تё%?ybQdvHUل"Ϲ[[6Qp݀rȎղ/rͦ2{<__.vv..gvm^[Qؤan]M.i윆`٠ ؀dm(aEc:yar '&=-@ڪҵ9<:殅UK: ȬEeS*k#?FjUkXG8 (!?}XVVBֲ9_Caa VbL^]rܒ'O9{ЀȞ;ށ*eK؍\;Q uK`E)h%4!b!#eMdH NFu,oX=ZU0HU-8~ z X1+РK',vWƓ8꜖NSdytCX}]+<9mOhBm2&r6w.x%jid: +8Iغ£yͦۃûp}N->\uAͽq59pzyݘgؽ7_Mwx}{Z<%<u,Tū-˵OWt}|ɟwu^ځ[y3+8)ӎ?ٯïZl⡋;<@A@$ FDyNTUUEs`ekŜ*c0?p~`z0JW; ?pc' !ZDz=ʪ1D]Ge Ab| rNIytxa,aw&?ཇ< 2Z)/>=e\$aTV:hm.=,)KC4%`d}zj򲔥1,7V ws{7nlj<vN-Zpu-8ѝiv 0= O?&KzMUVd^ 5UU)=/_hy .xTA+}=+'ܩv^~^;qJ`* kRK80/s̾62kӃ-@jHuy*'=S;yS9%]z2.:t8[0~]2ᒘK%Nuy"gܭM$5czɭBSB{\軫s0 Gx۩\ҋzWX*=+$\jB>"\-^g:}8?8N޿u^ GDJmЖ]INykc>AݩSrvzE-W ]rW:&0_izJ. v _{-)y;+B3].;5Jz&**Pr&S"x)mt fHA~*BI=Q6?<sG߄o/d,%{mya9M\k*]9q}{~?`?k֋éK4DhtBeIl 2.KHh)KZ%AQ(KUE8XJwN!0. -y)nč^BgU C)Lʪ8Q2>O */ߖn-yA\ci  mz+Ӿ{"ޜҘΤna>bytj8rI0!LSړݮ}ʭ-1"aHnɉHs+d]]tK<1F6eI\J41j%m}0D66I´bn䭖U6j4j ]2ME Ullս6 }ZLY7$[l'~f= CUѰb!mQ(^Q38XZZ5) -}{&M vI"t:6q Ƭy°۷׬E]ysNByvXwqn1*k?UcZO(l9zBgUBpp`V\1D ug6+3G60NVI6̧ =cc5҆Pf.$$ \^ I*-!6̄|%d n 2\>6LlB2QnFٍe;! K>*sW?&Pͳ,0ͧ_RvT2SB`naY5+۽CH;q]Cz KlTaqFZ|O)VZnsQ# 5ݹޕ>My8unhvpgRGtw>ݡ6gۣD85 EҀg=ZgN F*Di`g*nֹLKRaғ]ӮsaVDq[RURU)HwdQאsUwƌ;^/ZVIzDER'Ԝ) k붾MhЙ". =ÿ8\*u[yLjSߩ@jw푎yd(VLkk k{tt.h;5N$7NW=,Qs̶Lt~Gʼ[q0~߬ka~ h&E)B*0G?IA|s-7?f+~L?#`n)Kƭe[9Uri 1*W~OB{ӊ/%|V vGdI:MM8uS1b!nUr*c}53QŽo&Dv ʊ7_0YRam2'O|H|>DU!m"*YFc8<ݥ >}jmdvوTlF{$!l@̌aa ە*!3"&JZϷrT IDATpH%B厜*~eP=3 T=|{ohp Ns˻pФ]=l 68? ΝOƭ[|WV\\\ψ8XHC{dI*&!YuѰYfʚIo8[81no }M =:l VnJ-H"C[M $GMKޱ/ ]U*ϚeVr3=.GbIϞs<ǖth!taf ]v!"/2;7ZԷ\'Slګrhd~C%&.RAхDU,km"4vjAA9 .E AC`!C {6V W ue80ԠO<>9}-u/^{ ~<~1/=͕SH2ݠ;ڮ|S K9#QRcwH-!{!X赶=9\?wS?1&a( :uǃ45u?ݱ&YMD!AmcqUzSldje,5Tu*.^W41>g?rtԤEN4\xRu˃/@. <)ic6CjOy(hujwGK,)𘤁FE4"qXXqXz[(5h&kzڥhʚImMTWꘞR٥[Rw:{Nxɿ-'߬Ka~U[Dk'MDl>Yie;¦5s>NcI&`$ni[m$Z7y(Z␜7<*xy< R:JZ1|8OIQ(EVc E ㅌu,[zVHt>019{ ;!_1\4)jx$XUf:,Q ]c?WqA"s+bB >߬QAr߆Ez5K ['3eA ah,T*INMedH&ZE~|s"Qn$lc+#R@.qn;b'9*J%14D(DjŰ(],@iZY&mR0j%l!Yƫ"竐o}VGC1g FE;˯DSfS2MXGAh4t:˒VU1 IU ]CLUR={f&E!YN&!1e)4VjX!E`2m$M-JDH,b}Zij4t:rMv^d5Zm(s1Uz\ z%UXgkKʖ[`X`<\`bw<Z-C)Y(x,t$jPUCjaw051]ᡅ/VYf;=k[NSvğS|ݻ2WO|9{= r 0`Xp9}ݖk̩o S{ίl7ܲPD(Zc{$1dM۴0cCA90B U8OF!lB i\pn71,/n>E+ j,c]ϡylҭȰV& [B66JoAq9w_xȇ__h?ua9As萗^0r5y h^Q!D遐yA ]HՀVMf - ƪf  ?Hne+Uf?S UPMi][iݠ2Gn^]Ejmiq+)-eʻ-46H<φq2{(C,K套NuB~?IYNK X-Lj|Ԇp-]`vP˖H֠l-qhz ǁ{W`Bo4,4pvrs=/ʾQJSKn|,^ΔhY:2=<=sݪ=y@,Xoϓj 0`5"^Rc:NX~J>GI|cׁRB-^ʳ0|Ov&d+3 +㕾SU:8@:()DӑMi۞=XsŲ7 MtOi~)u`au%P邏<ץd=^wog>tN{`%cǏYa B!AS5L ٬RN&$F:zlb!sf$ jEe qYl6H6ith0*lmI 6AQcܲzmR\ck]WՃ@I3N#1* 5YR0N_qWn]4^idɾ*/6 =ω Mjlq1;8.A8X{1GTwf3}('Fz̕&67J ~(M;R. 
6c7$ @3ՠq\i"`uI ϼX$>4O1wW/q.T$,0B.߶KHA kм<=A'#ԯS8irk+sm%_S׻-it9~3=d݋CJ)A 0{ BYrטt߄A|rC"Jap RXz652PWo{y )Ir+qHCFĊ\+u+Z!* i,§l@ZT)B^yp{}{5ʮr9]6Nb%1qw!(4EGA-Aϖ:tt+Q# hAQv٩*Uի7w{iXu$?{9{:{˥b>UٓM`kHVs:V\HCA ZW`>ehtK +g˲pc(ErfsJƿO;mƱNm?߷qO~bGr1&`&e!S˿Tj8AسX&KBR|bQ#9&L'1Oh@Sh SΙ{`ۘ*P U $SZI3GVOw^F8铹ll \km#b_㡪't^[b):JPB$P /4HN9+ %Ż;hЖw=B!/+REeZ+z_Y)\ln1<|Or#<%|y7޸g+I1Q>B̀rH9ߟCZ>T^|g@  5{<=k5a0 9 9A(mkc$T ,keh!v/&Lh3W(Ԫ~ڱZ+ũg]PTu΍&D\R}_=jR( e Uu0PL5]'3ʒ ʜԓt3j#QPo U9م`*iSM`Kaa==ުeqmz*硹 !Ƞܖ%uԳ랰[S:qd?%/Z)}b"A0f"$"Hn #`O*cEWάq%H ApoD,Ld+UF@Ӕf ix?3JY5baIt~jsA0deHXoF,rv3zD!NtuǶ{`FɤRIjQV1 Rp4˘]B$9<$ B TYT uł|tEDu*>+!q7 uDzUŀ Ҕ, ÐU͛Mpw`.#_ ˴&C.?Ѩ,lvIhd$  ijyZ-~gr|kY9=8(_Y[]׭llnM}N6`o-c4MLtj]Nщ lNNxS~˝{lblf$ ;f9tzUxmB:&R3fS7y?c7~S8ZX^NrgO^̓u=3v lCu?gTJAd9q[RYI O2J˖]j!LjMUhBFm~ '#+k7}o~ "P(Jey JCL3E+t=$k* H̷IJ%2*:"B&BeDdcƐ5e89Д0$v%+eP(8xVpi0ͅ$ AaX$8Adlt:V=T0ݻXȕ=r@3Ix4`r+HX^6K}497krovcX(! 5L8%zJ6`Tڵ…{0<} Cg`'IUb VVNegG#V!IU_1^uLR!tKv.^mGt 'g;r>8.x6!zŸGB BWAf7!L-{2,N؎ CЮôwY 3HsAdaYVS۷-5j '<;7Alݔ}^Ŀg 52/f7h.䡇wirfJ*$RBpX6ڤ܎oxs‘bs#@.$BPp_˺ PʅeE0m[\B r7 6yܞ*v|rXlq^RTWRz8<\⡇}_7Áw4`AމX%/ qC%)H==GlU}el/'ԼӤ9-Xa29 xc_^~Uy睷_~wxWxkq&I:#v\Y6'S@$bLꑋ.2T.KSy~$:vU=p(W xb.(`Cw$[<גlO#j"O˩Yuw+/;8-M:T&|9qFΝL-X-=ߞ#+ǣIɪx]]פa_yћ3I.HYdJo t8 t%wHQ9_'k:NIήg7皽XFkm2%%zHe;*YGUvnaN4I k zwʩfȷ^x5KqF0I IDATWpї M̇Aa0 tjiᲗʔ &@%e.Ct6? )L{Txhu W4Ә$^B ED(" CJQcjaH9(HYČeK[ r#"BԄ؄D]Xw0zۢjRYFYb2s3 ;q,=A521Fwe &du$a2_p7.1Lxގ8ҪF|g>Ti)hDYʤ$" 3[[`@scM ejyNf&o4`>t KKԬWb!<'[,$&q2d:X'Kbc nLHaJ"R.! U(܊j勒 ,]7b6q[}Zᄗ#M ++~Unݲo M1C/~<ހyLCh aC!5SY%Lz`C·0]a|.i3+{0=cQfկP0@R֨؃I7_ۄ$`&I*džta y@"D]۵IRC.YҾFly*uC9c%yU ]&fV7](k𳫐/],٥$Ey 2z:lH+0\c,gmU}6 y]u 9Rtv+ Z5s]Hݾm,۞K\wY #Sjx֝f&RnKpE0G\'E$YH&M$ WdyyXƙSϕdUMyz̆^5 s~'-OCOu0B)LSEwȫ^x \ ԑH-%!ShlrAwkKabMמ1fߡd&4Qk{UUᔲy[#%o-|_#y@ohSt§"/vݣM(qNqJlrkbU..lQ?+"%fEIB@h%Z9/y0Ѓ9IL ~q劕=ގ{Uر;oArO $g3{J8gn~f:3>/ ''NFW8{& ;z$ű-+{=*\`Xpե@ԫ9~ N`8瑕J>[]]cơP]]\lhGIp1}r(^X'ƪ-:l4,dA`,ق ZV^=_z#h0mZv^'{KLAfYu0߇XH6^cy:O=z /䳁 RرԬt.&B~vaȾ5^00`$6) lґXY.Rx>/ uY_WYډ !dcTBvęz4 {|peBCbygVjܓ-s<`rcDZdRvbWdY'G)y'y7yz{HN58c=:Ҧ:pxYy]<ƞRՁg wΉ\Yh@6wXStkIt<'AZSұ9 ʼzX9_]Ιˍ7%ONnz ؋J{P;jMIUt\3c{vYVJlxNkshD!<6Wo:ktK%M6J[jFk覜"dNa]f,S M!kҷϡem7Ŷ_Fz!cY_&F#Ǻ:/_qF Ф8Ro}od}QXކInڕvmFz똸cыskDB;{61 G3 t@ * M'?i~M&U2k;Q^FtzM2K K!> eV3I7YULiAJ3Pk9GCJz9_S)q \fݖvNIR1cT"<:$I2R"VN0ϩtee8~_쀝-JK >1+%ë M´S2|N0,-!$yOff4<KTB#0FIB612R%9L]Jah+ pycӵ G#gYY:& v+:߁ndtan5pԇ@ks^WTƖƓcajUaksփÉ$,/Y)YG;08"/#W&"5&˷TtT(=e4XSY^:ևpx7sk6h*ʧt]6VzV< ! !!`2A:j[5!oKK\Ohg< z\5yUW58Q.N?' ؉Α| ϩPϩ*S=Ύ9AK*z?W4QVuLUR0]F_+ݧ؆uv*XWpPX ܆޿i!C9UP+{#M")F)_:;036eNřHrIkhr|UF/rΛ&I*.}gt-WR2S ߜp{oR :}O! Yݬ*o^' 4ҨR*%|;yD2PBPje%"WZU E]ܲ.!(; ![O"iwޱ{S9lW`S& MAB#HU|HOJZ[ jiYHVqp/I$ޞdA@ rdP"Ezz]>ǍmMէ9wo]bT- CjBIswWFCfT*|N#9fA@%4= P@!V6viYx I wZҒ}OyV!j}* DQ* K]ZK,=g'_BB!T'xw{AVW ak&-kk6lZ^4)Ν];Rx G؟'%8`Jy_ؾG<g7z}ްsA*,ư͒(La6J5a %ҁPayS#8.bMayȦ00RlPqWav9cnXj) P 8u<۴:^5VH*@&*55/5+enۀJU6$ !b P>jM4 \RMH"נ~N"؉1RXHQsZQȁu<'.|!XKՅRf/kEjAQlăZD^P7㝧1 #Ӡ+,1=w(kk_HWRXFRC`EWh`AT 5vrG q̓Ըőbc{&m ,;ե=@%0ǟg}}IߣJQ,yO8 NDͲA~97(KRq_kp.eP,5ꜭ1R I5u.9rWO&Oډ{:L39Q.mϹtMU՚:-똗TAViÌD- .5B~ڮxyC :n:d|U˙A9xkZsUsԦ&7GvLѤ} 0):]5ǡ'ڢS\6B =O WR4# ߍM}kk)*߃Gs9^5+WpT"|SQgTһla:OC0Fd| \1t?jG{xtuGGSތ%(@ʘyg"r8Jwr&ɯFj\j3ќY|0]7e^wis= #.Asw4Jf\ 1"x],.L{=z RQ[,7&LXڜ}B|xeDy}yw˄*g@ YFY,˘3"2IlJyNOs:%yNP2eeV6,)M!˨JdKK콲b+''VuK6vU]AY9#U&zVnݲjqruu7^П0+qp}0)u㏳\<{["<4B\v# cݵU[X679MXo { Y¶]j@X SW^%],x9qO_5Yw ksxh;#MÝ;BWt~"<[J J;)ֆzX(D)]+4Ɛέ{yކhfcV:00!sǹWQvU\OX)B>ӀaCau 3Xݛq[x;ov W!]qUP%{bWxۯxJSSVk}&檪} :{级dPSRf 8v)-(¹<7o=}G|vѩG<+PK7pi6yOqtt/w\pfl5 lkԥ xO= l=hT\ AN-jźzvv4r5;Q"ح [NgA4o顇n3? 
VW0I\*K˃{@=%S~CB}9lE3[ԺVYM2;*N8ŲއCɚ-vd]7V';蜮xШijF)yDwNd^u5eo)tV=QC9 bǹs]fs ^Wuhpmw,n<:`uzBzbo CP̭vs`y,Ǟ>s0lT_9qppdky9O|;a؃%B}X5G(0" W?I߽ y5U.3 CӔ*qCf~}>au9')CM }M MFtx;o.,8-lh ߔÿ͗^_W~x߀zh>g}c!6.۶kky!>I=9]"宯T"mBlڪU0:TWخ 730Fye\@MM|^K\Ț.˻3``+p8$ ѪO_|w?­c1/RoW?[;+0'0B cH2Xݴܪ"[ SA}@m&BM jRb*m`4W`^,N`ч6L?HzI42]cG7>sIdeve]֙;kB ˫<ƜW>tc=G d  u3hOf*eSs 2,ʝLb0JQyur/rҩ/0kRغb聙S/vЊ͈ܱZ|d8 O)~*yxxg=R9|1fǓ@qRS%y <д$Hi xKȃ|ӱ^ IDAToQtPN SZI!Q>}HQ  S=rPϲ)l-̙M\; |AFzNk InyW'xp8烲IQe7ȼ#ɽ$뒸s׽v$36O %j{ZVzn .ܠWpM$G7<:=t2+zΨѱdvz3l\qML!s O{rJRc1\놞羾ޥ0<\]*4O yȊ02(n_~V S{LT$oh7Q .3Ǵ1mx uuVu%QrLU6ҥB6 ~[& Cd,,_i,H^$n!B"<).L 5JJ_!Vkpm{wzxڂ9sD*reXN: ˒Z(2Rj5#?q9|};9RI4U:9I.Q^x,$#~=^lʙϤr{EIz?2C$8i/TDݔa ɘH%,doBEB"jaJ?3!KSiөE0$TX"zy.rUj*",ÔJ@Lƣ@VS>7Y'_*sfk{7L:1x,h$< F#89yESrY 0ZM Cdei%F( v?-HD$.^lx4PUۊveIM&Ғ0YxSl L'J5>{H)e@M Cτy&/ʠz[Jj6>;80T*?-_߰y~!nܰb,k{ۺ-%놡j Rl5)5|jst~[>e,d!]Wo~B~5֡ ܹ#;u7,׳JZ5m<r[eS0`:B0*iB0Ȏ5,+L :|,%AJ(vPuk'BP;хXHTHMC9e.)\fNHu5xdڊB"E*c: CYR8/##UxTaMqGe}NNrNa3)$DlzҩuY!`,aʢJ>cw%ˊv"Lž/pdx]K.KP"7Ƒ#) -ǩTڼ;XZ\2QQV)* WJ !{Zyw'9 PlIΰ3]CN_^Pn#o*I"b*ijYgvGdoo#ty!aCHLTF6ThTds3ʰL ّB=Rg:UOʑgڑq WqUUwݵz#]'az^2}B|NLGZ]aTgYS5 M/W#:29^FT(\z:-9e#\^cE͓ .떖LJz%Oj9u i!'a" äWPӾ,51׿2$j&$cՏvhԚ7SOٶ^ZSu۩o= Qf/ڎ=6Wmι&;`cL~/O1zڟr` uO n[-R,榹``ۍ145]Dl#KP|vN{:sџڥ_3䵐…^g&f(߄ݨ&q93#!πە "6vU8Rql]ds 󹡥~ hXmLVC:5y`kW.Kڰ5dpp`!H ե]7-8:@MCEks|? Yf=0/ PyalnXZ2L&hd \g& +b:m~kؚ4w{/Ih߇ޱt áu_,.``8>^cű叀_GnǬwgwOvmX@ܣ/sgɂFCre6_{k~7cC0Obda!RAo *-C0Vz8| ÅXh`vI!0L uؽk$Bޅa[C#$ڸ ,|>3؇BKmh0q@ZjlSG0.oPCn]elf8{q3#y̛j[5Q ӷ a^rd/7v@3!\XXl y(5H$Ⱦ&$Id6)9//yeo7nRtgTƊjO5qI=鏅WeO ݹdYe@S@sq׉eS 5_o`Lu:4jJKU)oEa85}pݝ pNTTNt#*c%o쩳#clk{IhbSVroY8^?A?[zc]g$Ʀ Џ{l%5uv#3}5ODŌ L2B؜(<<ԁ b׺nlI}2^f<'^㡇$:'bI y\>SM̩u3|] .msDMžA]=OxM JzOgd,ijËo3xGB):_$?j 8R.>)},Eיi*+lu=RV;pVO>;*~*efleĚ(m"N8RF}qo= h!$3d)(#!ܳꑞ/=ɏDz>[ȑrW@` H]j${z_b9g44>TWzބt1F'䃛7z*Kuٛq~A.o.7f3.~P1+pכ#An&?HsSJ,Ko[)eȬ]%W$DpZaop\ٌ04e89JecCcH\(HN3*7H/Fp<&k4H66{=d4E2/2r;|V{C\pLԪo>"SL(9Q BJ!RyNPܱެLeHVIH:K-MYBCPX]d4z&Q<:&˄+WlܭGq 9j6UIHUT(-VAd"t:r89&|:f{pF羅7/r '=>>q$]eTXZ\:.gVJuXJJut %tvOh 4 n[ }ۂ$^yFB%ET;x% *ޮIu7W/抍C;ZL,Lc|V*j 䊹#0mAYk^!/lX#¤cav j{A"dM$sxEBbhM$[vJtf*;v TČ=zCeϑ#_םfENmg1 R8 $EA pWFw=XMs%IϴO_u"o;"wk$4&!'|yK%zoaH. ɗJFj$tsI$ b~ILN_y q1uC! wrD(rN s8##JS&yOJԙ% C$cDF! C6(brt[,' ty2QܼQ.1I8.SH1A z;KUm.2t,9׳qv;m], y˅6oc||b1!4էUJ<.`SX@S._>ju_!~d0 !v)K7voG3\ t>"f]Tʷ_d#tnu҈i[XYyfO}?3oOo+\! y{ᅗe} wreƙu-g!b>9{ if$`FG`RȢI<Ō}PnFJkEUNfF0U323i8&O#Ξ"ܼYP8Z&jtR/FH1LdƖ2^"ef>i0$佮sy$pMǿ)a?C>O2ݧ ǩ P|@;hiǫ0;'9_Ŀ֊Q(O¤gB4_Kt.3iG"qcGdE֠X,0 lkޢ!: VXcäϜvs B/"3H,qA&{H|)>i+"S񻺏ͶKSEc]m_ v\<xzv*lg_U;OПĀ]4D?$}?@/"7hH~}ƿB:BCו+CוpeTD6wE^zroy ֒_cߘb"l$KdtU}Wb9MONَE|edsM'CcD'1ҚLd2P|ϓR lD FFJ9xDT]W 12"q$(T⻮/h$ 8gp|L' %qqEד   [,b {=)F8M&¥^'w4jJ..)nW"Pҳժp 4~1YV:(w|i-Nr^hT66$p:geý=cLYZb%5s9Tc,fPj>UpBQ~忻myfyCߒcI6Q_dz2á++ahy)!h$ı gdnΜsc;Bk|\v˓OlI$_xݾǑyPiWMAhR7'БgW@&LG!^2$먷ĜLGh*fjNcLSdj<~ƬgR|x\0ǚ5G50QC]ֱm@ɼ^f)Řd7=n=~:cEͭy+3kr5TIFxS\@hi4Arfz RH݇x_y{x[ȞASsbäbkA>J  K+ -Ch22a~ƪ IDAT#$ZYL"KPt}x,ᱟMhN8l '=#8#n4HPARI0CL5;[ Y{^YQutG:79O111$A@2t31>IDH-:zd"8&CƓ QA.( Q,,0]X ǬZ8Jymۓ=9u:.7O& R<4 C^4N9p }ǼSˆŔa{>]gYvv3*LXnW斗yTy(3[(V@޾8Ch1)ߺk>!ye۫f'8cC,Lϳ ph1*B9ì2Tع[\)uԯղse rYH"ӏg]vJH:}UE<)>N9‘-Ur<_4\}5'ׁVs}؆ҲZn`uȭZY &EeHag LC0"L'` FyEāU'Q])`2{M|O>P l| ~ӷx3 ;إy!g NNI7Tx9ٮEXX'x-Cӻm]lkQIiKQa9c+i!u.T,4r\S?̕W8"13Td}zrF:GfRC8ifK}q\$ +<2qzS? 
jsS1멊a QGgl8L+:Ck`8jy3PԖm52DՋ$Sҧ*8 Viw83쓃 )|g#OMxjHyLE|K/br $"Iө%7}q KKnmsҠŋ/ cnܭ=Z--&yw'"o/ C$qAJ7q)jE!J=*$8F67qCmErs"FL𶷑\n}[w[-dc ٜ9FE6L߭֬#D]-aX6W.HXҮG8 O9yF}CrA>6X< jk-h+Ņ?aͽ)& l@.5ʩV-:If򱶣0S~SI j*d9 ssv>SJdt GG=FMZ&: oR;3Jo-n8ϯanz-%߅un8o'P()סh mE{ǝ W?WxL:u`z9wqOV~bg3}jLTF{]{N|Џs_jF$Lu'N,0Őfh 4Udp4Be} "%#fm2&_ O棇Ubc".]cumDܸᝤ]38f]ݩK)OUvf>\z*PMA#3O$o{|soc66No7~E^}uW_mp[[xHa4zC$V 5HݸGӽAe72HIÌD Wh2z83`)C.f*)Wa;xAF5%3%`^;C)ol"heyܵHt3ϼ~o|Tƫ#IrӚsx3 z x%sN7uՌCwgYRժBshlb~UgϜ^ʧxuqzdP:G]Ar!,qՎ01Ek0dUOz 'cyG䖵CJi(Y,e(>Le|ƿ[rJg[/qmK&[C|C!?K{7,\omPݜ&YFJKSzf#4GȜv2ʺ' qW!a.`TIO- klem^0<A*0.R@ma`ge^UaMùO:\Ř߭'CFQ~ỿ {,NGpaUnOlt:㐷TL ;G#!ˠXԡ70M“ϏɟNg,=̅tN+\_-r^l q, @v^g3Y]Ʉryapp3n9uz|r-IQ}gA3pxb 0˩DeEzUDp]lYx`B8]&- %¦ăļyoI;I?c"&ubsHzEș5FOd3Ύ98 ZGJ_5|ow8B*vPhѰAgJ(oXh$&WnX^答nL O۳ ֖|{nނGܑ3Ks _x~呯qv}Gnܾȯ?M}2sڧAx]z2U"/D%]ß3bLy.#Jyf *k2Ry̴iXKblLD_gKY7L&LPA1nAF' w߅^V$IGA'2|}}XO~nQI܌oI/ӐU(ڮGGNo3gR&?sޔ+CE}xd SPYXy0 VV }&(aaclٔW~F԰OY0FR՜ci<)~r/Gm>|>q2(p|7Wc$7}9c4b {纸岔K%'"¹9emcX[]%'ြYlXYQZ?9Ihd8A^T*8&\R!j FUv:ɐZũ6o_f!ᑜz1 ?n5ԚUq|>S痾 o__$okYhA`Ǟ,u;9s v.g)m\XBד R~|tOz]n_q8G_ nݭW@dJCm.!LEh&xU-Xً6ϗ[BNF6ɗa#|֥h@.c5[iѮeBoUaбwb++_M!T:wlJO`ۃXCX(CXu`I"ygX[x\y;?*t'|m*$5kd؜9>ӧk\U!#. q;_jp;% ƴyc"J69}S<þaC&"'a/B.'>9Y.3B[`s5M\Cxqyr6\f'hXd$L{=&d8ĩ-*B [[x)n*ܜhDn~J׊v\c:CTD߷0bݮ tND}r kM&!)dTRcc̱^8ߺLJ )$nW(qZSkMc;K$m'pt.sQyvmPޑ>"J436/gpݘ~@OC }c\3"ӡt94b8M8wt}y}y6ׯN GFm9 twQAꂜ-_1re `Že YY ^2=tD@iy)/ 2@bSQϗĒEfЬL 2 dlz_j|J3]9Rh u<ѿ "Y$&8}J5)wRÂ̡}YR>F pqleNW J|#6oܻwAvXU9dYVthVHTt:NǢpi̔hc- znwSAO{[hoQ[*j-'y1Xǚϡpt?J*f*E RR2ߢ>3Y0ErJK`נօGD{ [Oa^:7˘ ELc>0tϨBU3 0824oB}_ԧ64ûHkELe43nJOkP!܂ M8yF߅W޷[@4ǞGB~;ƒ߅ks'TѝԖJԪ%8< X,P.FܮTnAYIlظpms<خy1+c1Ƿ(fTј^ʸX`ק1z]QľS<"A)X1h4j{ "gzT}޼^0#^]碻zkt9BDpMKn%Dxc;"D^F㐟nj8a8ScCtpd:%,c-Iy 4 7ߕ hn׵i6m`֭xV'UFZ^2b^#^Sv1 ͦ8фVÓ_ K,n9*9]+M.mA ;Wj>k]{%r8SD$J](mcmS ZO[K,{o&g"ּp<]cd#(7W/]g wҥ;74BB4{v6;ENVBn ThXxm(,w񠟃P_=W) _^Ox Mv&:C]}jP~Zp{ ΡPdCηfn@ gl<1ڇ\‘b#,Ta*ڒt ] x .No#n;$_\coF|AdZٟH.ȁ`zN{Jȃ3New|Jek鼦Ewu%U@dLh/$ACASet@ƍP7IK5 _x05B&Ok0.3H3< rZ/}_EdlP\mP2TK=˙@^ C,ZUnC i fp'ɒՐ]rzQ-3t~Ҥ&zw,$j/ >]E7S1 %~fIURV ʪu[ Uʛ>Va*P$iBH!dϟ5\%EB44Ed 1b5O?kͯ~gik`0po~nSY7Fcv Y4Eh)n#Vu⌆"||7bT/^^g_x_)n/+U>VIݜg"Fb0划cgǸ}_z `a^jƈlJQVYg^쳶qkljc.ǫvEYރ>fwjF%xi(' }B|.H\!])>\ҽ{m<\uÝNmsbDZC0uؠٴAsm89 eP&Hq+-T6wb Ή0tcG\]'Z9͂7{{+eXIR^ʾ',a<_//)6Cߍ=$ D>oם%}]4ߴIpM:, pM67tΎv( GB{17p0ϯ}eWu.N8"76c~Q& @1^c+-Hjֱ;J`8Ys0d¯% NPҀLpZ7Wt#l! +0iCo IxoA\?gH=n}yuc{\ѦmG{C Mxzn0߈7ޜ<r GGL:,i;~q2]#c;s)OUBE0MG乣峮*tfj!pJ2 [BvieH'xO^ E:Li)O~65se&9sݽ;/,p-|.|NdbjsqOǒ?^j+zn]˜sϯ/ҟS? c"LO~yBd41b99n%1ec$7v% S l!T` 7^c!ޑy֧rrcOS=)=1n޼yȍiKXŠG;JƒN%Fr r9|Ǒ8 nK2ʕ|^( RIP\(Ȥ?dQ Z GL 5֟γZؔxPq=ɓgv~gQ6xw%r|LRI$ c a5E/{O0-YHR.Eg$BPry&;X&!75J ?ZS^ΞN;+odivvNNrё0b.>+reuL*in|5m\wܶa0vbI@K0JpnS!X&^[HF?*P0C10 k eB 0m!C7{moB=]Tcڄ"i"zBXct8a)Lg $H lą䅸"D#!dMmNً\8w]g[u_Ν!5nJj_M*8;j &g ŮB2瘗@/-\Md^e'CMwhe5K%MC Le1Ymd鹌\dk0_WӹX\Tԏc/U f En$I&<Ć|anwQf؊~'RX:C a j-3өV?SnK3ǯGyՕL7iG,65LDˡ]MՋ$RBopTrHö4]T]VUwZ-ʚNSP~5 ڝfȅ4Xuo^K23(o8 `=/LTohGA\UV5P)́=n;/z*ˁy(Ir04f"t}'KVTFZ8OVlbZ@b4PWH==ЄdØS#א-Rw @~s2ĢwIpK+ R]#ރǾXma7<뙵g.b sFŤɕo9rȳѐqTVԦU']sF<\x䘋fMuscw/Dyc6g}qu3R<?;rY u%}DZ,Ej5dBWf2TL\LSHV_4ϥLf/>qvc[1yܡU]xSqMq !naPZMréV f3& 29<4}$2 Lg8$ R"faHEnN/-3$--/)9< 7%lL٤$8//&뛑j"ZJR,`S`+ij2Cn}*p=f+| On~q雤:x* GL=6ˬ1߽{7QpO66,< `kp|l<<ݮ熻w74;O?mVK8q^AcY[@JĎk6Λ֩} ok' 9 ߿ۻPM ,a4L+p)D r X}4Fy &Յ!4RC DBkVkn``g卑0sr[ 2G{/a~ 8q +dJRt]*5 r9vtbUsZm5 ],d xg Nߪ@eQ46hzey. 
5<ו^$M Rj<,3 6qUQ`b]1LvRUK 5*6dQEXRJvĒRRU_*ܞZmUobI-V๧Q1|V2wvk xSOs)3njeXP]V̹ǽ2,ȰIIe[M4^7Vȵksrn-YQ8RZ-]c\e^¾l*P"V:QXuQaY]͢d5c)JjOuP#=IFnǺ<c+w}O\M35lۚe3m]kG:N0657;H0A-/% Q)5RgEM| hWCMKh]p mnI5}m^>&t]aϨijRְhۂ@hWt *i.X}S^vI!/eq:5厨 ^ .4/sf[!HRGwܵuwJM6&-<1wh^( %g׆\]`n@Cv j|pN[/WkTv8Xidd_$Gw=MDvk01YQO*A IN-l*W\0qYw?7V/LmYTa+/_6߼0!vmOHdI$'x93e;s5|h[7ܸ_?)kހ+?"N4IS 1ĮKwete|6#y,MۣlJ`d:^k2Iu0 C"g~|Lzxp{[&{WyNc6#ڢbeW\iɄx,hdUG|.lmPE[YW[ݵ[5҉j}N9w\ImŬGӗ1jn355i? uGvym9?m֠;W O zc9A!.F# `5 .ŋ$e$ܹcJ~XBu~hdw,cv3 gڸW?;“ vM?pޑuӞ5OmU7a >@} @3?nAc?֑BC=}9VQ7 R (9Ksk&=~*\rX]+@!u厮[=Bm[+a|j{Rt&څR1j=+t@J%? ^->هȲ%Ukjo H|=~"bEנV}E#ӊ|UJ+y,G%;pBjF!|6VWUHJ7hH|Ȇ:tqi : Lc=ww.h,R\*BB 2FlDBKJ݆HAɄ//(RSMv5 k#(@<5-Eu:˼Wr.{p.t΃9Xme~k%y7`rG6Gmg^bzU݁5,:Ex(5)_f_B\ނr7U (Q.GR";:ℚi{ &~[? ~߂ȭס5CU@Ν˹_Ȩ>L\ί9rV$2MR&K+h:á,vSnJ})yoMxJٽ^ +ۼvٛTGPOFvD'1LL Cf󹌪Utʡ&ˈufS锖1A&ۣ]^4ˤjIY[#hISk56yάݖhDR! &;DVx]޾\Z|F OlIrp!N{es i.?ظx7=ys~yŇoeah'Vɪb;g*Kc\ևB8sm^]VUrY[Ƅ?lsMׅӧBԭ[6CS.?u~&v>ONǏC/y#PZR_7V oA+=qpQq*t}MzZ5Jpo,\@v.uf0YXp GL-̩ҲϪ1  :D+*Jhna6|j0;2gUB7%PO35V)ѡ+E *?-}iߕng̛oj%WܼÍǹreMX)i 0 ? gTdj෧kIN fo)zW+%DZ,| R~* /)T *ź:D u +P)Ԫߑze[h)b[%\}JaRq`Rr/,:vcTt#.H71"9Եʂu iPpNJ܎=<]zjr[z)=[:忥 FQ.H**X]|PHV5H>4t (i,EՓ,6Kwԫ &:QtR; 7KzoDSJt/ȥ5 l?4NB<$'llvFԥD Rש&k< #MR-<ȼD,Z`O!CƟe{I"15㰌'#%k 7w*rLCuYs"$ &:0 };$\Coɋ!ou#չJNi&j'H\yFYƽZ O C!MI66k5Yv]먽&{f3} %;÷n1Xǿ-`[w Bw2!7qhD0ӃfSi*$#<ᐰٴBlf7oJ2aHĕ 1$$uc &Mf|6T"!˨dGG$gXQiGө s8sm hXwx$a& )Pɵ,Uy*\1+#=8jޓ<[2ţ_?ag?޶]ۡs l߷ SV߷zyn454c=nv.:ZGp$tڰ`; j@e9 M'?>E{v^f}u;w̗#Ka~_p`ԅmRqm{G5s80 GEF 氼bc9T+B4Z'BՅJ"U.g{C=X*:B̢7ȅ$|&M%x[fCv,ta2w=g H&TW,|*۹ilBiv ۻohO+#Hszngolё{'y≷$ /}хޠbaUժ4 JˣwR+;0Y43]c7.kzk3:6ȗY<50:Ԋ>Ig ,h\+i(z%\L@E@)%u!TIS}hBZ(O"hҤe@]Í8r|>7n/e Ђ]$K]e]Њrْ$\ibD^-[WmpiRtSK憅t Vu,Jt#E8}ޱ\Gr{[z2WOڳw:'=|187Wk<}S#/L߫l?ڎJbcÒv*M Ri`.^ɐ++{{BjMM۱hE-.Ht`r={Oat,LgNyG.mzϸ}+?iLU_Lx[m~O/Z~$C2™m {6,,xP qlـaVS½0VVBǃy(t΂ud&LPQDP=\pB ZTEl"TV Hk\ma%m,FSa6kL`Tm-G{IޖYI!W H.\C|}Svv3g4;GΕ2Kr \oUtۗ /HK0  S$YC j΃ % f~*ePu;ŲCx/JfbqtUJ} `e) .ɑU ((fikȐWmDC#~ }-5=NA(x[BUHNOV>rғݿ!'ǫ&o*ܨILi4- w_ 3M.zgpH t. @>?Ͽ_~7OҴ b92(|j 9YY$:.B!׮U I֖~&+~V#sX>D`jH Ms=340zޡ Kzg\k+9B6xwPʍ˹9%Bw9L K GM]Z/6gM6y͎ >h>qMLh" gˎ6SC))["{V690[ Kn_bէL[\* A޲3%d0 RI0M\BOC,a}Ln@oRz{Dq|:PoOcp%ZAI 5d{pծFO$&`ID"TAkqLna[0L[Uvr];Kgn[\sxXq;:C%{6KFC}q>[|_!|+ϙ(XtN=Z6%OUiSIv*쪻c!e˂4uD&$o5%-:"I;hcXرk3vlyFYv|3Wg~(œ?'>`~̌Ǯեƪ9<9jm#IOC=<빒]U+۫ӟ S2^䳟8!p{[3Va*4pX _P =NSqhfQK~5輫^F;+.&nn;xvj.nFvBcy)U;)pE=c6J݋i W[/-I+ ile\EIa-Zf-A~&(*7uA \h uh8!P='!cc*⤩1i*a\i4DP|N^t:2ݥ2F8tClJb|_<'2TO,HcUI\LA<(qV 'Iīevp "$x ݻא i'kُNɎlB W. G\ھj50Dz 9~ DZTZXl&8a'Ǫ]uVNv:FzgXu. [Z _8{ڤ&$se#{0uY'_d+LRC'/7[||M=#^>s4?Ɵ}Z! a>VM*40ժxf GȦpԇc4vTV![x뫶hIh$Ҿ0a. #dh%_T!hڮEB 4Be YEl dGh`a؀βȈ[ir*&3g9&m ['ł84\lY,ƕZ3gU^{ecDj,>wE7^_A,HE- #Hvc\OHHblz! 
H}Ⱥ !RէVakyTZ _BV@*/[a/dZ#}*YӋ[Q{Z/t6TbʢJ|P[%z x-wʵkk9r<Ԏ8ǵkriRTWO}]~G<yVz 6dSepLPbzK%؏uO v{H ` _ ^W.@xWv{9 ON%NAVG R ?@h ?sPytG|O빼T],3MJ% 㖮YݫBh8uN ۽%wラ7c,X<7u-)|ۖKq Y|@z^7m%wu3%s0,@.ud*OUNvU&z^a 6Dswe IDATrU, 6 ^7~[(PxP]>/%>KՀQKS!4 E8Ym=t[٤U4ML/* B W:y%&W+@M& ( K{*@]|ETĒc}q1dSX{d]+p.A04߅W&1>ݟׇUxЋlҶOd#nAnuJǃ0g4G`%:b<riWi"u 945='s,'ˍgQ½Vʱ;FIåwvr#/|Lpʋeҩ01K9)Gw *~x3@ڒ7\Aѐ4C\Bpji2ũT*B/ %V]^22r CUyNqyNepe7B;3>>38 y 2ۼyluykvb\qHYCE$sGycU6}F#0xluIӔhaDqg3$US $j 'z]B|\/čc$I78y佞5v|lex X[!dxnw0#ߐ'oCawHSyK1v.?\S{Kq})WGו{{s0 mj{ۚZYZ+d6t:2U{[΅|TkIM+/.+r7.3#Wq|_CZ/kO4q(;[uc9ppSb,P]cM0 wgll"cNEI68ʇVP}T6 tE#YnVG;YE@[l-Hx  = e'a>V+jKr%V])Ǐ58UR[5YgBqZl y׻vvG[:jE \`,Wyٗd8lcږεrVC_+S"i*)9, gcM%~KA6TGenT>UڻPZRUiIΥ{2EHk]8]UܽП"zu/e7_WQ <#Tȶ7͂csĿ8# vψ%b {:v4sm`W'Ƙ&SS:Լʯߒpeܶ<'t|4Rӵ>TJx!0q,CF|P(~_AӴq^ +s;De_#BQM^ ({k)2:Q)\x/t0kI߳UBL Vׅ It9)| 䫨WLTf!&];3MjZS+ UVoI MIUX :R(v/TNS,5y^Pmt2 (n-[Y 2K[[Ip7`wv^_GAסR{iա04]h#G6?E^AofR*Qr Cb-  JZ)KH)d^ W,P0[_7}BzHN,!<3}Dx~*{voaøZi*/K2l"锞ʸZoϣ)B63vſqo4HuICzu@>2\BǑI$}sx(` ɂ'MMDqv`v^OX]C$~kcyD8:+-gl,_:z7_s7CC&o֓&Uy΃^gmwu|n* rخMll>DրS*_4#$o_sY7X#;psmq__ؐk-`g1T\a*5U vzհ [-Cw$5g!۹ aғ̾@PUd:&~UaKWT5jlײbZΠG(c+`E+}0 YIMX$]5<sЧj!5kSОP))5=%c5kAZQ+!"ܺUe4Zň]X|(cT~{3/*"75J53wC 4CxTߋ MVFJ{QQS@zUyk3ֽtJPAYXBS@V""_x^kb ||ۯxdog _~^a5 Tׄ.=.MYzo%($cSYgϱcOk*my%Bj]ݒL5\)Ȍ^ө6F CVQ..(M0{^Ӓ X;wsUK -)nWECJ-^rO4h)~IZME/A'y}ɨ1 ʘ*$K)LkRMLuhnK/NSi,ڴ4<RPyXrég[Wa ][H W$,=OAj#y&\}x5=o+M),jO״0}.t%Td`,tOAo$,}21D)S)v_Ƈ.?e;zoKCEr2x"#¼ZIh0өqlԖ%r]RCk6#zJg$![Z&و"QDuuZf8D"6˕ yЭVIV:RNeǤx9ӫ0~*9fN9ѹþY.RXNr'{R:$BIy>l&n$1F|U]Ҝ0Μ(BsjP!:nO&HiM<07 M 6@ vZŝL+z=D#qH'Z /0cԾj[.ʊdO8'6I% 8|K惊6= >8.?$ts;g#/%Vy=F- mbkv/uݮM{08#. 5:U'sv.?z,rwϿ.>‰~:}M=w%0%n`n9ec,$Cڻxf}40րK+]ۧnViԇ ]\6.B4OYl&]x{vs}x0uLlA$!$|*z;e;++A. hϢ~ބ~'ck,/gJ98fkk/8GGg_a98¤\K:fvP2ZR?+_orOh?g5=| =Spt"cY)* X#ݟJceS2=hk2(uMpxO3, {J֯# }/>?G?OO?c>_._Qu鼏Jڷep!E7u 0H>ф}N|7l8c,!g/jaiTF* wt_J!sd& PB_Ɨ\z.UXo 轢~Z ؎XUΊ29kY&jaMad4j*uWgQ}ѡYBnerRqP^V{E9!Ʊv :% 4%Wɹ[vB9 ұAg8薋~HRķjrM:.;~&w̓-/yw[^-]UUJn[jZ,r[6#` ?q(LDxe AHBhChnVU^o73u6SUr9y]Yd 1ժ~Lw"2>R8t!#^ ||)Cs"L7hlx#vW ul-B@"O#Dw]aG} gRqکX,I+7y{\,\#7+RDnN^bs]>4 mRrhYW*t\{0iN$" Vu Ϟ%[,H]vd<We(ׅZM,Z,uFٌ$IH\<qLTd%q<`2/*!MF#zĶ%$A9Ğ'y.*Ir],AVP.f'y[ߘڭU鯞 ˲?JEm6<dh@оqCz; %,q|82 /Id U*/LvWe}GhcB.4'0@%a3nfnHfM MmSYzlLV SDf&f^ M5P^Kܻl0nxxU\2*|Vr4{U5.f0u?(!dYW;&YY7ǿ9瑕( i}^)1rV!:ˮ&6J͗C8jeN^a1Ny]w׉&` @B]ƉƖ 2WspOCkPHoAwzm-8tn\*ԚA8Qh*T$M n2U.%ƃ"g$pb~BރdUBuzN G]K Ue$"( RZzCYOKyEKe`DKwp*R^]27VU*%O )i$\uiWA(m*4밴 K+28 KFg# Qb^2 2pAz:Kdy晥7 IDAT s䍷oG%4amKJ o6l[@~tDjDZTllߧDA@my֞훗мOye1]ß#|AqV ᮜڞX"鴎W8_pl|h*O(,дb.gCazq ư/8>TOCtLs 6nd a : ;-ttgfW PAsSwWIIk]HBl ;B{SCXZ~G{yUe}TǮ.J86,YjʝzD9Nlc~H5yՇ'y}Z.|񰰐]lF;ܖu+KcބkyQ |b"yvT`fI ^=^J&|[pqy_)fwe3E\slָi݋'/zIY ZUaVmJvv6u.VWssRP/g we U";'gC[$pVJR 0vfTŽ¡.@E''4<TK[]/eTM;+l(s-84"dmb#\:擟_yg'uԠj{01S4G; 93WG#*o~OgM8)Ep"mR %Tu]3 'OJ)qQ.IӒ;W7sH g9ol@>KG]S8nޣdyi B8-Mԁ29[$S9IԆAf$X6}d}S`/S݌B٠弰e)ރZ]'eV\nd澶*۟67jgY}&SJm=wHOX =RƒIOD)!,90Ia>-n#z`6Se6J*jyEKR2+ULz\y1S \ UبfJh[(ǡ+\U,!O,**WHqy w=&_ޯF^c3Y;ǚ=h`; ?]|Ŗ X֔\%W5ݲYy 5!Y@fK: 7RخTmaX5v0&X6јl ~KCm@-L"?xU<q!\*]PAxW'@=8 е юY7Ay8`mT8`d60}-ya YUai)f=2HitC{[΅5UFʵ% 1<eiIT5T9}H-!~)PMLnTyRuP0&2IL4BZb;&h($cSCmo 7cUzm^sQJw(%Bj&*]^ym2ܡտM>?O/w8U$/]6"Z1$"L47~s%͢|hERgFXf#Cje~a"oQ3:/~I&Qh&x]H.!} !\)ȓO_/'"~Y)Fs-EΒ$747rsk]OZS><ɲ*x,9D(ٚvc 8J),볽i5;xJ|I<L/W%IUJ _hU:<2r@RAղhpeTu1W¹<(SK.NQ0MS_LXʾRt>(q8iƮW%%{QKg6py% Kݞ8*(2BTJQœįcpH21uBl0Qy$ sE8EŨVuŵ,ߺEE밾Τ^ؠ1f#=f lF+پru*T Z-R'"}z`4bu8$T=bfHza1łmPTNn>vGܽG$GD4 қQb|ySוzdA5a*{Ŋ"~uNf^E.x̰٤x!xQD(Z,t:8.A7Z- W /iafx؝۸8YRԧSz{2jU6e^#gښY`02niӊ~{eW[ϦuII@Yel~gsOͩzSn|>}?#VVB`} (`7JEKF!{4QNryl$ot6|7 qiz'z&651g:orwԟ'w GC_:z}ChW3OL'nCc:(ϦZ"S`B t5zR8A^ "BņF *sMd_D03(MM|VSX 
Na \ԛm*[mIiyzVkht?;i$RNh$a- رǏ}'.aY;;|5Kt-ADOEVǔmWbc 0 J)  f2eRITZFl\X/dk漪ds-rmbi)9߉s{R{&dLe'C,1JKz C%[sGIA+gWsj-~] Yox%Y2KCq(ȂRP-8*My)Xd\J9N>mIBLw`aX{v\aZ-mR.7K*%t9-o/s}-jԦ NIIN1:(nbgGqdWu4o`M3&m<-v~' T C$H@u׶ieFC(BV 'h\=WH6*I3j6s\h8mZ UlN1::"}:DiʼVc}8dݶI||$A2ev66s球<'ulmveۚ=5"vvp66e<cy("snel%մT G]T=SZq(Z OGu@%*VayWU!o@+±_q{izg:O8c'fr5rZƋm~4`>:֋(:jԠQ7O58u>DBMG[pt!\hEoGU]8,^Bxs0yS_Cp״ժ@%ՋiH&:r,`;E`h1mOAsZs_[z`xX +! -t^]с"{URKKk*MyDB_!~|tt]{߽41݁Dq|ܥΡt]%5V jQRVʾ^LXV 뱒SQn*ۥ,ԋjzRb|8phvj)((U>}? ݪ K%&4_?_RTj kf4yهWG167^7 sfU spE7IߨDʟ~mtQ)Y*p'fg/Eh~c[9 e`J8HDZVn'?hXZ>Vʧ?}/i:ԩ_y//>ƌtFc_N Q=V jػ}?C:y~/_;"8(Ï:բvxK{_59߯3n2zO|[:MQoRJL^hJNY-:4~aY5c4UEbVb3c7k0k4W3,]ss|eq]޼IzDi^ii$ H%_lشarXu1g6@-f6'}} Hnr/~c[gW.40>W[iF^1 : tr3BGZ5J_i޾q\}.q3B l@:xqr'lA*1Poh U _uu˺&%h+P@h΁Juf0kF64⍁b 1BZmiC3$n 9Ӿ‹vMnw6SsPQ~ ܆.-^:aYBX44ļ6]| XzuvP)HL– aK7x#:\΀^}}`fN\>vBR+r.;X[j. JP"Q‘f(54tjX$=kfrJƂpViǨzG2a=Ejv£|i<Ʋ,+4J OeJhh-m㏿ƙ3?boz5) Gӌ:w.D\0@)ͳ8 v7 O$N:Ɉc$y$$I{ȫ,+W?7LJ́#jz}Gy2#y·wslXòl™Z V-j1^+ *yQh%_,R'sξWo{^eXRvl+eo-Kf픠^<&EC)4E.m1.y$uJ*E˶eqj(}ztVFrpHXj!>vqp ms'Z2"^gu<(MmTd}Q+דZ""˶4f6XY8m :-W"^ɛp\=-J` .w+/s]l!i/dWɄt0 kpbSvn2ihxOf4#VQ,rxHz^Tc?k4XfE 7TƕC cz-E& t@zzm~JQTkEC ?J:aoOq I`*0&SZ:7 )<=*U{& רfv^t6ů/2 6/M᫻ysԜ\R,}jMsMsюڄ-+u8j(ހ45 FpkOv:G=aA{ QCk8B%-m>W2y5_kл.jl:P}7w2_$d}Z>2miKpmSUލ Xu4h 9Lnj k]L9k|?gO4tJ&ɤmkt5#I{dɂM)qw۞cӫqh/ưL*U)V|"c$f7 {LFBg^"h?_*c5 f׬%DY+m_ %.CtfjXŹivK2Vc~y!_w +,xM88п8!IbxT) _+@]}rc `>w?'O8yEx.S&KQ 0~AQ5QCxRQF|sjm4U{K6.)RBHNo[+_ wyz=c4UX4%M}VHӜxlfsRHCMq R Q)D躑'r2'k[U7 jB남;X  :œQ:Wo|:'E c,P*ju::R,mY& #9mı]z]f8q]J:庂㠺]$㱮6.N&hD"nԷn)&.`x ۷ޔӫ};2VΜ`/r=b՞6O+TnFXUzֻA6QXpZ(*wETLk8k$XX+cM!UjF}!rղ{쪢"B>!YkÓB)lWm s\;U ˁ4,)dUf=iW>`"h$7鐥xjG;F XEԄZn QՊS($ `p;Ϫ?Kj6[G?:gch3 i9j&)GF6,hwfF֌A.3岦i2 \Pee!&lM(M9Na;n:ņڋ"q9"iAfl)t°@mw6NQfFrKmö,g&YFӄLLj58* gУ"A`YE(I `&b$сi"A{!aj4m&V q, z^k'wqb{7 AE:& Z[SyIM`""mlmGd:麪j4ȜL И-`iJBH[*8E2ΝB59yHShGYCs]DF!?@HS-<`@JWޏ .pE2ѳ_NSyA%{%|?{8nI|F@u۞:^cgM;ԫ#Բ)O4ߢZua?57M5!R`;K!"&Ԇ8V40"@^Ei eU8:0MWs5Ut2FU,(WtB  p ʀ ]U_zUQ±sjAt%Kj&8 ꉨwJTB Nf8zVuM # ufL-B6lyK,(t5fvcP#$Bd&"]GBe8e"=$Af1\ZPv0jW:"dz륂TK)OL + ۀ#ؗ_`y8 )TGtyV v #3)#ޖeN ͅR/? K[ 1cm-G> z:X"k$P<2ʹ>>?sH_#vr2p, =+ G3kgb,1_Us~wc~.|}[xcsh=7…א$MgY"CfԸ(_ )G|Sܺe֭  O^C.|"~CW&x@ *{GDyGMYauB.]Z Xek2 3An+?(Z0@ n?G|.օCh)(3dDߔK2Os^D*VeHmx[03 ^ ULpbJ5) gr'R"G9\U.r!̭ڞAPseS"NA/Vut J70%awFz@#W0; Ж U wMʊT=H>ml0 {X)2h040E!+*: ÕD;Na "k5Z&TӰ1M@Ր6>edJ/QJXqPL&ƞ2Eh:,i$Rʺa&h~^<!.`2CWd0c7M,xl23#MSL?1NSt N Z~2]8cG mD|p3u0M+G#F5[B\ FEOiB7MᐲV P*aZ!T2̰|A+j4]Gle0H[\DhTBuw+YL8m0A<"h+(S\VV߹~2zq $B)*K ,Yxm:.ggX;L} :ZD/'F> ./j Nֈ2d],Xk'vn3mL{e?Wŕwl ,#iS4]lF0ٶտcxGɔQ.L[V 0fdtjTeYk@K&j^Hjݛ 64?&)a k@D ,UhQ*&!PA O*Ԡ.!UПT20WpN0@wy*oS?RzC=b(Rq|]YDmKf(\qƂy{31qfYWQed(Po< f$'<LËxzpZM\|aMQX2HX[aPXzkr]-8ydUd;4DԃqQA(IR1dzC= Avc;Or!&<-,++VYIQ.`uL&#ܹ m\vDoᇏ7W$BY֕R3 Xx__~?[o@.$;w4{P*tzp3< ?3 yR|.o|rnHa<,4\G tL$E>\s KK[y };?|a=>ͪ] ̩4-KʹS:Bk&C8 Q=:s]W!66ٓ!rb~E |+賟nЍ7z0`ȱsWM-n¥8n`sË/``4;߹QTeRp\/ C8Vp*ʉyFK%RHf5.f (n <ͮ2T zNm͚ 9Tfy؂(hMOxTB g::e<rCWyEiSi-A//9la X̙@i6(Y&IFwWnx58yo~w/΃1>Av70b~piABT\2+ZilV QYa4RMHLT9BD!rJdaI@ptw-`DC ':V&p `=~n{b7+sV MH*n:UsT%d}ЫjP p4&= ]/+Ay) D%”PFk@9x M$23F=SRlutWuL=Iu[e`0!z*SdL0\{@Qz豻h7M%Dhfa<ZPB xػmQrW>)tT1uLR @b2fc$Ug4CA4 y Z_)Nڧӧ۴[&yıNݻy5,$j Tht4e|K?FLH2uo`Qcs.];tj q>,y"lJLejQC ôdR .PR X'Rl41=P_iɟ\\U%8ҐmMrMRu@FpQ&I2GYڅr1&5sAl2z& ͛} ?vwMRgD4mD.u/_ӯşgut:Yet7os] o(9$uME]Hq ^}EQB?&fiRHݟ4) vHʋrqaaߑB8gdd$ۑDzU'+8&:.STP?%岫|nU^ 1̩j\ܔ/%JCH2c@ ٞd]A[ _$۲Ui>X8\ZYK"ӟ> h1wT$IB,Ô:,4ːfFJDi:|PČ QcrL$!-I`9BM4M1q]5b7LSu܈t]GbyJ4 ՂiӔɄ [-㘼V "4j5Ei4hY\F캰G#R $5 RMC0`cem[ Pz0{6?!k:Eq.0(mlPY$1M2[36s߉ %ԇ@PEزi6Y"BΝ#0&! 
˂Ђbj1M) 6u]=d6l0(Gua)j5D!k ҥ%4OnTED0MF>rt'mC5NB/ U!.VLݯfo2lM5Sq`Jl nD-OEb=|OV2|z~"w'cDԶkґ>=< ZX"R[zT3Vtrv(@f0 J=l%y ݦlK^kjT tb_)>|!x:6 ծO3o&)7O ([@))xAxzN.@>gǶ/]SO0ʕ'=M%\ m(S5~U86._~3^r;9Gr{i>D$ەlG9ofj ujey˹ s5)3ԲBvF[,ֺ.|L3Ƕ<}0 <Ζs*TB{-Ի1 ejʱ0k`  fq0@V*t@f%R10Fw1fB\&|C~ɋʇkَ~]V:V T-4U4U>.\}YnI]NT.L=%ɩXz&V>_)Ų*h5t9Fi!Df[ $6P~X8ڻ j T%"xz\^9Gm-"g8g~򍯃o+=Oco{Ue(e]aFϲ6M RyZʍٶmSZ*5Z ꖅ~!,ʄ_xA;I0M: i$?O*kuDʞGƦ PMJk5do8Dy8,MCeR Cva!qh`:E*ZMfaqz vWϦg-7I7vFx:+4>D@|PT**ZQj k<7 g'ၚGjGiXLS!0*r%yqf"؄!Jc2k7 `} Iv vFhՇolyX9bG?~cmo!F K~<۷x5[wf"ʂ7yUW6Fp=@2A CE~*"rI}V:"Ee0p[

z'E&9&`%0{א eMq`x7wNﳿFug}mzGe\ɑjFysgƑY QIȮ7ﯙ̅rE˙tXHO|z54"sХL[&.E5f,)zWnCӀ(!({"ɂDP+w&2ƒ^4 4xUqcҡ1*$?#HyygVQ!9b.re$*Qf{E<2 L!(9NaNz'5Knx), &B-I4+Ih{mc"0Mf"h}Ռ4b%~%yO /(3\2D\~[EcR(Ϙ/fSI!hhKRQr dкfM 7P+ `ƴ=gY/rTrqT "0rՍlt #C]* ,&?:%%y?9a $-3M P:=ef%\o8o*޳v")Mumӹz=o02@gkeVrʛsEL3Lpo+9?46i)3k=gM#KS{%[/n}Wz>ȍ}z}v7Z}:}N܆>6q|ou=<}k,v˻}mկ|8V7 _\x͙=m6R˅4娰 bRR+:tsU|K_ķ?Hn4T͇YtoLe &L4,"C 6HXGs qhudEpYCR.DeJʆ,i8S1*#+B{eRR?!>elG/0L(̲/o2j# ?BqwqDwXu_x ΁ MrTM:QBu}ִ^H5B&Lj h8ĝ+\b*WY0ei,פdcNME]LCn_Xu1X8˾sXO==G · YS6syq5A N3 ^0Ihܙ=r?XEM!x زIPpNYݑ E\pDts8=e)>{kjZ֭^F>XoeM>w3{%!s^ݻ0o⽄ r.X 4v>m5"#Lu<$rlȝA)"MFb)2 H>0tLvmX!z! }`kq}XN 28Um$ąrzrG~:ًSD\2wǎMqlD-5 \Vi|J{d4HaCViч9WgDXNYRBL#+Dd; ЙUdN(?ӈcIs e0uc-!S~#Ъ."PcWz*BTeqcCG eh$> 5xe>9vS}ZnCK&׭p.c!G8 xxC\é2tY2 \[Uak<=[7M>2wLrVcVSsxR0kKkOE7RH"7p7y}j[nufrn}v+[n7:6b 4)紡+"=A{ctYZM#ϯʁ06DHȟ"a)Jg4l/faN@B fX&i +8gB_#֣e^Kec()Lj*WSKqQtoQ\V&E8p9du/R?{&a "b"ي1eT(q!:k0i IJ1FLTZ4#co2ұ)od{'sѽf,0KNv:.⬩Y5T?W;ZU4<焊OJq-!1"L` nuvIgUi^ıQ}MLk.DӜ+3a_k/ʡxfzX{n8Y{HgLBr[5 !4c)Iux۴EE9ې15E?aO 6IbqݮG]B1uEe/bB|TTy|DF"H$XYqp. J|}84 E,-pBj;PoG?a7VF`ZD( &#!8 QCДPF~~A_`K2q3X|A3f ̽5x&VeHvH7mFAL\m&͌W٬)q'؈5dĺ-*Lt}팏팉1E"p!2`t Mvnt6njϠcSd[#J,bumMs|`هomp Xb΃zrWc½aMc=ɇRoqd+foo[soLf؝ lrnqMDv1p}~|3!;z7rf"g_>*[gLE ytda!hG@a4W('zF%w7Ţ4&6?]6}V,by n)V e(` hLYhzai8%s:cb \i 7qaŔ!T*dM<>\tlQ$F;Je m00GLvPhcyyQpRf 33I }29*.PXBD\s)ˋpD"T*DtH&3h}>_C>qb YJȡZ(Q<>]qL ܳBdJeL7i=h$\ =S1p 1kf'Xb#bV ơ{w'Q^Bc-M78.*V-i&)6E}|- xe iJKQI\?5Ģe\#I64*mРFFG\o@ac21 ˎh(.2 ULJBaKt)ÎfÞ_d 򈋢;>(i˶-c;a2RN!"W0eUۈGs%λ[}:"":fQpdC>WS.>Z4 27:>0g놇H&066|mۺX[Ғw/x?^~y'ZQ_s_usPFB/r;E3g|87|ً!nr [d;"d-iY(}fMs aT}`R UG(^:z)sGziJ{hx­w#1i5h4ř1. FZt|O{2<⃞%R ebUsʱƵ֌2Y"..#vPQ|YҜW#2!k>fA^g'#{^K9V"}씀%Țv2;`6áqMr--PҾmopuH̵:YCNt0ޭ>cێp0ޯ}~2vDEiͫь4 pak4($^ݐݐ#"1^ov!Pfcm?Ȉ-܍ho1Rb/4e 9m%Kv~~Y`Nَ q0c=Ba< ԼsIH4\>\ O&W6-$%f*s56\ &=PĿ_A3:7Iu9ဵ(vF.q,1o 2cqyWt[<%FW=iLI 1:y5H{XA^>ck܋Fu̇7>\3{`XfL |*זSdT#u|hs&렚ge'9k>@rﴋa_@*q(Τ,$'Ik}ǚcxb(g#C{U+Q(.ԛL"d}q>ѯjrK>fhfB-1CQH@LT]fI3mw>كoVgkkcw7s k|;Ϸ:3x}J﷪|[_|^zi/ΝK9Z%ĥy@+AF"R 02E`6b`>8u|ᐡa# 9et&#z pU+=FϤ }%@6泖IϬ3LH]A5''ϫb)9vb#Yʐ9~3f/f_ D%$bbBu>8CƓ%osZ e m UD1H4Y]&7Kodks >lќЮf+/O")1PM)~bד`f˶8|t 2\w n*q=f kyIW=W1,ޫ}ތs>ot>&;W_ys}n[GL9mmu..#в(JΈ5i EP-1lA+"Jۇ(6?Ǣ$)::ZF8C!3(nK1p 3"(ѐlM(54 6Uh8}(N zEݫر.^l~?rPLH$"_]uGNP(dطJRi j=o ΜyʿVT ݮsp.p0Lrd}њ2H*q|;e̓jF !g۸T9r/CsSUsx d2}wQ<1.V+?G ^C]F)E $(YYmqT&)ZY498bguB}qMD_} 8k7}ʇڐy¨hT] 0^dj>ۥ]pXuAQAYYzt]ZzzV(#2>8yǴ@ ;ZZҞؤ)#և:\ hpmq, iԋ:\Y5<0esa9>Cb9n>>L]&1gDYy؇ٖ:BP3)r937q}6u+e06c \(ѿҫ]Zk_ahyV4t}RTgw6`8lY\ M.>-JJ9/ؙo_r8t$ɚ$Q& E"袩G(<\0F*#F ƿ7 (fWxkxH$QE|2x}goa,bn>!::ǨǛn%< k#Nù;}z+w ?^B*Uÿdo|1<8ZgUqYR+T t-]Wr`/p,sѰi|YKXb?I̸224dO}>U'?y=4ANȑ?/XCnq1?8rd7>r9>i|^FP/ѩtB\(0ڪUH5ihܓ@C1%R9ff#$i`f &&r=~ 8{6"2t;|NM;xee9SA@fQb[ B̛EH@rhT)˸3Yٶ\D4ꛌoc$h>9fV]H CiGl/Gεcbf8$t^T%*`gm}:us#4>S)۸@'⾢zU "X-TY;}YYՄTyfim8w!+$)v/ ך̦T $)Mr>0\y~fXb]{`;}QCh+}@VNڻ.$A\ "@km! 
q `=ăjt/ S-~zDM9#HB۳H6s>*}g%7HeכE+@LZBлx]z*.M\K'~b>GF׿>^(9[DkH 5)>65OŎUl߾~z.^܉ptFts:+'|0;"4!E\`/ DsZl倥Jc3Ee*/zyؾ \DCQ,|>pQ-$=LL,ywIo[0IHn!pOLd7؃o}ko1DǯfS!CD*I[lp7WD}u.sƸU~(fF2iC_]>isQgD`|J*3`y:ܟ‘639:  {FB&/O'fjFIͼ šL)aN :-gUOfM D1ʖRQiߥ!r ͩ.Ɋ !^WM 梏3 )mrl{iğI>u>q,lks!ޓƹK8Z莴of[a??}|"v+A{!:,DDd@ sڰ!5FdJ,;BJdE")_znt1Dg;%˼42gwӠaf8ߓ4xb|>|f>AXDK/y|9t]q#<ط T'x716\zNj8m*!PG*b>`b[^o;azzvtP }C Ey;?SA(}kə97K#[ DZ @}߽={ 2& "Z-^Dbjej\nb||{\@RBgq|̿:Ģ:,w17'݅, o JA.e*xKδ`2=~CT`|1dAԙ=w!"迠p mv VO-+XFi"؇N1Q7Aꤟ*kRa:jZP(1>}@34qy,~r1s!kd@αiAV* 7")'e–f?]iNw3;iOEQmp~,qM_F/o+bd>(&w@&c~ng9%~CYxh#-(:kb) W9溃`wUIF#_c4n?E3Gi0y4@{xPSF2ekՄ+7w~gGSD_gcffz5u );:FM03͹ ߊ1fML3};9XDOXY}#vN~r͗#pFoU>9mąuOSsS *](`2נְ֨cl넏AK<@*j5$6 IHâ1\&e2>R.wƉp)4k 4 9ά<İV $s1كf*gϪk#hU,nmiX ߭ @'5^w]Swf0{n2R߽wN;y>>+b͉۝$@dC|_X߼1*ұ å )!AP*z%~^4A~]&Ч^?EdO!@&2e`}/05UF07_eO >k.LK3#m%u~11FoDZc?_@\e}q<,{—t#X@7ȠIjN c1fc|~KQp\8 >CE0)ĉ,4$&:fU pع8>}~'NLw$.;?h,ЈS0 |QmW~xС7w8 z"8ׁx'7O\15:wفEvk"P֙u8Oq89"q$u.ߌyr0YԆeޘ^i;kƀ'n{ f5@ T߁|%M&g|.2U `j !KF_GG9zDCP6N+xY|'Cgm43~NO΢d{mș:fbko9Z0' 5n##OpC>b(&c&Jld`@(`TZ9c 9 I!hWD[e0s*EP U 0IwND᫚ 8t5ʯ`++S;_C v! Xxǎ&>\xq78rDh%E kd?cb>ً*_CpA2Ԥ[7% -CB,JwCZ-茊e|9S~ *zjO?CU4]4S>ߞo ۭ"@LT]3G `7\8zt9 ě?}nᓟ\C?|/(^|q/Hx=:q?}pA|E+,r>x1^2gk0 + Vӂ 2pY33k2Q**O>UqК"֘X9hk+Ec@6D8EyNdMT\"_O_Hg?3"=VL?o yS$g9eO" Yљ6a+a2G2dLm:aY4 $t6P޴_c}VsaMGqca bn7N9j=ߝ$akU{ϻMop}XϸN;gw;D7~_cn~N3nϗ!Q^VF(b#.b u bwJ#'gM J:.~˰q.Â,a pAaDv)5r"GR#~md{y<:CX],E%.#qhگ5ԔGWt܇?Xd97YZIs.^~ypNFT1炋ޣ4dT'ؓ1r1nb2=g](͢TJg~|C8zt?ܗ|;zw8h9WGqwM0]5Gll'D %?~>3ι{anRE70_Ľ.`nnņ_cA*Uv#h6IzB6fnׂvzA HP!&Z⽫1}OYD0Oe&'?v,m+g^u1ɂ ?k0ٟ,&邖Q -j`@ U&jdp V IDATtM,FɅ"=Jps=6`? UC#3nkk6?Ek׬ijM9kׅ~$C 6B6sw83.H#:vM0HD-mSxgQ7ck_ƭPxL7:~vluw w;~6D"c Xwb7CAm<ӖWS<{Ѿ唼 Dkic(D?6ר #gpj;dUmQMTfh}?A*}hg( =nmH8ΖC~'ukIx4908|xXMT_B pn0|r>'v:44|M4+4Rt,zU9| s.l!%]Oo:.{!|?|g(ZC "h@dY{qڎPq.v,͠+ȑGp"zw2Z'8xଟN,="XìJBM>ǻ99O{, ˕hKzVC.Љ(9֕bCCm5j,.D؞`0N]MͰ4 S5MPEyrFqZor%ηlT 4簜 - 0 *R.M³d41YG^]gT{tvdzGD9?5꛲7:r݌7s=cF&>A6H%6ц+D7M:m&>Gwp7Zg\#My3kF26I"9p^xi>\cL6C4&i}"dL 1<(`@p4yܨS_OP.| 9@ps.Vq\0f*4 RxzN:T prqimdCb^dgE.@Hw@#B_)w*>#x≓H&xajsӥLv뒢ń82bXXޅb]ez.^^.f0uwݎ酻5:5Z{.q}tGF޽VKGo64`*Z|qLZ(beF#L[vaI_GVtf-]Εlx}+sQf)2F O4Y$VBбHsiRV8}7i%#,\lU]0(PAlCd:I9c}^Y=aVF$aߡ6K8C5&I_GfoP%yrAQ}$^DGP?8\p=zޅ@2.Om8n&w{䁼e:nuTvL.}-VVE7Qv7gliqN8D&w2_͘9+^5u"'X4$g !:&Xɜ }Sla9V ?GopG&7AYF(c= {Rą jCNN#0dޅj ޔw*_cT:wƜCRF 3;Kd?meM|NGiDW\ľ};B'ډyDeF3Fh p5dܟ8QΝػ J>.rgΌfs3G9ry =}StBP( w/2y&*]Xvh<6 +*!vtd5 `SW:-sׅY1> &T׵[w9eDl$۸o#Etkp3Ra?Lo̽J cj=CEsLԪPD Ad #9b4oѾI"̐#1:a92B;i"(s: X\ƻJ$`5\D&L{F Aqj3O\n}ѸMLV|=os@/"ԥ7L_)PD*Cq#2riPHG0|f5qc`HVƴ26@Ca}#-FS pG}};wp-xa!݋ /B^|s=\{s;#<`_bvhUiS c;%|+_91>A1$ cC 1uoI3{8'p*]8O<\r^gFK5ꖏ3"uTk|m qyADm{tV >PB`ª|e۷?o~gEϜP7Yq++_ࡇNcd(jr9%pQTsm; jXf} B(Xa.8>6Uy1gZQt\0E5؈Љq(qO9l( ڃ} -q( zghb\(-YY:`[~=@57tߛo^`[ԞRݪxٿ}sN7>;ϛpٸ_>L=Vֻ{|%G1s; QQ22-+j-lGY$o"RpA;"&x J{ ;>M3ăgZ& {Y8uxyxn,T1Zo8p NmOX_Am/ueXbXb{zxꩳB #Ht^^${h("d/@3?p8$| i0%A@C@Ȣ,~ux4ݻ/$WP2b*cP(8~k"Q]Pu~oY.ǹtطM:u[X(AxaϜqܳ▖&w% 7/cΨ^jH̓.-oh1qNm#ݦ^v|ֵj1Kƾ؃tdD+?ߤc_uSvA2KqL[p1lž}0k( CB%imHK"Ff"uctwxmXZ@DWBև:ifM%urV&EɅz>ǸA&8IDk-CU6=3#}B`[A9uq>'X_b&ow#oܾa1n4n`l4މ>(}k |ށ>-:kv7$)*ZƮR&bct8~Ձ2Q2v^C`J(DRQfBJfAf00}t*OEW_<=U)6wU𻿻(b-ȾJ_cAlE᳟= 33c8vOĬ  9AfMޢ^Ϲv;/$#,\B y|΅ʉ0ف72K<^xaw?f]ANQ'+|}<Emc{O!:>ljrse XQ\<2բ{!eESn|jWw#4&z>6e'}`, W,e &-E-YSP6QQ;'}H v#Tߝsm#ԴBxF.; A<ƹ^.vti̞b Fk2c!7y2}Ujm}g+@:dY[Wzo"TmsґC;\kĹ%tlȬo΅\@dF 0mP2& NDd:8,JtI잾'lۘɸ}|uF2|6{&7USn }v>wQɟV&!UX{Y(E<.1eGH*L]@TQ<ĥxCP?8쓑f\!(Oa߾y:(J?}tr PgCع3_}_=^C^0 U8vC&2ad2@"?~éS!1c"ix*a#.w9pP><&'ŝo.\,Φ*=p.خ)]E0$VtH]0{Et2Fh yPɓ=4ESxX\:| *%&/ù<1o;s7]E3c6]>:`T#$êoLqv8U&8ǎI3h[nKnYnU:#0ǿof8|x/^}.!4Š4Sqxv43<5\``~>pO>y ;wְإhaKn}Gލ}0x-UShU3Xf "d8 ړOF̎d\(CXδ2.@ 
=Q1B 75)5CPv5i26*b4f?k{c: PFDXT҆y!hȑ1a(ٷT4kgJQoCZ,UC5dҊM⌙#253wyfRPo6iomvMƥtp]_rǼUd'fOW.8 "+ͯTy~52*] 5]g}]7ʨt#}VV*zzдk=g\:lq |=6er#6ͣwiFR>dľ䨮+in F5~ׅGqw z }# -ea, 4C :R3<{3_Vb\"bE ?2ܙ3E8 ۰bhwa_{177#,dq aA=(uC2mۋ_&Z&o /ղN^RaÌtEvݹ)%G~O}N ?.^qWgd>Ϛy#.CJ|r]ϙ:av Ast6=!m .6z>bHZ˨| mñ% q)`&W؆Ȭ99<ɅB">(v= i8 rY s nޛzkg>}zwԀ$4!Ib*8Ħ<`"ĉ/RW媯RJ)8  ql؀B@ IHBԷ}^sNun>޽k)4JGcK:vljig a 5)mk= ܇RGҴy>; #Vەww]WZ%^|oc\~g%^=1iYQ[=oq,,5ї}9k!cHHW, fgX]mQ z5:{8w@x/,cj*?ߎz!ٟn,jA'HeK`~?WVя|4]1OrliĢfRrm!PN_!y=⅏SM9RZ2i (R13Sw_p2~ʃR0;;}Zi?^#8p`FG5y6x{9zVβ?#"̈'Ns沏)O t ybDn7WZ9c%wLT$1Q'莩l-*v}BU-WݔMN.FfA=5s.-3LM:hV+i)oe#Ҏ`}? {||NVHU#c"~CF W̚H._2{j mbX/]}9ɏ35JK/^|="| IDATrWj|kp< Vs7? ~cJTj|z$6 EX$Ruy UERUE<+x %8W hRg߿5kl,(FFG^ӧn! sČޭ&_^s?{qlYk'jH ꈅ #%.#β+ط{n?>WĀec#>;3sa=+f܀HQwN 1qRIEh6H l\;^}no}zjĀ&ffnFGx4Mܠ%M=neR0b=*wcY5τZG/ ,E[ SX{>PV ]I},|}v(RP.WѹK~>gGoa{vڱ:U=E&)E]?&(3)-I CHiS6f}9z;Wf\z{n$}ュ\<X/B+d=Jbx8;Os38 1yᜇU [n׽(^s^OcWOz?fgEiN84 brIF6>V1^A22򖇐 ]O}F]"К&L9ȁ~ZC.,PVvԶ%MjII+0:.W9WRCNg&ޖÇ _Oɬ+aqq8v>${ۋp㍧}X\85ު=1S Kpއ+JG%{dL}Ըy9 3YIಏy)fT )wИCy%ED2suF,@s:$#%aN#GȳemWT`T6CLTIsϳ Kac[5M),wm'cvDZfU;ccrHy_Ϩ]Se k"#zSɵw|T*rNepS浶-N ]6%cop1nq~{Mymn_,wbŗ6<|n]ݫϛs 7g?}}< 3?֕6t/pX;FJ@>\K. y#>WsI;Hȥ #rki0j*,yNot_tQHi<OOvTTe_xַj "?@/z cxӛN}V߉˫R4`w@Zb3)8}TrozӔ|;6n@PJCΉ gGd AE"9L&4j5Mo_Fnus [0yV!!e}+* )s3| ѝQ0ƞPp]}t|~"+MWpdC%\u NHGh>zSS)[\x<ᮻ.k^3g& H{1 FgY) ?-o|s$O)/E9P_@Ǿ WML*ihe3:b^h+B^4gX5vF <-#@,@NȶMJc(˽bxZN#gM;F4b fk( 6BM js\0  QEJ4sF6`mϑ  Mc˙Fꃳ gc`0qyY^,|3g$X;Ipvf:nYA^ cԉglx65>oX_qHڱ,%Z#Э k1Q<"m<]T.bD墄 yKX5|,4V3恳ԟĵ!ʳws |[RâD+R707fg_~OnE/SZC@!&KKW*Z5ޭѣh$hOE< ]L_/ 3CPZv9gyo8V}{[O\^©SFJm"D'1d4`B>J!_GhjL~}=Ngҗwz{h{Р9dR%*z=*D\QH)z]!5@e٣u6=J v\@9 )fᡇ})zqgpm8thO_11@Cv K jeAg{f:v(J۱JR6o" &HOqM5\EUGT+sZSm29+u\!ʛۉ9ϻT  8.R )gDjls^#Ahk<7I׫ Jl!{Dk8-bm[gX!0߫[?<^@\]}!iYɑ6Ͼ>)[~B_%^{ۙh+S=S.Vk] Izr<4T?7ÇK~?t {.Ћs7;C˸޺毹)4]tsh18+.o3l,u(~M$c`(rYS!'_߭2J&s1Rv7& Psłu]2b*TO R˻S(a"jԢ hT2JB}2ͳB]?.}Fkv'}ޭӫػg\>]>MA"7&J8D#F/,0D7<`$#MOފ@{/[?GرQo-UrKBj;|w>V> O^O P\?%O"xϦ1Yv@k=9 _-x䑆"Q$)HհA*"&*!ʀ1![>"Mimo@"YJ7-FF^\GQw016e\wI hˠ:`t%`'i\PN~jſsxQ? ${-yW!7h(w(k|#bTq.K ec{9~fǡ#(i1bB ^C9( #Ne?ZL4/rmU}|Ew~E9Ld3KqVIcE>8qT =E08A](0ՙʴj5tGqWGLGA9e#"F{b;9f}U`)GJC]0vUSMwb^SקU籾y/E"=_>tv4T_>iWh~58w{ZZxS>Jpx@Z5Gۀ )#UvI³_j S 1'2սN9M%4S'[D Wd PIx6= R΅#\ { N2ccxWg?{Ϭ -zˇe`[pib ^Z?G@$HArCP~k늹G.SL,s&9-RR ٲF=l^*nIIsNDM.*b=s=d8r(硈X%rŜ,ën֜_!۰* 1G9 Dnm;0M$HVyo"W% C;f\/PU0sb\:.U$gϓrt0E^]6y]`]# :qM0>V_&b݋]I}_I}v>/ic{9 n69Ԁ>.<l2J$iSBqchetXDB>|ʃxa}"$rkT4EG)Pޣ_}Mg<_SO= ꯞvǏ!j=oć?i 055:j^/ m3.})ufض**|*:8z`bՅq]y<)s%&x!KN(,G:q;XW+;{OZS>WdGߛ6k8occ7X'Nıc+lbC\QEs8~|g϶p,^3ַ* "6Ҧ.Sw{E|JdQ!8qx(T4$b u o|5$, 7]4L0HGf/KrvrTKεSyɛhAΕ*kߑR 5j3o.A_p4OEpEY p摔)B1˛X(q\LE}c:&R=u ^4dY.[v;I{`o؏5>\7mc$d~2ݚgc߃>A~#A[}{a|"&?<Ф"B\oqaX##|e ݣn" 4HJ|9t2xI+1ŔF&yr<H1$IW;gϘ:>GFs Z xǫwO| ?>GvDXL.̲=#Їh2%&?j{ *cA&E9Oq-:_aTEDcxYEg)+^"0M3t|uCb4e jG ay9bNSr>o(g7q#70qlLߚ )H#[@`xY-ﴞaj)`8P>k1Dի {r8.e jw.>3Jq\pq{]p/v! 
YiJKD`r<>\@d]%> ,>gR`DpOnb5=bެ{yS搦' o=$b۲5 &\$/!XK Cc4=Qߟw{/gpBGV5e*=H HB4 W *g bX_*7G[, ФdW]¿o}ժßKLRYB M@2 ~AYarr׌9ڏƺTGx,z>G-%#*(h (α*b@̷8L.(\"ֹBP5eѓBA7@x3m9m"eH"">@CUDAq4p--q}8+c-&ʙ(ɡJ̜;E><\,3s,Q3|׍T0F]b o n0B%#OIc( УvwnE_.fR[)׭Po2k7M=F>"c8dS'_ڏDZ>͒q}kf:~}lE_>wkf}}o .v=նhq5y3Ƀ](^sGo$5 {l/IFy0۪by3^6 4j!6޺蕨,uyv[۵Z%|8a܇_[>Q$w4s#< 9]kl 5x0K-hp Q؍R<1{Ss~%Nbee"X2'H?]LƻĹZ"EI Ǵ1^,"w= 2e-ULs4hT2\; )"7`u?\P>ݙ3)_~!i}/^JkX͢LTCftPuk"p1-㒕o8(OMQҹE#g:k@QsCA{G^|& 5> 5) knE#Лj.en<51I;" d3cT-lu]8|7u#="og$6E;.Jj/ Ћh'xS $lb<ϕ8Mӕ̞p'zf8gx,< FQ?+J\iT32ʽ)zԺ.@n?1.ָ l;6%n&Sx+~ܫp>IX c0w{ne]o`W>^/>N7[C^TVۘ-au^Y}'":e%r/*>eUQ鬏*=IC^.ur[B7DH!j< ^ >qqI*9X_u@@(s4 Dw?+8^~nm[-獵am/:U?iaj 2fT䊹 93c1KasOνէRom̰9nlr< /L8Z!(52,~KiOo&B'@M>A@*+d4,W~ew5N'/yla >I>\K{݄K.` i#yZU@_Q@U}>2jx:Yb )>Ki<2q.EhPJt15=A* ϴ j}b~CAGFjx{9J; FJN8ϣ4 uƅQk-G+eO14e}TbR$>R-s3뫊}ƴÿuk^d|gJc%d]V#I<#dhBJJc5J')Nf >ݰŰEejp1oQnr4/4Uh |qlJ{䕖@*zN.Ii^C`@c`QHE[E1 %Ê׭+GdPU#U!%BEACc*G493I58y4~g'Nҗ _wqPlɨ4Ճ'_%ʂ ݄^2.رAFDFΩݢ(3%R`l ˜SLUðn(3{ tk9wo\ _7\EB~`J}=Кl &T$ WDDN=e\+?izʢ2+ERN>3m?5X!iPj6UYHV<*AHWF62QdJfD )Od<ʉ]K%uM[Y_j1K #gR5_3y2mVƆ-CB1`=jF`80cP'g֗m1R:&eIB! ߲GIjԎ p})ȕخjnI}ƕ]u[޿cs%o-|QԔauQG<\ B/6C}ٙQ9x ҳ^0YNP[DwQrbW-_auu/zZm3 g8xࣸ瞻7$l#Hu|,Bn*س_+_Yr_as+j7gYDJ~a2bU !DS. Ke jevP$4({=}\O=G>pϢ^ I$XxaƓ;ʗ}'CAP;uvkޥ\彬~r摷Vl ˤeWMqekc @qHq& y*e%h*@>X'ac!z=;XA #uoL#JgH&墑g9ÜiKig8k/gX9?qHUTgiO 2Q{mVcU}]rRg4J_zў6[V5웽Z,bb<:t24/@BbU?L&ekMrm S~=HE%2DI25{%X_ FFJh4V7TRHI|k'w;l}γL}~}6Mat7$}oo6z69kۯO pՌErW@5B@!o[WfY7&!Bjۊ&!j=7 @l$99SQ M+{J};.`yY/ү`x??s~#HӲVG߀={rtah\@Hl R1e=ux_q9|=mC>SUL/1t5,C.TJߞMDI  '|1uL.DϺ(eH^װߊ :Y([w9\ufg3/bO>EWH}Ts ɣQMyƗ(TA:uM@$(U:XO-zc\yii%H-(v1RXCXvOmjY,!N,㚬TrtE:2G" 9oސ0#}6[ cxz +t~qxkql`La4 \1f%.R{d絖l|l9Λdk 8܎ ?3\du)ڸ50qtLc5r|t^WJ c/Gv>M3wz][zD6Ƥ͢g7.t9l/$zķZfןw, M}?Aj4"\T![Lt:$I>e1]ϝoi yU*Q%TeTmv۪{A&;Hf<]HOه5??h L _>"uiAc|h[y[RR$ja w i|4UntIϺԇb;u&}Le*>~EЖ%>k4"PO|9R2z6)&`*4@c:d0>&\y)T58> ]E+@C9W؟h#fg9ki 4;czMo%s\o곪Ճ"'ݰ9Tx&b,aw6nf n>{y/*6MihHq %3jh4O|9$TSguE.x[Ds.VN9+wל3QtJ).PAbݰQՐR]|'YEv!Ҙ(XLMqDfao,4냞>Ë۳.N2pwG5+}X3z{|a=Wx >l |~J(^"P>ߏ!x_@*ɨIq#J7c JD;Ƣ^nW(i)ʌ Hu.mcn..k_+`yJqDj%?CpNzj;vBꂗ+^^ NQփƒҋߠ?fe'7D)e4TqUFh-FY(&bq1a_QN)2uTF8bNI^E!%Cꧏv4qAw40\b*siELQ\d4w|L\nE2 oEZsVF8-kikf?Wab.NsHt*ȡ鳣AHI}z[=+o;P]H?u0Ҟݰg`] ^Vaj_;6>c u^ ڹi_%4;~}J_>[NEj Rk!uG"he%͹XUJJ1A[^K=95<#!xFJn=`<*&KX/%c)%{u D%(cmkorM4,Y\*2,VV2xBsརZ6)xs.DLsVA4c`75Kϲϣ9]b@\f q͐漫^AY2\lڭ ٗnQ 7lyi8WmQOԷf (KҔk?o< ۞"#&oU/b(EhxjV EqJP\z)2Rr)7%vg&#NW-nyh(1繁XjY󼆪HƳ1 >:&(2&ܾXycq*>}MQ~]B OѭV"[@*9-g̶:إys>>3݊*v{c<{W4QIL>ƹ@rL`EDq)MxE1.r=sQwCZō77pKHff3J&oC UWr*pPrdK"UI7ҚsHUc#DѰuBsg٬,֫gJNQxAL81o8_jZ##1ma㛞YSf|A4Tv*Uv? DZPR5כo'n2[tunV6Xou)"":[ݘ~R}YQEP`[0Aqn%KϕbesDO%NZWwdSWc2"hL ?"0~>y c >$e!M 05O8acb/X6! Tf|I\s2ކgWN{ ^~ 9NFMX@ɚ{i+mb1_`)dQSNg2Mw xӛ8|BG=oxͦSUw&饕Y_+BҰt d2)[ 0.#^ap]ͱ/e)rme\G8 *5lc<}` 1vaHIB >vwd(ڳGOv{|4_2%USyf.j>ZҰ0mۯQ|Bvm뭊|%ů* ik]赯:g%OgLTALjAPaO8f A1옏;\uC>֪i3@պEzlܐ:_oܭ]XH ;ek5m p]U'c+q`} 7Ɛzl@-f6;-,WwQU7w4mn~ÓUp^;əTn >t EV9&)Q!6hkAw[(o(W{=Eh]a(Ӏ8Vmz+lodl ,DPk@rԢz*W#c{ +Ƌ> ,כ"hYT)F!\@{`b ՅРԳ"XÈ Yӧŵr#R̙֘sL1g  }\E/)^N3Ev"Տ6v8:( Mm@=nL_/%jo5w2>}sech>F3<*vs8xX(B>& dbIMpUAaJ<{ dNF bgYc"1TDy灭C>*,-x0 d=p;%#9z}sY}!m`q1}0/R A@@>EyAȚ7Q^uzZWyntI_D%Du|%2JfMr~}2$ݙ:X%GqIkJnu($nF1r:|=\dW\4:1Q 1,`Os-,wIe?uA3DQM2"_\Q~U|/m56xt:y9Q*~t% 5|,VjkN2e)ЀXQ%E밣* P/H]k3qTTTKA`^px͓ d+\sOp?c~ v3}j(V_7n`2k^(T?a?dQ[65 5rDMU5߯"9l"rj(>V*ȕ!x7.Ggۓҵ$/{$I ?< .ʆU9F1FNs dIFU @TU C*K-*-h~_1k@+zA ˳(^Ww~,#yC>M!&_TacB;ɗ`"gr.IVO3 "ULNČ%MDz ׼`<@~ϡd^࿉(JMDVISs |2hh1 ^91#Eh! 
LAǒ17IŽ[tsjib{gQ :-N\#E|3?DQt:40ㅇBUWs Te.D\wX3ό fKI sJ~f*2ʢ$knrs4vn * !ғZ) &hormJ9NyQW1['(Y:փPg f35(YCwijeh$ʾXj3EHmIMD֊(It&K`FDz)Aجq֌hj-/ ۟7F4ެĬ5 dCEORE7aˈa:{/%|+ dK0Vۉ`IԿ]'Y;k̸H@G~7_E y+w1tnP `CM+!R5EHmggK)j)d./zD;RDeL{Jѥl$ܜKu"zxE/)18RT>} ~gF>0}6 @좪OsVDE1ξU Z4`/aDGg#CR{|t8V7W4k?D=y<0=cje3HR[f>NTuJ!o#P``gE 9&dח%ˋt=qX4|B~Ek!6"O)DGlӰk(5wr&Py+ J:Fu||OMvCJ`75}I^Q`( Z4h1ǵk4"'fԳV2ZY70{z[22~~7`Q͚lJEZ 3EUmp]G4d 7 #&bg~bJ*8ཟnӧmfDĽ0CcE4#X7\c``O?}Y5ωԭs`w %dD@@^8X+Z7JX3#c<g ‘j%àjyf;"[1ʤ56RJMDIuHHhFJ8nj&w W55{hW6ύ" "*5,թƱR7c1# *(K2MEZg~ؿ{ɌMDݢl/~;W?}$vo k?T\{F!&2k׊4 ь- ҠZBӍùAxpW]u"j?;8JeGdq<1$YѽXd|HrVѩQp$:XcѸa߸ I M$8 J.Kk;f%I7ܰ lQ([`Rs> 1ْBڪ15ib75#FOo!$/Zhϴ̘%ڍs[w7=o"DyϜ  " N?vҬ]綩M `>LcN=I#EF s7'T+{8u\9'58W 11꣡.rX{Ж3(cn@v7Q`sW߷nz:f_$$հ/<㲉97O9=WP;|6x UZbru[.o4S[f40Cbe1={8pM,V9vU?{$U] s~vWunu <08lm=31b?< G̯/1|;1v5#c<0l^z j]{g;S7%a:{{̾I{I~ :]ě>4}ÀM8wy? :uO<1Q({91oC <-C3d1z^ȭ )³sv6{8dxߌw,..EX =.(>߁sM:oCpv ȩ H|8+DU <PsuR)>d㸀c6kn_W%Шh'G3m4b*/I-oYF?c>4U^_*GY/ ٗhε氀@pKĪZsAa0Lg#v[[éS#GOM5q]_OܚFcVXj#(Y\TR\uH 6Ee8Oޗ𖷴÷ ,-Dq8ʵ fAxC\4Ux?ZN95,յW) UBnF6^Z=7Qk=qkzuLab6浊Q'^D02Py+1xiR78BkRpV1&Jxai݂$\gA-֠GJ cX6byWSc*-ᾞ*+S6qns)t11UKcAom5cziqbAPb>U2QϪٽ)"jZ)k=qd) KO'P@ 0\ {.ːJ&95l]ªH?a(R!0kpct;7U↸ߓAM.8͋ IDATasui<- ZzYDb)SkNUoNT 3iRA౷"(MA7l3 ⱳNM;ͼ^ vnOO1xhs+Mob5"A Ɵ/㘞_ō~eeǹ99]w Ni? Adj 8H#Fq5%Uj>ܫ_}++?^B:~~w?WOr.;P ޺{?ԇb*,RM-;rV~W6!8O𚷼653{E\HQ(QG@geP>?f'r^`t**UGq/9MWxMR x޳DO9RlzHV%YX}iögr"$ G5 ǩ;SeS.BsLgmp̪̬p$;RJ9(m)M(}6K62;7`SМMۦ dWiE-,h(fzRky:yȫj րI*9c)-_۫&K@\6(K2MK4&*6;𰯵LI8狮}*r\~^/<<|O?: `M*0_߄'jm'=S_o{nz=~'=9c"xT@@Jњd=27ߜp$}T$DM|ʳu=/p,WUDQ &Pg x#6oBch M|K3zt L~XJnqS2U, 86a9sA/Bc \G lrּU Bq+:FAUp^\ĈG3fʵ %Jyv!P5IY9,BK[q'K%&"t*l^v&>']ɳE'4~/;;{$ " %% %8akyD5wui<t{;:JjFx@`'h NT5e%hKqpR9IfՐJaFLZGdk<9]8eZ05/2Y&y3<3k]:zغ1˸h2u̜N P>;,qfHrSSi7?ugXyUE$cgn|/޵LJNk=N_JIB>s%}+p`rNGzq@9mf,ؤp9VF{.ApvUAnî]w[W_@ž?Fp:ULIԧhx5.>Io Jo4YF4*b#INpۤ,â/w;P#y-ruK,<gpO:L (q%hӬd?똝| 66Gt?x h6c|Wxp@ؒf4HJgL5,e׵J7puX[[< YiLA|'y'Lf;O-4j94vdL_K6]`&1 fy/͙lzҵŊMTzj;g$锓PSI宊WeMu.m 1"x:sF5(U `JWӦ*`7rtdR EOdM2I*;u44c{T!7ZTl*PJJlW`lNPǻ@*uY'wa9h3 -o/);GF{~ɆRr˾'v/]uCu-k ;-͋f 䥜*X D86\e-MlÀVT/wEfi /q@eupc[VJ%e]!fEFbfxkx+w |%lg*;eaEj+%x+3B\G <MCXް9j~ ߾ oxÍcWښPBRE**Y]y^PоYW0@5'P]178 ԳBdP*sZ*M ϛnڸAR׋P{ğd̬y\'27,xIlAn֠q$;P 0r*Ze]Y ǤJHF3@菖#M{:]6U s=ҕ6cG>15Qkj׸ɳi ns(5NFBU# oUrf<[} `^ȋYxc%QJ<ի08 k*A\kwX&pΒT oza('a(Eޟ?:E<ގɚKh $E9m*΀idr3LMPHN^eB9h F2$ 8{.OԤedqg  N.~;;1WW6 ;m."% Kh&4c3׆¾2!)s)VN~gP5{U<Fb$7$ڽ^;8HB>5$ 4(v{|O0tD솜烜cm}\Yi[]c|%x ߽km>\sDAѣ/#4AOPqxTA1LE>dV[.: *\e'@e ty~ڬ9=V k|}?Isob`n*f`뜃"] \x﮸u7'(ʙdGEJN\ԏyUsyܫAdJT5M#]s@5m_…y|SUʯR<7s ^ڷſrz,籽(:BWա+K `S&5"S񚁯 O57Q^ŝg%/+0g`u㩲9<@斝-<Uflzo4}mcvޫ5ÍOq},W8ye^sөZ[Tvp=b"9`lMU)[΅nA:Uԥb̠'BkLb׈ag3Vgx} F@;jQUXe5ᒁjC;Mus^ h0uV3HϤ.Isٴlj60r\Gդ>U$ճ$I{瀱=l[A@W ёG)Q{sJ1+CrJ-M*ٖfo)#B 1#NYeVS(_B^EŜ g>Px^Ef`g-ti`g1>^wozEGtժhTy vK<"UQ.BOCM*`ly: O|Y?_ښ1uTlfq{/[z;7k86V N%^ VMPWfnx晣Snp`k qwL|D\V|6W#>ph׼FY *dRݛbfNE ֢@qj><{aAg(횶ߛ *Mug/u.F$@,}Om~3({)mPe0x =YaF.I]7LèB-Ϟ16KD꟏vN%xzpQc_3:M3àk5rI hJd0w s|οm[I,y1jO* - k'1$&H΋ĩO<(Ԡ^Fm{ɋ:a9)5fPbPY>^(I`?k i6Zv"x4nu%~=$ S9tO~CrGn7`Jp`W0`\“pT8Y,{'"Fz >'[OɺSMSQn[p 4 ǎuLC$+f(&tJq_/07W2ꗗI)2݋PXxʭ^$O#P7Bv^R/"T,3=O`vv[[yq|ӫ4|qg?ޖsYA^~{xo]wiWl@ȵa[ke0'GV1L\Npʜ>x5eRVwQs~\ %5u4)>;VW[wh6<@#@aOPt-xb2^=?Xg@/ďzTކz8TʼfZX*㕪$t%_hl>c'1V^lS]wSBS^=uǴi^A"׸¿ǝRJtdxY) [/ar2}WdF| I^%(Gy,+L[&@T\r*ԖN8;<*_r 5[Aߋ<,+lSң6mְՊ8{{2'=q׫mQWG=^y{4>uG^hd'[4*3?^.~|7MJ>0a:u jT0zOV|7c_yN*0LcS~jkײ1^mgm9 ݉yᤡ? "C{uQU .^Sp! M3i{+M{5 Eoy"|ogQO/ t ·L"P$E|րq166#G_NUZ7YJ.66/s4(Sxgh9wd"fO93?C.0o@.u(RY'3g>x[~=u)8ళ~ (u xeVͤZKޗqd |gxcv (EɌuZ\sg|PgKGg-1+-yhFܳeJ 5"J9v&"U<3w׉T|.ڄ7ɡ:EHњ*Rl uBT }gsCÒg%Y_R'$~< 釙׼{>u>uTixSB‡KƆ=/? 
H~dw¬ʘ}`!boɚꃥ4yJ@%hiB>HMno6d/BUzL( X(W)f+Pnjw^F?JpVb hL63oƱf#JN P27"4:א8sV+?Y|+ܗ{hЇV+r8pTO?'|V7t|X ۿrFıҎ@%&yǏ7c?V\r9/}.7@- NsN7%}V S9L&Z&Ow13">YQ6hC *7L@_1YP2+n~6_GPIT)* rӽ$0.c*u-2(Go2>C;7rf m(FA;No mhvԀLᩗxc`]hSqdz瞇R[@{#ªcrNXɓWp*o7ѩ1/ΘjdWL&A2Ul bs fGǩSҥ>"Rbx'+<Iai)n(㦛;Z|yر;ނW4vvZ8~w ';8.1C L7[S~qbUhDܹVVb_Z5{_#"B#w@i(wu ?:l8esN`v2oam-{T.0Hcb7`YJc硍V'Z(t):qnA%` 3AJdFT7q$6AudkTR1djϾ] t9`*mum#ב2j.6USĦj+n7T<,Px}J5Df~&lʋʥJ5xes>ki`k&9&o=/5#A&7IԌ@? '$$ל"UiW]ًa5IIn Z6Or CŽ?ad ,$mX]M<&L2%w [+Žy^Jsv6zY#[7oAm,SS DPxVUվHٓT0~~~h?ԽCBLW󞼬S @oC))͙n lH>cok2+2c<i,U/pݓ|KT`ZPjQuIWdA3}P[1Fx(N൯]CUoweEKKe( s*)nڂ|Mh`"SJ8w~of1i=Ws-< kfŢíq|<`/{|n}G40= M4^mg"lnܗ4J%F!V&?dS=Azjfs&~sOᮻWC]#5x?nogL#Y{*D4~n8mhAl+ժ"u"\T& kMڻyGoU.yx@>創iE8oZ0Mރ(8,5c!Aπú䃑%ǻE'_[{ԃKR Z6 Mf}ہr#Kh}Z@:A8c\vD %D>NMA)$3Y||Ԩy_G8Pc2Ў#/7#9;'Rm\I9I $n*IM^kv &D nI<=~ܸ]g,Ȥ;$Ab?aXAH="3a9Gudl 3z+%|te%&o8m^My՝ C֫d"zHD&+hL |iK_Ad)X@_y TZuI~ϫNN^JR$8Xw>e6):~g|ٳ0Y4%e\n["7~@65 cY]=ʾN㓟 W؎[Tf n]U$",-y,-;7źKonggcTq nQ#`5k?4(wHǙs'Գ$[?A ?)ѧPO~bbwKK0!jka+dD"#a1,qF_$LJ!8ؗQ>,r_9fe ^+u<{ kgp6$x0y}Gbl'26c&AF ב1 QTZ_ES)Smg.;<_/^0Rt":je F3S ymOaԴ wOߘ:t6VN)m.5=LK'i w I!\zՇZ\s$*B9/`a z)LC2^rIԷa~d2nx};9>3OT{ 3eė< ܂7![| @!tDEθliUr , 'McY6 ,k>|Sfz +WL9hqڬYE08u}aa[n)GZ?pl^qNyԢ %,5k褘X}8SU˜Y>WV+rx<ҌQm+Gp<F#)ļ0xUH@uޏҥ.]~8=mi&u{?1A\gT8. .U;"`8!PƮp T/vҗxMh yhNw4`M[SOA=&-61 /& \5ոNEֻNZS<N)Z-tJn@{vlooIb G^c\wSq&FucyVv^U׳&GyB+>[W屛Q/wjxB{ NոsksQBb3~V;v{v$ FJ?Vw~z01,':1ɏ~^ ?y%tI߂5,|ogS1ˢV^rj'TB ]/I5 OzK8ؽ.sǚ$(JKAEhi[mx$"ffꘞn~uU@*hzH:Iv̢1 2Nls zUZw=4Ϛ,z`H(JZufӮ-|eueVr=۲Cڀ/΄&"ԯUi(uocuS(bѬ2ԌqhW<{W*MwP$ŜoT*i05O(i;:p%f UU5O3o 釨|C,!ܗ0,[]e2ȻvGVBVT*̫{"ܱJHp=YNTKv+h^^v#4uCTdkc^.L8CϪ=ٟ,)sͻc*PGڃc k_$Nܛ1Ջ^dNoM$QtJ2Xs;¿ϳ>e3xy)eM& h):T{QV"IaTE_'Gg#*)3&-2=Ɗ#|@pv:Y$` g1h߾ZILu<兾Z/%~Ad?ٿ ׼߾$Nג"uHN*#8DJ*qEj;L'o#_o7 XWeoF~ L&NhI΀lԙ7Y)u&s]2= 4 ̺S3 5;MI[2aӅW*^1 \ʼn"en>qsMW #2Nq.+EfkFs0,s?M@<uCWr#ι%F#>E<L(EjP0"g49~,^2G V!6/1`rLE$&7 J8coYGS *n1UDQP ] EWjInM\U4_m ڃ1ýɕ ܎2J%cÆW%ee0$DhhȊ(2YYMkQQrap 4Y9j'~`ui4*rO6&]ml.jNZo$"mh_zQQh4 ;Pym3TklC8&P>Rg;Iuٓ \WE7Š~ggٱ'=קg~Y~tk\k#I}C!U0^~hs%nG>2 յ6(Rx!Lf-r.NuG /%=—ӎTWO߅g49/qߒ]"He G^<9vC>​lT?Inq?≐9PE*O#)OE??p=2xThH&(k0'F&҂xYVj.E%x)Ԗ_%wmh裻T-}*1t, )\8u9 IDAT鯻xb}S 1 MӼW.r078v'E8@͞ϹXsqͰLN8H', Pzo^ZHЊ=+GsjQ[^v4py#M$0"#zDN *UcS%E<$ N sSI|IP¿SWp?Ԍ0,VDqJEm+6):U|WoON}J|k'~?2@rA$%\z}!0ejvoA 胂 zUKkcǼc<|'5sH.Uļ.ɸ Iבڒ 4gNN!vL0{D%]3#daMASwcVcV2ŗgЙ w-QJ^!L0z7=pZ\!ؘp!\'m!d@Ry7}eVpǛ7=A AZ楟UJ^[q"ހ冿=Wq)4Sԧ-YfC c(B2ƌsFL^_fQWDyAhr"],M ;TPf%p1l>9c8-v-̙(ZA`$.E߉YS5s 9]/Pe.Ch \A-P+xml L5d}U \(w9UR5kSF <{㚗I(rֶP{"BՖ1LlY gtaEDŽ=#4I@ڇt; vQFz^P_R{ zznz^h1`^c37 <}cﵝKt3֯w*-*: 3Mۦٲ Bh\Ճ]K9U%LabrEyy*fu'I:uAvh^Efq/1ĈS#9_XE</:^._+_I#(39V|jO MfOkn=iA´@ɘ/eٽuW9|7ᡇFrHLZg ʳ"p>9X眤7BYJ  mfy!<+yz0йZ@X.xL38Ozh}e4 ]g74cBbj7F)M 뢑)=½F- |A+aB*;5 ~ V!gy2i&S.=<[<ِ S\{UoܐJ"9NRwwb0!T~z^;#zᜪmTQ@:MsN7Xp.[=TLcT#7Bgr]>{jFk#v0*RI%LEFnata}0z;7~ JՠG>U`;$UPHW5?;IdЩ$]i&-15fՍ\̖W3ɪU:!yuއLwهL RbtxWp\玧MGqn6(&-1.=LQ7v}l.)3K@=wHC+ Y:-onw 5 UYcu]Ϛʓ0T9:ـ?㵎qLXԡ4Qߗb X1s(Mқh$UVz)POKEj0x*O ^ܓ?9Bw>z[f_ݮ޷ҿ)E#񣦺*M pj_RUy.f<H10{IF]Gmxu!Unj>طoЋ-S{ vS 0 3;r-|رNZ7Vaq ~$o~UAaiiO:/`k~>'ۮ;)$r^$Ru:@K XL`PG$bAEAE/-9TK'Ǡj@OvԇfbULPe{- ԣ!gYHb|cxq !SLc[D3^6{cSiK̺i<3FxA)ceҶtpR\d[yeƉ㤼g8s7uɴ-(&P%@sU@/׹mTmdꬹ%9`h朮up$4;}TMZf]4x""쐆AxVXZӵWE_~J D$ :qu@9f41V4*;)U,lto^RSGfy4N4:/IqAtL0"=0I n~Lam ǾO!:i5<3l\ Nh}|f՘}m;mnmpCw2Bu u*:lA{I rڒ8=}V#_H !"@EVҤF0;[dJt& /z5Eǫ^]Jfx{̙5lo?^[p.o8M]"CUÎ"3\gK n+S&TqJdtk hmuŐ$-a~.`Tsn.5*Woc]՚ZB]wN 񽉜Rz=9@G??m0>Hޠ F?$R/ Ơ FgevmD"zmSu&xi -srvo`=Ռ߆ګ|Ay#_GPd !_B5L+"n@śrxkr9"ԯAxjM(H/c2BAUFRgĬ {U Nʦ*e%Kn~'4g*X1}=B^b U0b ߹}ce ׹$0ycc |KX\\ƕ+ԧ&p87C7i " lTY;ugPU61ywB%~?o+j5.kJRݨC=1"%,ؕ8eS7 YS)t}$^TM۔OdH.)MEC1cRΊa}^|{~0^i$~t7Cdz5&8ث4c5mk}cvÔ~Y^aoA9ܫXK I+2ѐMIub 
UlzUv61zShՋ{g}ʎJ"dˀ })Z^Wx;>Pay-p2?~iv]rqՏC)P7Na:#5 DPpyhWcrگ!M[ߺb1?)wiY*@BTW"*YmPbC5UfD|pJۭ1x@LHJv~Yu%1}r?wRqy܋avŀ.@6Bb^q^Wio#ກE٫@;Ajʅ)"pB]ѣWp8~&n/; 㽗ܜH~isL29#CŢmJNCǪNc\Cc [hq  9k[*"B+&~w8; 7H au{ j\8'r"q+q g6֪XwfE¸)}DBqB_RZ{mbR~9_ϝ~[$]\Cy$J>9~g*9:14FϽj2PA}ӔB#P-T#cFҔ]4kUHS%O;~tCէ (יYks )nO@eE9b۩sН\6(9ϋ4=6g#Ջ4(9d*}'U4V*0 J3;ǐ~#9^؉CéIgȵe:].|&-K݊LPa҇@52Le\pTY) +oS^]ẞűi>P|'BHލx;ϻAKsO@X`a$[_fd=5Kx)y0`tD~y;>Aos5 Mr={:$@wq?l 1~~pHJsrJi$Mfи 4*<_fO"m?C 6!@ Ñ#O~tq,qU޼8TT8ny#}ICPh2,XqkxJef6'& 5R4`T5L\E\eQ_UTTmq@+1@cgyfvcyv^(.AG=M"2 =G]i7:>w~c,ER"Y+[~ Cwːcv:a0p;a4wǃ lGt@X6q iEVKMIbUWtsvkSw8wxCU{i>$Sk7]`K]m 9f\^&=-R&og 6N?P1-+Ad>; LIQj=΂mU#ʖQVD,.KǬ,^µEb}i{c8(n>($6[&'bBֆ.Ib2N8/1si TxNN kCE9rF<&3)uf,!_iGi4EY5+wvVi7iuv{0l/|p,.P^Lz\eֈ$L4]AL#sWy<e%2;B*z/ftQn!"Rs}zo㳟]>2#KA3K#@IYatFj>jJVs '!#֡]1Sr/my`CbP G!:?qn!L)tkX$8DXL$ɖ52Zk\VVV~<2 /\?g)o9"#hjgcDhy4n0Z-fɹ]R2 R#foZoIL,љ5es _W5@Rُ뼦`-uHGCKʼ}>)3" p ͖Y)Ks#l{5FKdE屫f݄̹G5m/ˌSzX AP㨛'E0XIgflɻL$~V :ʦ5Xtg%%3ܐi3 ksri ׳<o#Daum U >.PBO%ۈLQ-jEߡ>cp /WW{'A #t b ['h9&=bϻPV6cm2`˻\6ǼoGuk3Y%`zEr}~df6x\#ikA\'04٤s&+PGf{GoW~:=1ܼNn$PE}_9@BERC{9>m&{#BLM*yk^Aہbw}~GצY> z1*!xWSJ=55.f$YԪi|:x+wqe|Al,dQWD뢓 ND k̐&,SV͛i{n6~yҵM9nCYȔR:㉌։-]fx7y_,5:I|R#5nʔ M+Y|mrJ':=B{Kh-Ap6QTE:=ݒne{!*` Ԙ G`0"eUĿ 6!d@VW@9< 3`oou?asB2RO HnAhDE e\{ IDAT?A,!Һ7KAZ=59质,jN[}0 ZtGwJRFs5>I^#(ḇ"I!bj܅m|÷xdMgU<*#[-h#ֽWXlZ1Dd\okgq7@T"6 *1hQgf_JM΢2]"¨:~@ #_o/ζtY3{b:q+R3k?>5Ze?, c6{]ov0ϳ$+U8g"exo^Gmǎy1YoNĘ!fQ`,Θ0Xڛy. F'a@ol#D@!:}Sb"qUz*^|t FsHICQs 69!6|Soxg̿`U>ReK%cbZ1ZV}FJL|} 2F|o"}#6q%9κ)Iy]^/8|l}ǿq f. vUgYb66Cl43 ܷk8s&O W>K}ԫ&3-gyo'}W6H 3jvnԍ 8cB=.q4\U~ޒ(+X4:BYo9*٩@m.候MDƩAi]0neSӹ|QY:%_ܧBI.!h/AC?Sσ;7drxs> ;'㸅N_->I ,5(\Ʊ%1;I =ͼ+s˶W6y`^݀.y3&r.6D߹B,]:wogW4^ =mht8Sꇐ·@j̀?ez$Z5sND.!~%g8+།sgV D =~ok)%ɬ. 8~?m?M|+g\KӢGaY'-`q5l=AD#fg*pιEl.˦Kd]EyZm6K!Va_CdDߨ hN}CWYu JEB* lMϬг0r#YK^-5@V?w}<ǻX6!q3oDö?(9fe*5GWoҶ 6U毆=D~A;:DߋR}QdY{=CYvv5iŽ0u/b&w}MOg{啇nͽk<'K5~. U\DȒ}lk ;8>;ڧ#(i~6˩l}V_ c}L91Qр%UNH>Dz?]FfX}9-2?jg4*3 {J|^Z:&rgj $Q2mVUlz$A 2ՌS0 Lx\)K]+VZ[:ZQ |͂4֞)Q qs?rrv7ؘfemPb BG\?usAY]fhfkj2%EoHq%9[`CH$F)J່=(+,Q%]Y|]GAuFJ[]t S5e(dc%ymBSx 'PŹs)߽8.fZk*ZG(Zs[ ~*5]emDZX 7F{Kf23Hm5[-kT"uқQ'EMO_sc.d?Me<αGlo&8jytĒ 5S%w A,zQWgnu"(C\gVFP-PTVS <'>i HI{lį cDlAϱLcj@*UT]by: +ʎ<LjUi!lN#ķy}A!*C;V#-Ē 9tyARQ}.~zF/GuBrnj*."-q"/\d&c΋^oZƑCGP2?MeWDʥaUf9v80&8Jt'KLi+!Ocqk@+D,UVnb得z2%6eCݖUFgBy^ԖϢ0J0k4h9lVZEmԖAS;Qse)ff%y}l.}S2 Geqi,yKl gb]#UX3FSRv{YhvW zrsT0"0bѪ߃@ q_E m9\m_u\qQ`0Ӓ>yʦj`ފ#G'urԳ"Y& ȑLqQA降Ebd.f۔yZuQJ >6X/f.8$6eUX fe;C] SfV|,k\(W5%9zZH LFKrtkGef`i\wNy)?eZ/bf@lfeja*o18p>2F۳Yyw︒e3g0~*T. 'ɀԠG}D%픀,m'b2By]e}_Q5&7x9Đ~G(כx~me?_k|<5pIy"-A*ܥGD=]E%%gpjJHz_$ bkMw Eoy@eMԐ. κ>|'A(;xc_Cl!A4-|z:>'u}裸wp|{o[f4EzA(wٓ~={s>OMc&@ -0b c@3qƵީzQ# J*9.1 4=ٓMpJ4)wQ󙵮NQZ )V,^k O͚M mӱl< Ht շ}alV?mYhuzR`Y2(/M t.;'yJ-8%)<2w6OFY;95H21|] ѲHpk÷J/ph w/`>"*XжPTUmnⳟ?=^]|?O?bt ] `5:f]dF{$V1zƆYnUs筛 6U׋؛*%)dxZ蔬o"jS8&/9ޠC7;|댬kM#:u~ 83y pn?b9."{&B 8hӖك-2![UtmQmY+e=@}iTa ry'%!ꯨ"405{p`Ω=*8팣L`*g&!G$5ЉpyXRqF;6w[GÖ||)3̪6ibɡۤ3z\&~&=*9ȎsԸlPq<%R*yV]i^Uk7ƣ,ì5lY%lN+&(y6O46-+>,'e0-j=ym>m>*y$F*vxo`$^ND 1_gEřr#To8 c~ Pt>m_=OOB2_{џ<7C=nbtA`"hYx)t=Y+562/9g Df[3;>4lHY\3u,̱w|t0w.,ަzSŒojA碎wEŋ{x WЦb2_xȲKV~!(zcަ&hŒUڸGU6bn<ljroxN}F@㵥GY"sZ"\ _L5>^\k}&u,)mβAihdϳBypp^+j\óKCR&nP0,d3:kʚ6e<~UW͑3=3({oLdR2Wyw{<2 ?.. 
./㭸=iui'5h-y/#:w?`#G>&oaGc!\sX'0O%F|IsѵJb4@&ܡ!HרH&}uv5ӌ6ٳg?#g%<_|}k/^oB8xTs}~jbݬ#2TsbD0q|Q{dB\ᱟCUYꮋk.M[..ƤM q qsm{<ٳЇk_<O=wQ/}QAdZEwy"4/=W]byUfaFxE.80٭6mo#]:cR1"= ]"0kud&7Njb-Z\; S^QĖo8VAp.nTf^5p_mh 4݃^Gٞ)Rf28!ְYl^㨛oyLg0z!KprFH}P@}~xw~?΃@yZ̟HHN,;uy}R5Y޶(ҝľu^Pd1;<'>qȳwbQ+akv5|^} jr|qXp5lip+n?*vv>wk&9>D铈e^.s#|<|tc)SCĽtamD}^J: =m#cf }}虾*y]\Ӻ#1C+0:+RCui9W CQ2=z.^b˔5\ic1^=M^|Ǚ^ux|hULzB/Ig#;s 6k$`;Qٌ2L[zQf:vަylafdm#ė}}>_9Sz+v1*"_Q1%Xa p4pt:X_oG) oyі8sW/h IȲ?HZpEJ}#̪\EwN5q~M@> _FWZQ5  0>KwNS;0Nͬ^\;I 4XqL9OYe*6##;W_#يq d3zI.LKfuOu' f<#١ܲuGmӽmf]&";Sk)א$ww@G!Im! f5 bl3k#55f+[.m0KH}p@:4eM6iQSBCx8g|C4?f 3BnW}Ct xS ww5 Ȳ=΍w8)Q(' 6NlD]oao޹(ʗ!FU SF2ZlBzyT2'gh]̎=W?ɒ^5-5dyb->(-KC-okK+s/GJ}^U]f}*&kgG`!rj fn }AG>|6/ >'5=Ɠi˴qF5u㚼%fFٜH錣Ѥ w nHy{9B?@+J>fa}}٬1z\3.4 &&:o|u>L?](A5](Z35RxE^5.qI җlgGoqCFk&B㻄s AiZ.6lwLpטE><읥qY'TG`=H[bis/f7g4bJŒ=og5)W],_ 2u\tV|Ca%J뱤Y%L ,$S,kͅr;}!=R)yF*fҰhpa;?3 %?"v-kk4"{^U7)@L&4Q3al^dLxMn3h$;eX6l yQ;f[.G Ȯy0W <w IDAT8l(襨gGP2QM|TYf.]% wLw@<4PBJ+WD2.V=$5dƤމ-#Rخ#- gȗʽ.!Z˴8w},Qðy}{|w BCID!{HssNӷ] D=b)ek+]ۤtYeL]`89q Ap e"8]]lczX! wJٺja A"CP{d %lT5H]x.ɨsN|,ȽJ_0p|]nvѾٟ-b&m;? x߇{cȲsa |cO묲UF?woq: 6+Aḏ]g+.5ߗm?bm}aq] 11jŹSV"E-(`2cMz?Hz<X֞I>G>__Cॗ^,~w/Z5>4ɚt %X>Ԛx@Wts.&M'PSy\8{<^h\$>Ln /*W7ET!벁osv^tM4^k*RgWAEF)eAw2CxU6 5F[bB@ULkDzL4]ْ:m ]7צIew-C:@uƹ411F%^01Y =zZUqyS 1>"ӛԿ^tc|ΕŪxskn `<dXոG*[k٘+ 5$Yi=`sP9(yym 6=,pXi7ylv'fD6ϲzz5iR[WI)N#ƪe"ŪL_RQzE>d}l0u/,IMիR7cxWD /mILNQEl!Љq BO6Ik{!jOr[n1`5Dƫwyn89w7}s C!#HNGmMҟȌ#!5$'o]=/qfMQ0oOl`N lv^[A8W"&H[yjӹn*"kg&oKPV}>!^p^<~{'3s: >_Of2zG(+͂K<^6o}uF:eHpI & kئq2[$7M?tigytax2am*@wLsHD-kT.ѹĬAvrq.~oEBeZf+,/qR~i }]غ3Nm237ks?6X:+WPW 5;Qb6j2MD̊Զ5-vuP5 ,-=ԞN_DlO]4ec6*'*, q`@j7>>!`g?*'Yƹs`SV`@22x.k&&B#tJMT_ P}T6mcK <}~]Pe>MDEuf,U}#ιC?sUa5{A=& 0`K`EcT Ȯc@xYDm0@/v=l\k}?ѹDTX_3 P@˹EȮ 2o"F/!4X6m}n6B\Ʊi/X8bHiG"_!1~kZr>G=zt PIQHr P_5{MYihd\s]Tͼ8^yy+Q1q{8r@9R&ۦa~=>gyX^U;^5Y3q^E/ [e]Ubr}O4zGY{!ιUǸFPؤ)&G1,:u֕{zEV}"4͂ <$ [wvTKmp!Қ\7^=ϲw1.氌wV^g|!3Y!6 H{B*, $& aI/{w#Cm*{exF78O= I\;@Ge]&uNX+=|G[etm8~V}=Gf3](q$mHOz?G]#2)8љwdP|iϽʂHk]=<_E'kt=^s<&k7a[> $ L}Vy*gˤ$L:91;l<(,csd0Pros/׍;uC 0]ꇰ(:0UI+BMƖK`9O__6/ᓟ$\dB}+ya,3 6㑈~4sg";K:c_{G'LszY&B]%K&/b7Qug2+^EV$He8wAPu..Eۜs80s탬b^ނaBtDn5.iяw~u_rf_{sDy9g:%@*; d링 ʈP͜Wcp/f 6Үo;5msPKg\IQDoW_Dc;Rxo6޲/y*)pu!_GE 0En'<~19yv},bkZVW6ڎ*9أ ]Sq9_g}v䘼妶I0J]G^`M耴|cԢgxԖ`fB:#9X$ chjo>: j"UF<"@CoE!k:NkZ˾kR5?!_ESөkCUn[[olն|+&g֠>Оkm]rh2snhL.p YkffDiH^=-u^6K"gIC\$9{]WZ 52=}(g2E{k}xuOzYO(˲ygmjel7aR"0>-6kRIŬ6cA1ol5'؜Lٜ QkƬ!JϨ}zT6O>= ]1͘`פkOFi3= aL2:cʱPIN1w,TnxVUBUǾ4OM#٪.yOEc4X~3Y0יs %Ǡ͌IꃒAOPڥ*CZmmpWא0uQeUdc3~z7s-z{4]r0ܡCjMDa;gtHI Jw"syF#Q:LSqk9껨4=,rpzҺ1tsb.!+ j.^<Jb˹T;3QY1\hn:8ITprQtqU7p,{v[wNgej$`El.l~lÏyX$zND Z9Hy%l.\ y^w_&B[6`3E˲yYe<@"ys6:~y*40IY#涙" ,ͮ| \4|dx3<ޞh5z*1"d z.v^!XVsJ|dDeCCǒD'4v['Ezo#9)^E[A,5RTUBoVa[VNSc!l<}-9%&3f3'rg"}t< J*W&=_{ EijAyX:J.Lsh9LR߇f&bsǽ"2o=\bkYKGrkkYNr .w.vypQ`e<k[qEoepݴ$BlqʈΕ`qj3f7ţmEٌm}n/q-_8uv[f7Yo tU.[f+͸+%s#zJLdXbl`t*5g{1\+.j~(#SAHJzXSݺh74ef=4N1y$ۧEDɕ)ɲ6lƒl 6ϛQ%28B!J+_˴l3f/fͣ2|yl&CHNߏp0|^8&z pD#1g,, 0:(lwCt\-ҡ]]Ex}GeH>:Jp>`;|d%l!0 =[>ʶm̪cSʜ*g6൮6QE 9v`tc?3j#)?lvi!MC=co"uU{I:ј*z1?V&yM5>2inhM{\ۺ AJח7N\c)R6X7Ĕc8E,;+ f4,K>{2meIP~32/|7وfLπ+¦]&$ˠ%[4 B)Ұd4tMMIzRu.2%Qoaellso/x"3.wjfW UDJt9&-[̉=^1z: 5i \?861X$5HӴc>+QƔ=O&Vn!~ ΒV~Rewsc5Ԫ|[ϙ}G_y)ݗ7w9Y۲l6u̓fa9-G}(k?1q|"3ڜN9G6 VSLB2f{Qm4*"=kT E(ù!Ҷ:r/!5[ЪPL@ 7"Y\Dh~zi@][sJJӻf^.?ޅb֠am~:%l\S`Hxa4Xe:7gWPhׄ3eIs(8%qYm<ρT]s jQNӖ%xp7zuR12cenHoؽm~Nu*J1.K kt.NgTG1:Dh}FNQ8кM v ЃJm\b@hsKK"b|xNK{ȯr<"d8W0a%\o9PbuUC㚚"_yNոkA|DpiὊVZaO9ꕙK`˴4Ȫc JLs8HEm85嫗jb4W$(_u}̿lN6 _pƁLQg6v8VWn"4\ߗ89e*;m{/.:.GКh,b͖&CՂc6,E"}`ǂضoDFGs3VM㨥.z)LIDATsX0^C^CfjAs1K%˙qpab rou~e`0ĝbsM\gfI#OK}-&Z*Yf7fNPt 9w6#Xfh"Jis9-Q?r. 
dipy-0.13.0/doc/_templates/ dipy-0.13.0/doc/_templates/layout.html {% extends "!layout.html" %} {% set title = 'Dipy' %} {% block rootrellink %}

  • Home
  • Overview
  • Gallery
  • Download
  • Subscribe
  • Developers
  • Cite  
  • {% endblock %} {% block extrahead %} {% endblock %} {% block header %} {% endblock %} {# This block gets put at the top of the sidebar #} {% block sidebarlogo %}

    Site Navigation

    NIPY Community

    {% endblock %} {# I had to copy the whole search block just to change the rendered text, so it doesn't mention modules or classes #} {%- block sidebarsearch %} {%- if pagename != "search" %} {%- endif %} {# The sidebarsearch block is the last one available in the default sidebar() macro, so the only way to add something to the bottom of the sidebar is to put it here, at the end of the sidebarsearch block (before it closes). #} {%- endblock %} dipy-0.13.0/doc/api_changes.rst000066400000000000000000000075041317371701200163100ustar00rootroot00000000000000============ API changes ============ Here we provide information about functions or classes that have been removed, renamed or are deprecated (not recommended) during different release circles. Dipy 0.13 Changes ----------------- No major API changes. **Notes** ``dipy.viz.fvtk`` module will be deprecated on release 0.14. Use ``dipy.viz.ui`` instead. ``dipy.io.trackvis`` module will be deprecated on release 0.14. Use ``dipy.io.streamline`` instead. Dipy 0.12 Changes ----------------- **Dropped support for Python 2.6*** It has been 6 years since the release of Python 2.7, and multiple other versions have been released since. As far as we know, Dipy still works well on Python 2.6, but we no longer test on this version, and we recommend that users upgrade to Python 2.7 or newer to use Dipy. **Tracking** ``probabilistic_direction_getter.ProbabilisticDirectionGetter`` input parameters have changed. Now the optional parameter ``pmf_threshold=0.1`` (previously fixed to 0.0) removes directions with probability lower than ``pmf_threshold`` from the probability mass function (pmf) when selecting the tracking direction. **DKI** Default of DKI model fitting was changed from "OLS" to "WLS". The default max_kurtosis of the functions axial_kurtosis, mean_kurtosis, radial_kurotis was changed from 3 to 10. **Visualization** Prefer using the UI elements in ``dipy.viz.ui`` rather than ``dipy.viz.widgets``. **IO** Use the module ``nibabel.streamlines`` for saving trk files and not ``nibabel.trackvis``. Requires upgrading to nibabel 2+. Dipy 0.10 Changes ----------------- ** New visualization module** ``fvtk.slicer`` input parameters have changed. Now the slicer function is more powerfull and supports RGB images too. See tutorial ``viz_slice.py`` for more information. **Interpolation** The default behavior of the function `core.sphere.interp_rbf` has changed. The default smoothing parameter is now set to 0.1 (previously 0). In addition, the default norm is now `angle` (was previously `euclidean_norm`). Note that the use of `euclidean_norm` is discouraged, and this norm will be deprecated in the 0.11 release cycle. **Registration** The following utilty functions from ``vector_fields`` module were renamed: ``warp_2d_affine`` is now ``transform_2d_affine`` ``warp_2d_affine_nn`` is now ``transform_2d_affine_nn`` ``warp_3d_affine`` is now ``transform_3d_affine`` ``warp_3d_affine_nn`` is now ``transform_3d_affine_nn`` Dipy 0.9 Changes ---------------- **GQI integration length** Calculation of integration length in GQI2 now matches the calculation in the 'standard' method. Using values of 1-1.3 for either is recommended (see docs and references therein). Dipy 0.8 Changes ---------------- **Peaks** The module ``peaks`` is now available from ``dipy.direction`` and it can still be accessed from ``dipy.reconst`` but it will be completelly removed in version 0.10. **Resample** The function ``resample`` from ``dipy.align.aniso2iso`` is deprecated. 
Please, use instead ``reslice`` from ``dipy.align.reslice``. The module ``aniso2iso`` will be completely removed in version 0.10. Changes between 0.7.1 and 0.6 ------------------------------ **Peaks_from_model** The function ``peaks_from_model`` is now available from ``dipy.reconst.peaks`` . Please replace all imports like :: from dipy.reconst.odf import peaks_from_model with :: from dipy.reconst.peaks import peaks_from_model **Target** The function ``target`` from ``dipy.tracking.utils`` now takes an affine transform instead of a voxel sizes array. Please update all code using ``target`` in a way similar to this :: img = nib.load(anat) voxel_dim = img.header['pixdim'][1:4] streamlines = utils.target(streamlines, img.get_data(), voxel_dim) to something similar to :: img = nib.load(anat) streamlines = utils.target(streamlines, img.get_data(), img.affine) dipy-0.13.0/doc/cite.rst000066400000000000000000000063361317371701200147750ustar00rootroot00000000000000 Publications ============== [1] Garyfallidis E, Brett M, Amirbekian B, Rokem A, van der Walt S, Descoteaux M, Nimmo-Smith I and Dipy Contributors (2014). `Dipy, a library for the analysis of diffusion MRI data. `_ Frontiers in Neuroinformatics, vol.8, no.8. [2] Garyfallidis E, Brett M, Nimmo-Smith I (2010), “Fast Dimensionality Reduction for Brain Tractography Clustering”, 16th Annual Meeting of the Organization for Human Brain Mapping. [3] Garyfallidis E, Brett M, Tsiaras V, Vogiatzis G, Nimmo-Smith I (2010), “Identification of corresponding tracks in diffusion MRI tractographies” Proc. Intl. Soc. Mag. Reson. Med. 18 [4] Correia M.M, Williams G.B, Yeh F-C, Nimmo-Smith I, Garyfallidis E (2011), “Robustness of diffusion scalar metrics when estimated with Generalized Q-Sampling Imaging acquisition schemes”, Proc. Intl. Soc. Mag. Reson. Med. 19 [5] Chamberlain SR, Hampshire A, Menzies LA, Garyfallidis E, Grant JE, Odlaug BL, Craig K, Fineberg N, Sahakian BJ (2010), “Reduced brain white matter integrity in trichotillomania: a diffusion tensor imaging study.” Arch Gen Psychiatry 67(9):965-71 [6] Garyfallidis E, Brett M, Amirbekian B, Nguyen C, Yeh F-C, Olivetti E, Halchenko Y, Nimmo-Smith I (2011), "Dipy - a novel software library for diffusion MR and tractography", 17th Annual Meeting of the Organization for Human Brain Mapping. [7] Yeh F-C, Wedeen VJ, Tseng WY (2010), "Generalized Q-Sampling Imaging", IEEE Trans. Med. Imaging. [8] Garyfallidis E, Brett M, Correia M.M, Williams G.B, Nimmo-Smith I. (2012), "QuickBundles, a method for tractography simplification", Frontiers in Neuroscience, 6 (175). [9] Garyfallidis E, Cote M-A, Rheault F, Sidhu J, Hau J, Petit L, Fortin D, Cunanne S, Descoteaux M, `Recognition of white matter bundles using local and global streamline-based registration and clustering. ` [10] Garyfallidis E, Ocegueda O, Wassermann D, Descoteaux M. `Robust and efficient linear registration of white-matter fascicles in the space of streamlines. ` [11] Rokem A, Yeatman JD, Pestilli F, Kay KN, Mezer A, et al. (2015), `Evaluating the Accuracy of Diffusion MRI Models in White Matter. ` [12] Ocegueda O, Dalmau O, Garyfallidis E, Descoteaux M, Rivera M, `On the computation of integrals over fixed-size rectangles of arbitrary dimension. ` [13] Rafael Neto Henriques, Ariel Rokem, Eleftherios Garyfallidis, Samuel St-Jean, Eric Thomas Peterson, Marta Morgado Correia, ReScience volume 3, issue 1, article number 2, 2017 `[Re] Optimization of a free water elimination two-compartment model for diffusion tensor imaging. 
` A note on citing our work -------------------------- * The main reference citation for Dipy is [1]. * If you are using QuickBundles method please also cite [8]. * If you are using track correspondence also cite [3]. * If you are using Generalized Q-sampling please also cite [7]. dipy-0.13.0/doc/conf.py000066400000000000000000000173741317371701200146220ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # dipy documentation build configuration file, created by # sphinx-quickstart on Thu Feb 4 15:23:20 2010. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # Doc generation depends on being able to import dipy try: import dipy except ImportError: raise RuntimeError('Cannot import dipy, please investigate') from distutils.version import LooseVersion import sphinx if LooseVersion(sphinx.__version__) < LooseVersion('1'): raise RuntimeError('Need sphinx >= 1 for numpydoc to work correctly') # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.append(os.path.abspath('sphinxext')) # -- General configuration ----------------------------------------------------- # We load the nibabel release info into a dict by explicit execution rel = {} execfile(os.path.join('..', 'dipy', 'info.py'), rel) # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.mathjax', 'sphinx.ext.ifconfig', 'sphinx.ext.autosummary', 'math_dollar', # has to go before numpydoc 'numpydoc', 'github'] # ghissue config github_project_url = "https://github.com/nipy/dipy" # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8' # The master toctree document. master_doc = 'index' # General information about the project. project = u'dipy' copyright = u'2008-2016, %(AUTHOR)s <%(AUTHOR_EMAIL)s>' % rel # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = rel['__version__'] # The full version, including alpha/beta/rc tags. release = version # Include common links # We don't use this any more because it causes conflicts with the gitwash docs #rst_epilog = open('links_names.inc', 'rt').read() # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. #unused_docs = [] # List of directories, relative to source directory, that shouldn't be searched # for source files. 
exclude_trees = ['_build', 'examples'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. html_theme = 'sphinxdoc' # The style sheet to use for HTML and HTML Help pages. A file of that name # must exist either in Sphinx' static/ path, or in one of the custom paths # given in html_static_path. html_style = 'dipy.css' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {'index': 'indexsidebar.html'} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. # Setting to false fixes double module listing under header html_use_modindex = False # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = '' # Output file base name for HTML help builder. 
htmlhelp_basename = 'dipydoc' # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'dipy.tex', u'dipy Documentation', u'Eleftherios Garyfallidis, Ian Nimmo-Smith, Matthew Brett', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # Additional stuff for the LaTeX preamble. latex_preamble = """ \usepackage{amsfonts} """ # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_use_modindex = True # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {'http://docs.python.org/': None} dipy-0.13.0/doc/dependencies.rst000066400000000000000000000010661317371701200164720ustar00rootroot00000000000000.. _dependencies: ============ Dependencies ============ Depends on a few standard libraries: python_ (the core language), numpy_ (for numerical computation), scipy_ (for more specific mathematical operations), cython_ (for extra speed), nibabel_ (for file formats; we require version 2.1 or higher) and h5py_ (for handling large datasets. Optionally, it can use python-vtk_ (for visualisation), matplotlib_ (for scientific plotting), and ipython_ (for interaction with the code and its results). cvxpy is required for some modules. .. include:: links_names.inc dipy-0.13.0/doc/devel/000077500000000000000000000000001317371701200144065ustar00rootroot00000000000000dipy-0.13.0/doc/devel/coding_style_guideline.rst000066400000000000000000000135331317371701200216550ustar00rootroot00000000000000.. _coding_style_guideline: =========================== DIPY Coding Style Guideline =========================== The main principles behind dipy_ development are: * **Robustness**: the results of a piece of code must be verified systematically, and hence stability and robustness of the code must be ensured, reducing code redundancies. * **Readability**: the code is written and read by humans, and it is read much more frequently than it is written. * **Consistency**: following these guidelines will ease reading the code, and will make it less error-prone. * **Documentation**: document the code. Documentation is essential as it is one of the key points for the adoption of DIPY as the toolkit of choice in diffusion by the scientific community. Documenting helps clarifying certain choices, helps avoiding obscure places, and is a way to allow other members *decode* it with less effort. * **Language**: the code must be written in English. Norms and spelling should be abided by. ------------ Coding style ------------ DIPY uses the standard Python `PEP8 `_ style to ensure the readability and consistency across the toolkit. Conformance to the PEP8 syntax is checked automatically when requesting to push to DIPY. There are `software systems `_ that will check your code for PEP8 compliance, and most text editors can be configured to check the compliance of your code with PEP8. 
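For example, one such check can be run from Python itself. The following sketch is only an illustration and rests on two assumptions that this guideline does not state: that the third-party ``pycodestyle`` package is installed, and that ``dipy/core/geometry.py`` is merely a stand-in for whichever file you are editing::

    import pycodestyle

    # Build a PEP8 style guide with the default settings and check one file.
    style_guide = pycodestyle.StyleGuide()
    report = style_guide.check_files(['dipy/core/geometry.py'])

    # total_errors counts every PEP8 violation found in the checked files.
    print('PEP8 violations found: %d' % report.total_errors)

Any equivalent tool (``flake8``, an editor plugin, etc.) provides the same information; the point is simply to run such a check before pushing.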
Beyond the aspects checked, as a contributor to DIPY, you should try to ensure that your code, including comments, conform to the above principles. ------------- Documentation ------------- DIPY uses `Sphinx `_ to generate documentation. We welcome contributions of examples, and suggestions for changes in the documentation, but please make sure that changes that are introduced render properly into the HTML format that is used for the DIPY website. DIPY follows the `numpy docstring standard https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt>`_ for documenting modules, classes, functions, and examples. The documentation includes an extensive library of `examples `_. These are Python files that are stored in the ``doc/examples`` folder and contain code to execute the example, interleaved with multi-line comments that contain explanations of the blocks of code. Examples demonstrate how to perform processing (segmentation, tracking, etc.) on diffusion files using the DIPY classes. The code is intermixed with generous comments that describe the former, and the rationale and aim of it. If you are contributing a new feature to DIPY, please provide an extended example, with explanations of this feature, and references to the relevant papers. If the feature that you are working on integrates well into one of the existing examples, please edit the ``.py`` file of that example. Otherwise, create a new ``.py`` file in that directory. Please also add the name of this file into the ``doc/examples/valid_examples.txt`` file (which controls the rendering of these examples into the documentation). Additionally, DIPY relies on a set of reStructuredText files (``.rst``) located in the ``doc`` folder. They contain information about theoretical backgrounds of DIPY, installation instructions, description of the contribution process, etc. Again, both sets of files use the `reStructuredText markup language `_ for comments. Sphinx parses the files to produce the contents that are later rendered in the dipy_ website. The Python examples are compiled, output images produced, and corresponding ``.rst`` files produced so that the comments can be appropriately displayed in a web page enriched with images. Particularly, in order to ease the contribution of examples and ``.rst`` files, and with the consistency criterion in mind, beyond the numpy docstring standard aspects, contributors are encouraged to observe the following guidelines: * The acronym for the Diffusion Imaging in Python toolkit should be written as **DIPY**. * The classes, objects, and any other construct referenced from the code should be written with inverted commas, such as in *In DIPY, we use an object called ``GradientTable`` which holds all the acquisition specific parameters, e.g. b-values, b-vectors, timings and others.* * Cite the relevant papers. Use the *[NameYear]* convention for cross-referencing them, such as in [Garyfaillidis2014]_, and put them under the :ref:`references` section. * Cross-reference related examples and files. Use the ``.. _specific_filename:`` convention to label a file at the top of it. Thus, other pages will be able to reference the file using the standard Sphinx syntax ``:ref:`specific_filename```. * Use an all-caps scheme for acronyms, and capitalize the first letters of the long names, such as in *Constrained Spherical Deconvolution (CSD)*, except in those cases where the most common convention has been to use lowercase, such as in *superior longitudinal fasciculus (SLF)*. 
* As customary in Python, use lowercase and separate words with underscores for filenames, labels for references, etc. * When including figures, use the regular font for captions (i.e. do not use bold faces), unless otherwise required for a specific text part (e.g. a DIPY object, etc.). * When referring to relative paths, use the backquote inline markup convention, such as in ``doc/devel``. Do not add the greater-than/less-than signs to enclose the path. """ .. _references: References ---------- .. [Garyfallidis2014] Garyfallidis E, Brett M, Amirbekian B, Rokem A, van der Walt S, Descoteaux M, Nimmo-Smith I and Dipy Contributors (2014). `Dipy, a library for the analysis of diffusion MRI data. `_ Frontiers in Neuroinformatics, vol.8, no.8. """dipy-0.13.0/doc/devel/commit_codes.rst .. _commit-codes: Commit message codes --------------------- Please prefix all commit summaries with one (or more) of the following labels. This should help others to easily classify the commits into meaningful categories: * *BF* : bug fix * *RF* : refactoring * *NF* : new feature * *BW* : addresses backward-compatibility * *OPT* : optimization * *BK* : breaks something and/or tests fail * *PL* : making pylint happier * *DOC*: for all kinds of documentation related commits * *TEST* : for adding or changing tests * *STYLE* : PEP8 conformance, whitespace changes etc that do not affect function. So your commit message might look something like this:: TEST: relax test threshold slightly Attempted fix for failure on windows test run when arrays are in fact very close (within 6 dp). Keeping up a habit of doing this is useful because it makes it much easier to see at a glance which changes are likely to be important when you are looking for sources of bugs, fixes, large refactorings or new features. dipy-0.13.0/doc/devel/gitwash/ dipy-0.13.0/doc/devel/gitwash/branch_dropdown.png
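A practical payoff of the commit message codes above is that the project history becomes easy to filter. The snippet below is only a sketch of that idea; it assumes a local git checkout of DIPY, and ``^BF`` is just an example pattern::

    import subprocess

    # Print the subject lines of commits whose message starts with the "BF"
    # (bug fix) code; swap the pattern for any other code, e.g. "^NF" or "^DOC".
    log = subprocess.check_output(
        ['git', 'log', '--oneline', '--extended-regexp', '--grep=^BF'])
    print(log.decode())

The same filtering works directly on the command line with ``git log --grep``; the codes are what make such a pattern reliable.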
[Binary PNG image data for ``doc/devel/gitwash/branch_dropdown.png`` and ``doc/devel/gitwash/branch_list_compare.png`` omitted.]
HׅvSisbH,M(.Ub4)hlXqoSI~ʯBhiM Ӓ=`[:b}wvx#_XP;`D 2cb& W fBqstx>r0Ex~ n ؀&fKş¢ZRU& Դ~`; )R!PQ]v39QҚ| UžP C (GH`laJ69нfq&BˁS.Z[5I.-;Cb-1F6<Ѐz]Ť<Il|g7YU[5*Rm*.⭫zUl nl6 hGz-!$.*6_ԊOVQG_Azi`^F86*؋ H`+Jcf\ӡNSgڰ(؈Ѓ^zܞ/'lJOa{]K$%` ^X9oC'_E!,uwV[XPj4r[;nЗl*qCo?nTa()jqnVuѦ} VR 7et zon\ui}74äT}TCpx[)|h܊:qN`WIk6Ǚv^r"0~6 Ź6ErO"e穀hsFwg9R Ԟ͵޸fU }XSܜ]yok% 37'jX!tY95&MKHuCiwS>`Ex<F V>FB {o檞'ȴhx] /~d=јPw1(\{T~-;dkYWx<8qr  !,, ?soUTUU8k3뉫)XN WqĮO捾ju`&[O,L46ޙO5_Vg6 哩 ٷAyi걱|1k*x)aXo7C80Mmg?Cu|5lj¼SD8|.<oԌכylku賯|_ËRVb0d;߫,vt&: 8nt'JP5nc ԂӉON.&4 !įwq<&IqTU ???6mz˝}6)Rt&ޖq귟yjT[k靦RM'=niUY%dU_:⿿]ˇ)LpD[8≯6YvXuW|UpЦRWYp 8p6޸306GܷJ[QyT&=oh˒x7JuxDڲ{R/槊:  -D9& 搐3$$8c g0aL3jk -Kk2K+, FQqEh4bXkֿ[njM,~7rޖMm+QA]IOs MlY (A`7|`y[jN8M@ۺ7Є!x2VPɫp࿷vN-8UK#meaUaފqJ7{Y9xya 1G>-f(pnB\5k^bygd(Y15]8^w#_OB(G9 iF\:#wy$3\ըed}Ŵp }6r%uᄧnz-:_.Jmu]ѺWІ1|ҁBN)ѱT}v1}`8>k5ԻWO]]PΩ~?gQ(8uP}s+h+k xfh.^2.ךb}]_Q]5Z/wUT.ժZ"!MHXɱ&Bb<{UUU5k3+9ZNg|lʔDw N _!!(χwB\-~k̘1(J /ԩSQVK>}NQ5ϥZFu;Zy ,fV-SNhO9_BQ5~gm@P&'jB{kzuẚ.H `K iZM_C{3fk5?Db`432.&ՅSc[PXsx(wԌvEC\E~׆sB!~?SO??W_}bt:o4|V^ݸy#+0~$}>0%q5Naݻ7a^F.b;q+1woy+yav7VBUU̓&3ݛ[|-6TUxv:N 7@U+INySne\T>?vMuƸy}_^E͓߼VJݠnݛ\'y 4+;h0p2@dlf`qCdTi׿O?`dbʔ)`09r$ .l0 5g!CRY0J*T@`2-[͍'SN"яf<,cғ<'~GS"$3۹Խ(F p>]3i}'7Mw/N#4iszt{%O$5yͩvZNgaaAm۵;_Į n2omQE)?{hMZFsitD6OF!>`^W;Y`vUUt' BCuU{\MU?m 1s8;ᶩd(fl7o#ۄ^}L&O_Oe'׵"7?Cܴ3+wQS}/6<q"ϬB!9/vr{2eoѭ0gǼ;י:(#w_]?&w¤QyLo& (z t#'y lPb<)h_wX6!iZ}yޖh%@5qo=OߟeZ]wzhB#7 !75{n"##Oŋcj9r$/`ȑ 毙qvva jFHӆ}̗NqQU.f!cힲ<Չ% @⩢܃S7_ACW܎oՕ>K rVr*ǨBGBꡦVrOU.Z.:y~:km z7.g<tҠʈQ*\3U=칳\2BqI Qߤ4Z\<~L].fտ?`yV71czg}iЇsIW@ɜ{Qy_ݭjxsxj LJOȶoEF?ى6RKv٩dI -|eX9gP!)<@;kϾo4Gxor <7<8n,%$qe7#o fi:U2` Ӡg3ɺYG8[R}&3n]mc{1Ww?;pZn"3 }B|t$1j>/O샓cG>^Z@ؑ{o0o%A[mE!1>z7ulR-JeQ7c-%UyY|4c~*>>w>1l]Xz\KѱpQ+ea>Lۺa߄ZFņ14Cók^Q$FCAeέMb4GrΥ܊泏l-Q/!s.]V>\N_5XYUn(sIP`/bkn[HC  ^c c&MeRJm|<~bԩoƚo UV0}JÄ$sm&?o/~k3ld&ȮT4G~{VR?Uֲ߮~O FLOiEӺH^ mOY`EǤkIRb8)3eTw4LC9OG._) 1uX/|IҪqՏ9Xshңvj>v擣.*t?k[槂J m;F5pURxFh 땈Fc_*̲V@+ |j@Bh4x<$ќqW%Zll3 %¤S,H`l[$0tP\duk.7=eCΠt!TEV@㧥d<ݺBCѨ`wGQbڸs@NAui#C! +IDAT<(8 uB ӌL`jU!`=OqW ;cV}OG_);o,yo,WnEMmL̘ "#Nnl}NYDMFmejQ~FIvaA!R[ OLe)Ir >1+b]bLиP ԢH7Ѥd3KW"QT~4rwJ[.eox8.Ն`Fud׵5q򯢴Nʍ,_N Wb.]7c-#d2I0.@UU'5Wo/bop}=QfPJ69нfqɦBƝp ),n]"44e9]@]Beh}qDMlYWet<32zJ茮8jиb`Uix ˹[y[ &ob/ȺI(ɣiֲ"=G_ \NhJT0(wFP@URR&"-/&5B!c z=3xCJi9wS[3Y[E_W 4Sо8:ϡLe+!)=3x{g9w9&\I/>H@2OαT@lu7ثM%a]ߩ\|Y(D h5 ^So'ʑÒm2D] qFp5Lk1vh]J0wm?*c+.*hNNq:\6 h- !ğ͛7y9%fTV?ȽڳvlƭtKaӆTOnȢgoO& 3bIK[a Z20&d%zZ:QRHf⽽Иaf|2PУ!덄| sly{S2xi_ȂSj/yl z3HayL4×EBYMB>n0gl[[vBZ̨赚}[5113ãi@H`Gՠ3,{9Zz& d;s=RW"1a&w }#WGID~/aׂ,$#`oN̰'Ƒ3u}=6|-UCHՒڦyXhrt nFVE'z$X6ѥ}8JV M/A+md_.찔bui3j`2,%EX &s:`:z)ưuSQR7O=Φk&a9QInk 'p,!aZqL5q8pi:5;,Tu[)=aEu[+8aDx/a`EC(7騢܎`=a'ǵ^Y㸴!`Ȧa+ =PuSܑ[8M wn>t= p=#hYn%Q7Bdk ;Xl#6{eh:eђzwOLɜվ/7X'QZ O{FOݣ F#e !݋q#?l<9;¯YMI<9(Asѳ];evA!Os[ƈPcۉZdB+<\V 6M A%B!n#7[3)D8lŹYdnR' =naPg-$B!B]!B]!B!NEF/B! !B!B! !B:/Q֭[!I!2B!,I!B!B! !B!B! !B!B!DW!BItB! hE$IENDB`dipy-0.13.0/doc/devel/gitwash/configure_git.rst000066400000000000000000000113001317371701200214250ustar00rootroot00000000000000.. _configure-git: =============== Configure git =============== .. _git-config-basic: Overview ======== Your personal git configurations are saved in the ``.gitconfig`` file in your home directory. 
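If you want to see what (if anything) is already configured before you make changes, you can list your current global settings from the command line; this works with any reasonably recent git::

    git config --global --list

The commands described below simply add to, or overwrite, whatever this listing shows.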
Here is an example ``.gitconfig`` file:: [user] name = Your Name email = you@yourdomain.example.com [alias] ci = commit -a co = checkout st = status stat = status br = branch wdiff = diff --color-words [core] editor = vim [merge] summary = true You can edit this file directly or you can use the ``git config --global`` command:: git config --global user.name "Your Name" git config --global user.email you@yourdomain.example.com git config --global alias.ci "commit -a" git config --global alias.co checkout git config --global alias.st "status -a" git config --global alias.stat "status -a" git config --global alias.br branch git config --global alias.wdiff "diff --color-words" git config --global core.editor vim git config --global merge.summary true To set up on another computer, you can copy your ``~/.gitconfig`` file, or run the commands above. In detail ========= user.name and user.email ------------------------ It is good practice to tell git_ who you are, for labeling any changes you make to the code. The simplest way to do this is from the command line:: git config --global user.name "Your Name" git config --global user.email you@yourdomain.example.com This will write the settings into your git configuration file, which should now contain a user section with your name and email:: [user] name = Your Name email = you@yourdomain.example.com Of course you'll need to replace ``Your Name`` and ``you@yourdomain.example.com`` with your actual name and email address. Aliases ------- You might well benefit from some aliases to common commands. For example, you might well want to be able to shorten ``git checkout`` to ``git co``. Or you may want to alias ``git diff --color-words`` (which gives a nicely formatted output of the diff) to ``git wdiff`` The following ``git config --global`` commands:: git config --global alias.ci "commit -a" git config --global alias.co checkout git config --global alias.st "status -a" git config --global alias.stat "status -a" git config --global alias.br branch git config --global alias.wdiff "diff --color-words" will create an ``alias`` section in your ``.gitconfig`` file with contents like this:: [alias] ci = commit -a co = checkout st = status -a stat = status -a br = branch wdiff = diff --color-words Editor ------ You may also want to make sure that your editor of choice is used :: git config --global core.editor vim Merging ------- To enforce summaries when doing merges (``~/.gitconfig`` file again):: [merge] log = true Or from the command line:: git config --global merge.log true .. _fancy-log: Fancy log output ---------------- This is a very nice alias to get a fancy log output; it should go in the ``alias`` section of your ``.gitconfig`` file:: lg = log --graph --pretty=format:'%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%cr) %C(bold blue)[%an]%Creset' --abbrev-commit --date=relative You use the alias with:: git lg and it gives graph / text output something like this (but with color!):: * 6d8e1ee - (HEAD, origin/my-fancy-feature, my-fancy-feature) NF - a fancy file (45 minutes ago) [Matthew Brett] * d304a73 - (origin/placeholder, placeholder) Merge pull request #48 from hhuuggoo/master (2 weeks ago) [Jonathan Terhorst] |\ | * 4aff2a8 - fixed bug 35, and added a test in test_bugfixes (2 weeks ago) [Hugo] |/ * a7ff2e5 - Added notes on discussion/proposal made during Data Array Summit. 
(2 weeks ago) [Corran Webster] * 68f6752 - Initial implimentation of AxisIndexer - uses 'index_by' which needs to be changed to a call on an Axes object - this is all very sketchy right now. (2 weeks ago) [Corr * 376adbd - Merge pull request #46 from terhorst/master (2 weeks ago) [Jonathan Terhorst] |\ | * b605216 - updated joshu example to current api (3 weeks ago) [Jonathan Terhorst] | * 2e991e8 - add testing for outer ufunc (3 weeks ago) [Jonathan Terhorst] | * 7beda5a - prevent axis from throwing an exception if testing equality with non-axis object (3 weeks ago) [Jonathan Terhorst] | * 65af65e - convert unit testing code to assertions (3 weeks ago) [Jonathan Terhorst] | * 956fbab - Merge remote-tracking branch 'upstream/master' (3 weeks ago) [Jonathan Terhorst] | |\ | |/ Thanks to Yury V. Zaytsev for posting it. .. include:: links.inc dipy-0.13.0/doc/devel/gitwash/development_workflow.rst000066400000000000000000000327611317371701200230730ustar00rootroot00000000000000.. _development-workflow: #################### Development workflow #################### You already have your own forked copy of the dipy_ repository, by following :ref:`forking`. You have :ref:`set-up-fork`. You have configured git by following :ref:`configure-git`. Now you are ready for some real work. Workflow summary ================ In what follows we'll refer to the upstream DIPY ``master`` branch, as "trunk". * Don't use your ``master`` branch for anything. Consider deleting it. * When you are starting a new set of changes, fetch any changes from trunk, and start a new *feature branch* from that. * Make a new branch for each separable set of changes |emdash| "one task, one branch" (`ipython git workflow`_). * Name your branch for the purpose of the changes - e.g. ``bugfix-for-issue-14`` or ``refactor-database-code``. * If you can possibly avoid it, avoid merging trunk or any other branches into your feature branch while you are working. * If you do find yourself merging from trunk, consider :ref:`rebase-on-trunk` * Ask on the `dipy mailing list`_ if you get stuck. * Ask for code review! This way of working helps to keep work well organized, with readable history. This in turn makes it easier for project maintainers (that might be you) to see what you've done, and why you did it. See `linux git workflow`_ and `ipython git workflow`_ for some explanation. Consider deleting your master branch ==================================== It may sound strange, but deleting your own ``master`` branch can help reduce confusion about which branch you are on. See `deleting master on github`_ for details. .. _update-mirror-trunk: Update the mirror of trunk ========================== First make sure you have done :ref:`linking-to-upstream`. From time to time you should fetch the upstream (trunk) changes from github:: git fetch upstream This will pull down any commits you don't have, and set the remote branches to point to the right commit. For example, 'trunk' is the branch referred to by (remote/branchname) ``upstream/master`` - and if there have been commits since you last checked, ``upstream/master`` will change after you do the fetch. .. _make-feature-branch: Make a new feature branch ========================= When you are ready to make some changes to the code, you should start a new branch. Branches that are for a collection of related edits are often called 'feature branches'. Making an new branch for each set of related changes will make it easier for someone reviewing your branch to see what you are doing. 
Choose an informative name for the branch to remind yourself and the rest of us what the changes in the branch are for. For example ``add-ability-to-fly``, or ``buxfix-for-issue-42``. :: # Update the mirror of trunk git fetch upstream # Make new feature branch starting at current trunk git branch my-new-feature upstream/master git checkout my-new-feature Generally, you will want to keep your feature branches on your public github_ fork of dipy_. To do this, you `git push`_ this new branch up to your github repo. Generally (if you followed the instructions in these pages, and by default), git will have a link to your github repo, called ``origin``. You push up to your own repo on github with:: git push origin my-new-feature In git >= 1.7 you can ensure that the link is correctly set by using the ``--set-upstream`` option:: git push --set-upstream origin my-new-feature From now on git will know that ``my-new-feature`` is related to the ``my-new-feature`` branch in the github repo. .. _edit-flow: The editing workflow ==================== Overview -------- :: # hack hack git add my_new_file git commit -am 'NF - some message' git push In more detail -------------- #. Make some changes #. See which files have changed with ``git status`` (see `git status`_). You'll see a listing like this one:: # On branch ny-new-feature # Changed but not updated: # (use "git add ..." to update what will be committed) # (use "git checkout -- ..." to discard changes in working directory) # # modified: README # # Untracked files: # (use "git add ..." to include in what will be committed) # # INSTALL no changes added to commit (use "git add" and/or "git commit -a") #. Check what the actual changes are with ``git diff`` (`git diff`_). #. Add any new files to version control ``git add new_file_name`` (see `git add`_). #. To commit all modified files into the local copy of your repo,, do ``git commit -am 'A commit message'``. Note the ``-am`` options to ``commit``. The ``m`` flag just signals that you're going to type a message on the command line. The ``a`` flag |emdash| you can just take on faith |emdash| or see `why the -a flag?`_ |emdash| and the helpful use-case description in the `tangled working copy problem`_. The `git commit`_ manual page might also be useful. #. To push the changes up to your forked repo on github, do a ``git push`` (see `git push`_). Ask for your changes to be reviewed or merged ============================================= When you are ready to ask for someone to review your code and consider a merge: #. Go to the URL of your forked repo, say ``http://github.com/your-user-name/dipy``. #. Use the 'Switch Branches' dropdown menu near the top left of the page to select the branch with your changes: .. image:: branch_dropdown.png #. Click on the 'Pull request' button: .. image:: pull_button.png Enter a title for the set of changes, and some explanation of what you've done. Say if there is anything you'd like particular attention for - like a complicated change or some code you are not happy with. If you don't think your request is ready to be merged, just say so in your pull request message. This is still a good way of getting some preliminary code review. Some other things you might want to do ====================================== Delete a branch on github ------------------------- :: git checkout master # delete branch locally git branch -D my-unwanted-branch # delete branch on github git push origin :my-unwanted-branch (Note the colon ``:`` before ``test-branch``. 
See also: http://github.com/guides/remove-a-remote-branch Several people sharing a single repository ------------------------------------------ If you want to work on some stuff with other people, where you are all committing into the same repository, or even the same branch, then just share it via github. First fork dipy into your account, as from :ref:`forking`. Then, go to your forked repository github page, say ``http://github.com/your-user-name/dipy`` Click on the 'Admin' button, and add anyone else to the repo as a collaborator: .. image:: pull_button.png Now all those people can do:: git clone git@githhub.com:your-user-name/dipy.git Remember that links starting with ``git@`` use the ssh protocol and are read-write; links starting with ``git://`` are read-only. Your collaborators can then commit directly into that repo with the usual:: git commit -am 'ENH - much better code' git push origin master # pushes directly into your repo Explore your repository ----------------------- To see a graphical representation of the repository branches and commits:: gitk --all To see a linear list of commits for this branch:: git log You can also look at the `network graph visualizer`_ for your github repo. Finally the :ref:`fancy-log` ``lg`` alias will give you a reasonable text-based graph of the repository. .. _rebase-on-trunk: Rebasing on trunk ----------------- Let's say you thought of some work you'd like to do. You :ref:`update-mirror-trunk` and :ref:`make-feature-branch` called ``cool-feature``. At this stage trunk is at some commit, let's call it E. Now you make some new commits on your ``cool-feature`` branch, let's call them A, B, C. Maybe your changes take a while, or you come back to them after a while. In the meantime, trunk has progressed from commit E to commit (say) G:: A---B---C cool-feature / D---E---F---G trunk At this stage you consider merging trunk into your feature branch, and you remember that this here page sternly advises you not to do that, because the history will get messy. Most of the time you can just ask for a review, and not worry that trunk has got a little ahead. But sometimes, the changes in trunk might affect your changes, and you need to harmonize them. In this situation you may prefer to do a rebase. rebase takes your changes (A, B, C) and replays them as if they had been made to the current state of ``trunk``. In other words, in this case, it takes the changes represented by A, B, C and replays them on top of G. After the rebase, your history will look like this:: A'--B'--C' cool-feature / D---E---F---G trunk See `rebase without tears`_ for more detail. To do a rebase on trunk:: # Update the mirror of trunk git fetch upstream # go to the feature branch git checkout cool-feature # make a backup in case you mess up git branch tmp cool-feature # rebase cool-feature onto trunk git rebase --onto upstream/master upstream/master cool-feature In this situation, where you are already on branch ``cool-feature``, the last command can be written more succinctly as:: git rebase upstream/master When all looks good you can delete your backup branch:: git branch -D tmp If it doesn't look good you may need to have a look at :ref:`recovering-from-mess-up`. If you have made changes to files that have also changed in trunk, this may generate merge conflicts that you need to resolve - see the `git rebase`_ man page for some instructions at the end of the "Description" section. There is some related help on merging in the git user manual - see `resolving a merge`_. .. 
_recovering-from-mess-up: Recovering from mess-ups ------------------------ Sometimes, you mess up merges or rebases. Luckily, in git it is relatively straightforward to recover from such mistakes. If you mess up during a rebase:: git rebase --abort If you notice you messed up after the rebase:: # reset branch back to the saved point git reset --hard tmp If you forgot to make a backup branch:: # look at the reflog of the branch git reflog show cool-feature 8630830 cool-feature@{0}: commit: BUG: io: close file handles immediately 278dd2a cool-feature@{1}: rebase finished: refs/heads/my-feature-branch onto 11ee694744f2552d 26aa21a cool-feature@{2}: commit: BUG: lib: make seek_gzip_factory not leak gzip obj ... # reset the branch to where it was before the botched rebase git reset --hard cool-feature@{2} .. _rewriting-commit-history: Rewriting commit history ------------------------ .. note:: Do this only for your own feature branches. There's an embarassing typo in a commit you made? Or perhaps the you made several false starts you would like the posterity not to see. This can be done via *interactive rebasing*. Suppose that the commit history looks like this:: git log --oneline eadc391 Fix some remaining bugs a815645 Modify it so that it works 2dec1ac Fix a few bugs + disable 13d7934 First implementation 6ad92e5 * masked is now an instance of a new object, MaskedConstant 29001ed Add pre-nep for a copule of structured_array_extensions. ... and ``6ad92e5`` is the last commit in the ``cool-feature`` branch. Suppose we want to make the following changes: * Rewrite the commit message for ``13d7934`` to something more sensible. * Combine the commits ``2dec1ac``, ``a815645``, ``eadc391`` into a single one. We do as follows:: # make a backup of the current state git branch tmp HEAD # interactive rebase git rebase -i 6ad92e5 This will open an editor with the following text in it:: pick 13d7934 First implementation pick 2dec1ac Fix a few bugs + disable pick a815645 Modify it so that it works pick eadc391 Fix some remaining bugs # Rebase 6ad92e5..eadc391 onto 6ad92e5 # # Commands: # p, pick = use commit # r, reword = use commit, but edit the commit message # e, edit = use commit, but stop for amending # s, squash = use commit, but meld into previous commit # f, fixup = like "squash", but discard this commit's log message # # If you remove a line here THAT COMMIT WILL BE LOST. # However, if you remove everything, the rebase will be aborted. # To achieve what we want, we will make the following changes to it:: r 13d7934 First implementation pick 2dec1ac Fix a few bugs + disable f a815645 Modify it so that it works f eadc391 Fix some remaining bugs This means that (i) we want to edit the commit message for ``13d7934``, and (ii) collapse the last three commits into one. Now we save and quit the editor. Git will then immediately bring up an editor for editing the commit message. After revising it, we get the output:: [detached HEAD 721fc64] FOO: First implementation 2 files changed, 199 insertions(+), 66 deletions(-) [detached HEAD 0f22701] Fix a few bugs + disable 1 files changed, 79 insertions(+), 61 deletions(-) Successfully rebased and updated refs/heads/my-feature-branch. and the history looks now like this:: 0f22701 Fix a few bugs + disable 721fc64 ENH: Sophisticated feature 6ad92e5 * masked is now an instance of a new object, MaskedConstant If it went wrong, recovery is again possible as explained :ref:`above `. .. 
include:: links.inc dipy-0.13.0/doc/devel/gitwash/dot2_dot3.rst000066400000000000000000000012511317371701200204060ustar00rootroot00000000000000.. _dot2-dot3: ======================================== Two and three dots in difference specs ======================================== Thanks to Yarik Halchenko for this explanation. Imagine a series of commits A, B, C, D... Imagine that there are two branches, *topic* and *master*. You branched *topic* off *master* when *master* was at commit 'E'. The graph of the commits looks like this:: A---B---C topic / D---E---F---G master Then:: git diff master..topic will output the difference from G to C (i.e. with effects of F and G), while:: git diff master...topic would output just differences in the topic branch (i.e. only A, B, and C). dipy-0.13.0/doc/devel/gitwash/following_latest.rst000066400000000000000000000014641317371701200221670ustar00rootroot00000000000000.. _following-latest: ============================= Following the latest source ============================= These are the instructions if you just want to follow the latest DIPY source, but you don't need to do any development for now. The steps are: * :ref:`install-git` * get local copy of the `dipy github`_ git repository * update local copy from time to time Get the local copy of the code ============================== From the command line:: git clone git://github.com/nipy/dipy.git You now have a copy of the code tree in the new ``dipy`` directory. Updating the code ================= From time to time you may want to pull down the latest code. Do this with:: cd dipy git pull The tree in ``dipy`` will now have the latest changes from the initial repository. .. include:: links.inc dipy-0.13.0/doc/devel/gitwash/forking_button.png000066400000000000000000000314441317371701200216220ustar00rootroot00000000000000PNG  IHDR]Vl8E pHYs   IDATx]|Tv7ݴMN]&O  X(("}`@z!!@$ I6{wHŐ ww|s,JlG#i7p8t"p8Dn ͛p8tp8Dn ͛p8tp8D۪SMYVܺu jN:H;Ц22{wG(7o#77}Z+6/_18+{22+ߔ'Tnn.\\\z)H~cCEҿ++3^KX,`B`UC(s*Ҁ{/22'7#G>F},\>ë˗ sݿ/^v;s+p[cF5nGd2{VVՍÙ3gh"A0'OFvٱ߹nɊd-ڕzzz:mۆ9s=33ž鯬C~0~Ure$!nm,W#q(gǝ݌XlP844\K7n' USֽ__\] \d 222@h[/Aadr]3\ĻTnJo8aӻ%&52_VN߫nV䭄ha9r0`0vލM6 駟.ڙ٦z$])Fg,Zn.P:٠3Uӕ+17sBo aMM0V9EGի3f S`f Bq]_w34&k%dAG=pS*V999;w.Fc$o0}tNANj̈́B!@<E~=ֹG ~Jk6hubC}@#S1@H³^gȌn1j˕+W" AAA5kؽS6> |oZlY`ݖ>,uh ؽę}&|~4Ú`{a{hi_}UΘ(d]ٳÐD74'Xfm Z`zd ޚ0Ԥ/{q٫!Lq)5(J/1+pũK|K+bէƏ>ƺ9Bc>GSޝfDND?](f[<kcc\wviprr\*씝6vTJ%m㿳b%/Q8`[BLjo?@ ew26؆c޻E'6W  _B8'^m6@m_ϚA'|%=s}]j8HGǦ`wgiSbȜy8|b7r$*&t+|Nc;#p헏P:GJ+.2. Ô)S'|RHc_}=Z6P(QKؘUN|N6ؤlEqbs!ES0nl"\CoBvn>lB{o6IBѽs#-F!#ؾ= D*βă84I!V<.#\iZe?`(l 6 >َ; pOoʼnU %/8. D/8˨㻘vn'[UH+w-_|@l1b9&N(rY-47{=Q/W[n4b`vq=b,bpu&=\Ip`g8ً+V!JJ@Gc?ExF(>1JO 2/s@~λvH4rl.ӻ-,Ĩc#U"~T|bBǽzDؗLrK@硫]+d)J ('Y<3nʲJZLeE;)yc3QiXʊYc,^A>,#El %I_ɎƙHB# <$W|,S. FqF{-.-R=F?zGzys> 2ȭ0.峂dSr%b>\f1eiggg和ܸhME(p]_ ĶC`zP__cgA ~U%%Yt5q˞ՁÈ!kl;HdiLט\.8>n0y: aPK%:`#"DJ.])b#P|cl,:hՂNB"C^~.n,"NG.zc2jb7Xƶň % ^.nJ?DKVfʃ,/S)].`:2= ]EXE>H{ Xt`qܹϟ/={~xxx7Do*|]k=&teAN:mj>:EO![N>EqwpJx CqḬlRR4TƋ k ¯Ɍr тTRF0r,zp9pG%4oo~)dߞn ƅ*X攝D4Ta3'XJB%Qj`.KPˋ9i M&[Yp{yq,Y\0*9a=!aON:`AB,l"mӏWr~ţcah|4$ꔠ`Bo3 {e5;Q#m.擏810I"'S33~^fӘah#18mkSp`rGZ(D}woo5+p_ +9[Kc;91czꅤ$pq>|*OJJ\ uX*:e> Q+ƌ8c1>[f(h^ de 76ѯ=.]a6 y_Bzϡg#3.7 1P-FbM&'c3Иz >5<( /^ Kj(ǸO1wٟ|e=?6M~A q`lѩS'!XYJN&&<9soVC_=똧s-? !:bs". 
ۿϛ/R􅊔ʫQБ=ˋs ONS.VM>‘tװ}ؿe"_^5^оMNָ.W 6C':ep[ȶ)o۠m]h c'q&iI>7`=p!xhVEφ2HGa{T*{ӾߊVC&&iɲv.p)2br-ˈYk:NgϞ &y @L W?}@+8w4¤sщfF 'ڲ3S9HF@O4)PZMh1Q̞0 pQ!E/G ز`sa&^~fXh4|]D7pTZ͛z/^T 53ĉ,"{e[C}#+N!vr_b\ \*w2l%7=gadaR.^H7n\A ޗ$ - Lj,::Sycr67'nAp_A+Y'A>0|1ާWg`ΆH<'x*A_IS"PNkcoǓf!#&Ǿo,ċt(r3i"m7с{ 55 N\D"R"G"_gRdU2VޖSqI=?r2eȶD]Rhew pMyhZ=#hӓ&6-P*/w;R\\=i&UZg&&3|=)L5@mh`+  =R\V;ӍZˡP J|MKKCHHH_+믿^Ph•ijrJ@J>K gOBC.gGZ-LdyzcW q!pKa1h_vPtE^5km+d4FAwJd`iǝg]7,)H/n@1 7Ȝ!w;CFq{Eƫ-ɭpÇZt+IF zm vAFZXԆAThO";l \vHftoTJ6PSwRrƀMvSfΪ%ҒU# ŋ.,VJ~C4T*R\VKe0fRفٯ*,^l2FF?qd__fޖ"*'ov(JH*VZ`E½<^?olz[wG8 7Z18'{322];aVun 1ǡ" +#3~{.ɍxq9.5s_VMv6Gy~ǘPʹȌ);+G>E|S>4G#-8]GN"IN#-8]GN"IN#-8]GN"IN#-8]GN"IN#-8]GN"IN#-8]GN"IN#-8]GN"IN#-8]GN"IN#-8]GN"IN#-km.G#s8yyyչAs8Bj y.G#P'plur|G6nmp,t9@ju&G#Pgn=8G#Ppҭ y@E9@[:o#pKΊ# [69:lʔ)ӫ:fD}$U-\OhZ #r4՛t.Y?p"ͭHbѦ…xe!-/_]oxDE"QxR5|\ing8]ܢ>d:jpv]9kMDMTMtȼQ Rq!1psqxÔeɦK/m804sayF ,YWcu DMkYaIߢ#GwgSp&"V(OaRa_7`?ķ"lQk\Nȸr+~ك[LW^w CQ#OCj$6l(=㥁Q8}KV}6"öduSLꫛj2dFaǎs4*ծŞp݋xUkشgM3k[KӜ[@)xl&!͟#c ZʧN0k"lV CDҁHt`12^$XL6]1wV7 R~ҟtp vGxcʳh+8f6$w'Ӟ*Y̮Pa܊9<=4/}k |MP[D??|e_ϫO˘0qHsK<1TO=$_jF¾q,^ v2b(Zkǖ}r 4n=26`Q`mBG*s[Ȃ͕'-/@Ett /2Dz%D;yxa` ",DYKTU8k$褰!awU1Hv͌ m}1v0 oÑ/w||}%DϨ=4ƀ^a”W81ra4ޱ O›<`p4]һ>-%t'`!'W'|t59*֯ .!H?1~@S$Dg!'0ok8[2m h  \ Nmؓe1rHsIYvPTѾC!,z >b0Z+q`8-4&5IȒ*i5f^GVB>T0K6z%6.sQ/C1bpONᗅ{`0(wʕЭPk:R6FhBd":Ҥ%زy%y\]4eBDGiK$[HUu1#е-#|֟"ԄHw#Ȉˑκ˩(ɇwסA#?S{h'\}(YQY0F 6p}Q#,|4_`IšP7t8 F- םi._i&Mg@LAPiySpխwm$QMՔ|rU{ KHI%-C1\ˆt˰)W2P Md,a#T:Qx 9ߦZ\s'LfA>[5&Mo£W'Q B " q1't"񮐑QKY˷yyUDC;"ڤB O/;Çggd \'!2<Ύe q1pUzbCå'iV#KxM¬ 4)7֞ z[ѭ[s@v)]BKKS*읤VF6fIVbs&4莽!ȴ6sg sx[9l0p#3ѡl̀ JgAR3 F3dMKdYìuXlr_udMȻ!?f|`IF(ĥg"W$ꎩS?YJڶrlsçtܳk&R0,bE<=;$bTv_F2+ԭ(rxȿoo!+a[I[.`EC/3aĠIDATF6`!vdI#:T ZT=$&{+-O^#XA>c 01x(`61ʠ\g/IkM"݊J/.\EK[4i@_ĥ,훉hZ5T=:df?ʒ47n@{mFL-\-yM4X>'6icTr9lthJGN}gr>\u#O/.d':F6$vgp&70eڹ;j?B;W*s=lɇ#m5Х>d aF~0 ޭ >e"4XPG^Cf1f`D=Ef@@61X?{26CmukhDii/( ;H#$2h)aQz#1hfˉ YKO im,?-(aoak&*"HI+q0<ӔR 5$ ! /Hw"\4~1߯u@9LGAeñ+7KE{KȲ!:ՆaP،ɡе\:1oJݣ`ީoȥcu bBGzBWZ $^!g67: c tf q$&_p=IB-4"{8W趒#zlBZ%RhEk5!Gk4(F'4 ͪe}-.PuKV`s@|^+D=.xێ.#6)Ei* i,<ĢFڡ)Vjq%|;-Phk&v!_ޱ㈊?oSc0HT#[ Tcv$zھ-OFd3[ 7PѷڊS.#!Uqe8pƍ4諔F`#I žHػvñq6RMdÉq*җHXgk1d mmT@R)[X vI2!d0˒yÍpv;0 if" YidD!YӬ3▧PgJȓ{%!rP]PN!CV:X[iAxx2BAvͰoWf:4Ul$+Š"RgCoڦ-4p/HgQacfANDf5.],$ &&ӡ&] 9 B 5$ە_xaI6ƢBKm&=?߹OgQ!xE= :Ǹsss^6'_3Y0I!6€aKm|T[0hj:B4R⥧GgB/AT}۰!IiCۋ+\FlpJUw ]qOҤ" t:Y~, F"/ĨehE@ m:HMh{Q}lD17R4ep&mq+C[D/]V q[xN-lH7ɔ *49v0i{= hN=)B !~2rHXXB=4΂gyXFH8 E +EP _-("Cv|ċT͚ةKHS_Xf&Y8vEI 9h dP]z :.'M;D-ċccwM˓ 歡ƉYrm߁_* ++pCtJߍҰ ,Y{U!aeҲ^4{F&G4Y\XciG%hأTV:@kv i׈Z6e7wع2VڤP߉~©rkbhr\Tn]_fc-jxV4hۊ&sVj\?a$N'WRdt;$dLV;[GYJG~4d%ŚG&F֣t]k#X):j_`pgx!$7Uy`X{ARؙ@c=ca&ůN%ȅ:(h6Wь"H[Oؿ E,3N^'7O¼6'[KUDt ?gc JtA`V>rV:~^O_$ڙ9匉gѧoZNuIUti $3KB;fA4.ApT#z5Act9EY* <HSI_4!4Kװh vb-!3iGךp! hEYqڴ\#It[0vcX=p)>)/)m'dۆ?>b]yOM\1phmO}P/+^5h5p4W]paزO֠&6ȅY*\6dg#UZ,)#a=MpǘIHOeߙ²4b+CflZ [z?? = ^]111 V%߅ ɕTwWVWNt^MŐ@oZ+\_"o_=4w1#ڒ0PxEKwDǚѵ]؏Sm3hڈ'RT*ꀓ™ɂ#n䅕8tyS`QZіᓸ9d2`9+L>MΡ LeMKvGv]+hRQφƒUO6fdUC88~+e=9ܝc[ȍKA !6w沸tƂہ ^Bf;W5H^C?K4 (NidQ8)TӬFnYE,tH樗.Ɓ]tp"W]) rA5*`9ɹ^|PZf>NÛ"nNHf:tc DE)Y~f|JMr+sjQU/QxCub|M. [Г޴"2QJ((/DYYEI?vX0&)<|}lVAlK ZJד_ |-f'DbYt`&wV+|UtKĂ'YwZ:H4* J %ʂnTN@aW;%x1paLͿUSg My::WY:Ҕ*_wY(Ț%pYFrZ<b7ɇ~?MehdE{ix*# hSId3}n÷coK[95w?]sN W9ёӎOQ١܂2V2rWᏠk3t@ -݂ɢX;l :+Jݙ\ڛ\s"Ǵ&&}/o/[^@[f%~5[,A\GFm˲feK'\Vd"kTirMn .Mƿ;8su*$TVY䋑poo kJ U-ro~Z p/gRUkiKJf$h.kj)$\]'q0tJ 7NGA^Ąv} &O Lj_DUXʪӒ(c3|WںhkpYIݮ`e )V2d 3YB^5 )qXE%W[{ {ѿDWҲ4} zTYx|# UC5[:TI/_wrȭpM~XL[,E1w<$!!sJM p8@8G#P>U2V~}<#pA[Ó8@u#-FpA[Ó8@u#Iq8rrIGd-N$IENDB`dipy-0.13.0/doc/devel/gitwash/forking_hell.rst000066400000000000000000000021731317371701200212540ustar00rootroot00000000000000.. 
_forking: ====================================================== Making your own copy (fork) of DIPY ====================================================== You need to do this only once. The instructions here are very similar to the instructions at http://help.github.com/forking/ |emdash| please see that page for more detail. We're repeating some of it here just to give the specifics for the `dipy`_ project, and to suggest some default names. Set up and configure a github account ===================================== If you don't have a github account, go to the github page, and make one. You then need to configure your account to allow write access |emdash| see the ``Generating SSH keys`` help on `github help`_. Create your own forked copy of dipy_ ====================================================== #. Log into your github account. #. Go to the `dipy`_ github home at `dipy github`_. #. Click on the *fork* button: .. image:: forking_button.png Now, after a short pause and some 'Hardcore forking action', you should find yourself at the home page for your own forked copy of `dipy`_. .. include:: links.inc dipy-0.13.0/doc/devel/gitwash/git_development.rst000066400000000000000000000003401317371701200217700ustar00rootroot00000000000000.. _git-development: ===================== Git for development ===================== Contents: .. toctree:: :maxdepth: 2 forking_hell set_up_fork configure_git development_workflow maintainer_workflow dipy-0.13.0/doc/devel/gitwash/git_install.rst000066400000000000000000000011071317371701200211160ustar00rootroot00000000000000.. _install-git: ============= Install git ============= Overview ======== ================ ============= Debian / Ubuntu ``sudo apt-get install git-core`` Fedora ``sudo yum install git-core`` Windows Download and install msysGit_ OS X Use the git-osx-installer_ ================ ============= In detail ========= See the git page for the most recent information. Have a look at the github install help pages available from `github help`_ There are good instructions here: http://book.git-scm.com/2_installing_git.html .. include:: links.inc dipy-0.13.0/doc/devel/gitwash/git_intro.rst000066400000000000000000000010241317371701200206010ustar00rootroot00000000000000============== Introduction ============== These pages describe a git_ and github_ workflow for the dipy_ project. There are several different workflows here, for different ways of working with DIPY. This is not a comprehensive git reference, it's just a workflow for our own project. It's tailored to the github hosting service. You may well find better or quicker ways of getting stuff done with git, but these should get you started. For general resources for learning git, see :ref:`git-resources`. .. include:: links.inc dipy-0.13.0/doc/devel/gitwash/git_links.inc000066400000000000000000000063641317371701200205430ustar00rootroot00000000000000.. This (-*- rst -*-) format file contains commonly used link targets and name substitutions. It may be included in many files, therefore it should only contain link targets and name substitutions. Try grepping for "^\.\. _" to find plausible candidates for this list. .. NOTE: reST targets are __not_case_sensitive__, so only one target definition is needed for nipy, NIPY, Nipy, etc... .. git stuff .. _git: http://git-scm.com/ .. _github: http://github.com .. _github help: http://help.github.com .. _msysgit: http://code.google.com/p/msysgit/downloads/list .. _git-osx-installer: http://code.google.com/p/git-osx-installer/downloads/list .. 
_subversion: http://subversion.tigris.org/ .. _git cheat sheet: http://github.com/guides/git-cheat-sheet .. _pro git book: http://progit.org/ .. _git svn crash course: http://git-scm.com/course/svn.html .. _learn.github: http://learn.github.com/ .. _network graph visualizer: http://github.com/blog/39-say-hello-to-the-network-graph-visualizer .. _git user manual: http://schacon.github.com/git/user-manual.html .. _git tutorial: http://schacon.github.com/git/gittutorial.html .. _git community book: http://book.git-scm.com/ .. _git ready: http://www.gitready.com/ .. _git casts: http://www.gitcasts.com/ .. _Fernando's git page: http://www.fperez.org/py4science/git.html .. _git magic: http://www-cs-students.stanford.edu/~blynn/gitmagic/index.html .. _git concepts: http://www.eecs.harvard.edu/~cduan/technical/git/ .. _git clone: http://schacon.github.com/git/git-clone.html .. _git checkout: http://schacon.github.com/git/git-checkout.html .. _git commit: http://schacon.github.com/git/git-commit.html .. _git push: http://schacon.github.com/git/git-push.html .. _git pull: http://schacon.github.com/git/git-pull.html .. _git add: http://schacon.github.com/git/git-add.html .. _git status: http://schacon.github.com/git/git-status.html .. _git diff: http://schacon.github.com/git/git-diff.html .. _git log: http://schacon.github.com/git/git-log.html .. _git branch: http://schacon.github.com/git/git-branch.html .. _git remote: http://schacon.github.com/git/git-remote.html .. _git rebase: http://schacon.github.com/git/git-rebase.html .. _git config: http://schacon.github.com/git/git-config.html .. _why the -a flag?: http://www.gitready.com/beginner/2009/01/18/the-staging-area.html .. _git staging area: http://www.gitready.com/beginner/2009/01/18/the-staging-area.html .. _tangled working copy problem: http://tomayko.com/writings/the-thing-about-git .. _git management: http://kerneltrap.org/Linux/Git_Management .. _linux git workflow: http://www.mail-archive.com/dri-devel@lists.sourceforge.net/msg39091.html .. _git parable: http://tom.preston-werner.com/2009/05/19/the-git-parable.html .. _git foundation: http://matthew-brett.github.com/pydagogue/foundation.html .. _deleting master on github: http://matthew-brett.github.com/pydagogue/gh_delete_master.html .. _rebase without tears: http://matthew-brett.github.com/pydagogue/rebase_without_tears.html .. _resolving a merge: http://schacon.github.com/git/user-manual.html#resolving-a-merge .. _ipython git workflow: http://mail.scipy.org/pipermail/ipython-dev/2010-October/006746.html .. other stuff .. _python: http://www.python.org .. |emdash| unicode:: U+02014 .. vim: ft=rst dipy-0.13.0/doc/devel/gitwash/git_links.txt000066400000000000000000000070171317371701200206050ustar00rootroot00000000000000.. This (-*- rst -*-) format file contains commonly used link targets and name substitutions. It may be included in many files, therefore it should only contain link targets and name substitutions. Try grepping for "^\.\. _" to find plausible candidates for this list. .. NOTE: reST targets are __not_case_sensitive__, so only one target definition is needed for nipy, NIPY, Nipy, etc... .. PROJECTNAME placeholders .. _PROJECTNAME: http://neuroimaging.scipy.org .. _`PROJECTNAME github`: http://github.com/nipy .. _`PROJECTNAME mailing list`: https://mail.python.org/mailman/listinfo/neuroimaging .. nipy .. _nipy: http://nipy.org/nipy .. _`nipy github`: http://github.com/nipy/nipy .. _`nipy mailing list`: https://mail.python.org/mailman/listinfo/neuroimaging .. ipython .. 
_ipython: http://ipython.scipy.org .. _`ipython github`: http://github.com/ipython .. _`ipython mailing list`: http://mail.scipy.org/mailman/listinfo/IPython-dev .. dipy .. _dipy: http://nipy.org/dipy .. _`dipy github`: https://github.com/nipy/dipy .. _`dipy mailing list`: https://mail.python.org/mailman/listinfo/neuroimaging .. git stuff .. _git: http://git-scm.com/ .. _github: http://github.com .. _github help: http://help.github.com .. _msysgit: http://code.google.com/p/msysgit/downloads/list .. _git-osx-installer: http://code.google.com/p/git-osx-installer/downloads/list .. _subversion: http://subversion.tigris.org/ .. _git cheat sheet: http://github.com/guides/git-cheat-sheet .. _pro git book: http://progit.org/ .. _git svn crash course: http://git-scm.com/course/svn.html .. _learn.github: http://learn.github.com/ .. _network graph visualizer: http://github.com/blog/39-say-hello-to-the-network-graph-visualizer .. _git user manual: http://www.kernel.org/pub/software/scm/git/docs/user-manual.html .. _git tutorial: http://www.kernel.org/pub/software/scm/git/docs/gittutorial.html .. _git community book: http://book.git-scm.com/ .. _git ready: http://www.gitready.com/ .. _git casts: http://www.gitcasts.com/ .. _Fernando's git page: http://www.fperez.org/py4science/git.html .. _git magic: http://www-cs-students.stanford.edu/~blynn/gitmagic/index.html .. _git concepts: http://www.eecs.harvard.edu/~cduan/technical/git/ .. _git clone: http://www.kernel.org/pub/software/scm/git/docs/git-clone.html .. _git checkout: http://www.kernel.org/pub/software/scm/git/docs/git-checkout.html .. _git commit: http://www.kernel.org/pub/software/scm/git/docs/git-commit.html .. _git push: http://www.kernel.org/pub/software/scm/git/docs/git-push.html .. _git pull: http://www.kernel.org/pub/software/scm/git/docs/git-pull.html .. _git add: http://www.kernel.org/pub/software/scm/git/docs/git-add.html .. _git status: http://www.kernel.org/pub/software/scm/git/docs/git-status.html .. _git diff: http://www.kernel.org/pub/software/scm/git/docs/git-diff.html .. _git log: http://www.kernel.org/pub/software/scm/git/docs/git-log.html .. _git branch: http://www.kernel.org/pub/software/scm/git/docs/git-branch.html .. _git remote: http://www.kernel.org/pub/software/scm/git/docs/git-remote.html .. _git config: http://www.kernel.org/pub/software/scm/git/docs/git-config.html .. _why the -a flag?: http://www.gitready.com/beginner/2009/01/18/the-staging-area.html .. _git staging area: http://www.gitready.com/beginner/2009/01/18/the-staging-area.html .. _git management: http://kerneltrap.org/Linux/Git_Management .. _linux git workflow: http://www.mail-archive.com/dri-devel@lists.sourceforge.net/msg39091.html .. _git parable: http://tom.preston-werner.com/2009/05/19/the-git-parable.html dipy-0.13.0/doc/devel/gitwash/git_resources.rst000066400000000000000000000034221317371701200214640ustar00rootroot00000000000000.. _git-resources: ============= git resources ============= Tutorials and summaries ======================= * `github help`_ has an excellent series of how-to guides. * `learn.github`_ has an excellent series of tutorials * The `pro git book`_ is a good in-depth book on git. * A `git cheat sheet`_ is a page giving summaries of common commands. * The `git user manual`_ * The `git tutorial`_ * The `git community book`_ * `git ready`_ |emdash| a nice series of tutorials * `git casts`_ |emdash| video snippets giving git how-tos. 
* `git magic`_ |emdash| extended introduction with intermediate detail * The `git parable`_ is an easy read explaining the concepts behind git. * `git foundation`_ expands on the `git parable`_. * Fernando Perez' git page |emdash| `Fernando's git page`_ |emdash| many links and tips * A good but technical page on `git concepts`_ * `git svn crash course`_: git for those of us used to subversion_ Advanced git workflow ===================== There are many ways of working with git; here are some posts on the rules of thumb that other projects have come up with: * Linus Torvalds on `git management`_ * Linus Torvalds on `linux git workflow`_ . Summary; use the git tools to make the history of your edits as clean as possible; merge from upstream edits as little as possible in branches where you are doing active development. Manual pages online =================== You can get these on your own machine with (e.g) ``git help push`` or (same thing) ``git push --help``, but, for convenience, here are the online manual pages for some common commands: * `git add`_ * `git branch`_ * `git checkout`_ * `git clone`_ * `git commit`_ * `git config`_ * `git diff`_ * `git log`_ * `git pull`_ * `git push`_ * `git remote`_ * `git status`_ .. include:: links.inc dipy-0.13.0/doc/devel/gitwash/index.rst000066400000000000000000000003531317371701200177160ustar00rootroot00000000000000.. _using-git: Working with DIPY source code ================================================ Contents: .. toctree:: :maxdepth: 2 git_intro git_install following_latest patching git_development git_resources dipy-0.13.0/doc/devel/gitwash/known_projects.inc000066400000000000000000000027101317371701200216140ustar00rootroot00000000000000.. Known projects .. PROJECTNAME placeholders .. _PROJECTNAME: http://neuroimaging.scipy.org .. _`PROJECTNAME github`: http://github.com/nipy .. _`PROJECTNAME mailing list`: https://mail.python.org/mailman/listinfo/neuroimaging .. numpy .. _numpy: hhttp://numpy.scipy.org .. _`numpy github`: http://github.com/numpy/numpy .. _`numpy mailing list`: http://mail.scipy.org/mailman/listinfo/numpy-discussion .. scipy .. _scipy: http://www.scipy.org .. _`scipy github`: http://github.com/scipy/scipy .. _`scipy mailing list`: http://mail.scipy.org/mailman/listinfo/scipy-dev .. nipy .. _nipy: http://nipy.org/nipy .. _`nipy github`: http://github.com/nipy/nipy .. _`nipy mailing list`: https://mail.python.org/mailman/listinfo/neuroimaging .. ipython .. _ipython: http://ipython.scipy.org .. _`ipython github`: http://github.com/ipython/ipython .. _`ipython mailing list`: http://mail.scipy.org/mailman/listinfo/IPython-dev .. dipy .. _dipy: http://nipy.org/dipy .. _`dipy github`: https://github.com/nipy/dipy .. _`dipy mailing list`: https://mail.python.org/mailman/listinfo/neuroimaging .. nibabel .. _nibabel: http://nipy.org/nibabel .. _`nibabel github`: http://github.com/nipy/nibabel .. _`nibabel mailing list`: https://mail.python.org/mailman/listinfo/neuroimaging .. marsbar .. _marsbar: http://marsbar.sourceforge.net .. _`marsbar github`: http://github.com/matthew-brett/marsbar .. _`MarsBaR mailing list`: https://lists.sourceforge.net/lists/listinfo/marsbar-users dipy-0.13.0/doc/devel/gitwash/links.inc000066400000000000000000000001611317371701200176650ustar00rootroot00000000000000.. compiling links file .. include:: known_projects.inc .. include:: this_project.inc .. include:: git_links.inc dipy-0.13.0/doc/devel/gitwash/maintainer_workflow.rst000066400000000000000000000060001317371701200226630ustar00rootroot00000000000000.. 
_maintainer-workflow: ################### Maintainer workflow ################### This page is for maintainers |emdash| those of us who merge our own or other peoples' changes into the upstream repository. Being as how you're a maintainer, you are completely on top of the basic stuff in :ref:`development-workflow`. The instructions in :ref:`linking-to-upstream` add a remote that has read-only access to the upstream repo. Being a maintainer, you've got read-write access. It's good to have your upstream remote have a scary name, to remind you that it's a read-write remote:: git remote add upstream-rw git@github.com:nipy/dipy.git git fetch upstream-rw ******************* Integrating changes ******************* Let's say you have some changes that need to go into trunk (``upstream-rw/master``). The changes are in some branch that you are currently on. For example, you are looking at someone's changes like this:: git remote add someone git://github.com/someone/dipy.git git fetch someone git branch cool-feature --track someone/cool-feature git checkout cool-feature So now you are on the branch with the changes to be incorporated upstream. The rest of this section assumes you are on this branch. A few commits ============= If there are only a few commits, consider rebasing to upstream:: # Fetch upstream changes git fetch upstream-rw # rebase git rebase upstream-rw/master Remember that, if you do a rebase, and push that, you'll have to close any github pull requests manually, because github will not be able to detect the changes have already been merged. A long series of commits ======================== If there are a longer series of related commits, consider a merge instead:: git fetch upstream-rw git merge --no-ff upstream-rw/master The merge will be detected by github, and should close any related pull requests automatically. Note the ``--no-ff`` above. This forces git to make a merge commit, rather than doing a fast-forward, so that these set of commits branch off trunk then rejoin the main history with a merge, rather than appearing to have been made directly on top of trunk. Check the history ================= Now, in either case, you should check that the history is sensible and you have the right commits:: git log --oneline --graph git log -p upstream-rw/master.. The first line above just shows the history in a compact way, with a text representation of the history graph. The second line shows the log of commits excluding those that can be reached from trunk (``upstream-rw/master``), and including those that can be reached from current HEAD (implied with the ``..`` at the end). So, it shows the commits unique to this branch compared to trunk. The ``-p`` option shows the diff for these commits in patch form. Push to trunk ============= :: git push upstream-rw my-new-feature:master This pushes the ``my-new-feature`` branch in this repository to the ``master`` branch in the ``upstream-rw`` repository. .. include:: links.inc dipy-0.13.0/doc/devel/gitwash/patching.rst000066400000000000000000000076451317371701200204170ustar00rootroot00000000000000================ Making a patch ================ You've discovered a bug or something else you want to change in `dipy`_ .. |emdash| excellent! You've worked out a way to fix it |emdash| even better! You want to tell us about it |emdash| best of all! The easiest way is to make a *patch* or set of patches. Here we explain how. 
Making a patch is the simplest and quickest, but if you're going to be doing anything more than simple quick things, please consider following the :ref:`git-development` model instead. .. _making-patches: Making patches ============== Overview -------- :: # tell git who you are git config --global user.email you@yourdomain.example.com git config --global user.name "Your Name Comes Here" # get the repository if you don't have it git clone git://github.com/nipy/dipy.git # make a branch for your patching cd dipy git branch the-fix-im-thinking-of git checkout the-fix-im-thinking-of # hack, hack, hack # Tell git about any new files you've made git add somewhere/tests/test_my_bug.py # commit work in progress as you go git commit -am 'BF - added tests for Funny bug' # hack hack, hack git commit -am 'BF - added fix for Funny bug' # make the patch files git format-patch -M -C master Then, send the generated patch files to the `dipy mailing list`_ |emdash| where we will thank you warmly. In detail --------- #. Tell git who you are so it can label the commits you've made:: git config --global user.email you@yourdomain.example.com git config --global user.name "Your Name Comes Here" #. If you don't already have one, clone a copy of the `dipy`_ repository:: git clone git://github.com/nipy/dipy.git cd dipy #. Make a 'feature branch'. This will be where you work on your bug fix. It's nice and safe and leaves you with access to an unmodified copy of the code in the main branch:: git branch the-fix-im-thinking-of git checkout the-fix-im-thinking-of #. Do some edits, and commit them as you go:: # hack, hack, hack # Tell git about any new files you've made git add somewhere/tests/test_my_bug.py # commit work in progress as you go git commit -am 'BF - added tests for Funny bug' # hack hack, hack git commit -am 'BF - added fix for Funny bug' Note the ``-am`` options to ``commit``. The ``m`` flag just signals that you're going to type a message on the command line. The ``a`` flag |emdash| you can just take on faith |emdash| or see `why the -a flag?`_. #. When you have finished, check you have committed all your changes:: git status #. Finally, make your commits into patches. You want all the commits since you branched from the ``master`` branch:: git format-patch -M -C master You will now have several files named for the commits:: 0001-BF-added-tests-for-Funny-bug.patch 0002-BF-added-fix-for-Funny-bug.patch Send these files to the `dipy mailing list`_. When you are done, to switch back to the main copy of the code, just return to the ``master`` branch:: git checkout master Moving from patching to development =================================== If you find you have done some patches, and you have one or more feature branches, you will probably want to switch to development mode. You can do this with the repository you have. Fork the `dipy`_ repository on github |emdash| :ref:`forking`. Then:: # checkout and refresh master branch from main repo git checkout master git pull origin master # rename pointer to main repository to 'upstream' git remote rename origin upstream # point your repo to default read / write to your fork on github git remote add origin git@github.com:your-user-name/dipy.git # push up any branches you've made and want to keep git push origin the-fix-im-thinking-of Then you can, if you want, follow the :ref:`development-workflow`. .. 
include:: links.inc dipy-0.13.0/doc/devel/gitwash/pull_button.png000066400000000000000000000311351317371701200211340ustar00rootroot00000000000000PNG  IHDR~\iu pHYs   IDATx]|ToߔMHH PKT,"S, O}%"]zHR@dw޽fIBB$a&{3s̙3s72p8 G# ?G#p!}p]G#?G#p!}p]G#?G#p!HKKCVVVuNPF C./y>xbpˏUVe͛7׭=Obyyy()6c\,4lYѬ)4,= {$H& 0,n8^CtqѸ˱gEsZzl6~?vu;Oͽyv+gkx G#}كSԩSª8xy$;w}Z*.6.O[B$ɴ r)ɝĻB? Z}|?ìqu'.=IXHG.A0[rgcǎN,YDx_>kƮG όSN3^ǶQX+PmY+ˁ\K_,?~\#GBPN$,"E؟ A~ǧGLVs',o<5#3( fa0`9UI l>`ŒOлa彣p0e3ggx~Zb>1cƠiӦE*(O_˝)ޝ:8G| < 'g>!A>; oxMۍ9gH0taق.IFp̟3W6bά|`xhs &އĶ?EK+X4|ad#,0+VCSȦٳоq?$` 0ot$WRsp-.vx3k;~ŒO >`4|H>Og,O5E 4}LZ'Š&^xz` hXl,CbM8ִvXI:?? $̯?c{+]DJ___L4 ׿h5 &O  ^d g|tU"?5yɟ0pwX&<[?GWirToV|){<3mLAmxs 6'@9l>.#0}H4tŅx-w}__n Ơu~1b{֮҇#`4|(6y{fm^g][l?2wagsxqt'`oqѪ!!CMcZxL8ϦmE)*ue/fJ{ ž{LhϞ=WZˀ8y36 ‘3ݟL?w $ZMsѬ^ tɉ)"NKto+Щw+N7oG@2_ >tł@Y``0<\"aJ)Ϭ &57"&I! Rs=oֱ8) V8ޑxkme: #Hxӧp8ƿRh,@ #膣kw )@Ȥ r-e0gG|)o$b}"1bPddpyC#W.?E'\lǂ\Zز\m4rognYr2y^ /&욟*SY|!ڜi&rE|v9#' eH\|=0i;@F%!_Am62bK ? ( #`H.Ң #ӑݐ`6ֿfO< ͨOzde/mȄ<@soΛXyɒKcs30oOIZM}5 R-AnJiz$HlY3"1j^8A: GZE_ErrV $1W,PߐT?M!1v\w'_w :\db5nMQ^&SZ ,h׮Knq7mP?څ3PBK2}G6IjwMF:,`*Lъ<-r$PjZaY4l/,y&q-[$e]u߾}'ABWՂgyL&%1/H}0 3wSJ;Fc~1{NĿ?\ꂠ8\"`}H`5%zf40>-|ڊyJw-<˖-1}tt ؿ?q7ni#)}ӡd:җ؁hn)U8D_Nҗ"z@vM$Y/ kg߲CC0[mğev= Lg!Kĩ֮#_<*JzPp. ˌA"ө)(5K@0Mm?_@~}'v U8k ~Z(tn@Pu/71fM |_7P>1/fGn, K/)_QWKcAtهK嗰EVG,]0>\!dD6]4}.>_F MX5GfNY {;Ga1 Z*,Rf-Nslff*e0gJ1)#36fKGzܰ W&}9cpNA-[XP&ɹ[u.lNw}M=]gT̚ghSOgϞ`&eؤ) |4xnwH{ n>X'5!ȋQ(0Mз"d]H001\ka/<[`0->BkG8.ТVUo[1㥓l&IUF˾B["^.W".=REIiWiMb6WW/.MDvT~kg ;DY(4(sy#jᓟephn×/?Wx,j[ П|\Ld1q@)2j8ˊ}'`㝑|v ( j:/}Za;,Tw(pDQBU2Z}d.s^_|~j?^4IaA'¯vW>TaS I) ֖4$^+0ܹ^=NѺs ""]tg:ea3YLN˳g/tZ9llv4AAslnf6L4Ծbf"^}ϱS Y9P{“].ã䞠{'śrx)帬V _cO%U8c\LdҗVHe =S9SnLH'?lm|,:Z( cq8@,o]UW8UKi<+{? f˘0`X.pnPⱹWS8nL8ftv\+­=bcNIU:%qK^L<+B-p8!PiX\)p8U,8G|p_>x)G#PeʲpʇWÍp8U,8G|p_>x)G#PeʲpʇWÍp8U,8G|p_>x)G#PeʲpʇWÍp8U,8G|p_>x)G#PeʲpʇWÍp8U,8G|p_>x)G#PeʲpʇWÍp8U,8G|p_>x)G#Peʲpʇ+Kq8@D@YF*I8'#pʇw7^#TY p8#-cKp8*WU}xG#PvGJ#-*>NP{@)#mq57 B"hC<jE@O>ǧ"ӪC:9ш /1g.%O_V0$t3D& S݆WT`3kA-sprNdlg.Ν8x"g/!Eo.0nUՆ+ZjUW*@\=SXXe(^l0T:2hVquSM(lx:}0^su*i 7 ,I&g-51xk/cx2S&2[qyZHAͩ#ɑz[Pw ~!} _lH2pm0q;W۶q6_1\=D"|+`-'B-09Tyy0 "u Aj>{`ƄpmȭWw1XXZM90t$Q6h@<FsxE(٤mҮ/}|%fg7е7IAvAPz^TTKu8L: ځ,{+NZ*(Vw_Bn+oAxbPSĬɊ-ʋB_zeʠhƕ qB뗐Њ$㲞CzegڪZO$<g_b̛F>-rOG`V'%|<|࡮x y-U rYѦfdLYǾy8dAZ>\ IfEnA$_Nͳ/[liQyNxvyC;S[>SXm1dqd4v:mS`- ܈6tAx5nCfy9A7Nnʭ kja&5Rnhۼ6^6䦜ǁh3|zc՘W)0;BHwb9bj4 cuɰhjt/nF蓮!mHPHs`"nfVbl8l#ߠ@ {/|3cGw|g,h42G`Ӯ|l7aCwa\]T߉ 7"9=$FNm:6ݒ!c)v5.gBgNϧV㔃+Pw yF+D_ҝ EzqLhIf/@1[Ix}!⃺uB?m"A VD{lŔycۡtxb]?ca6JZIbvby-z==\dryW)ih__:(\JNESOgL"VtjڹkuImvHtUBe_h^&*oM6hGN>g WCaBU|A _ I+,N,ɉ~ʕF)2\ju~]8kGP^AwԈ9ʇum %O6B\ti-%IT2ꃊr*Cm& o^ f2f쒘pF6r%߾tzfdQ OQ2 hZKwV&v@taa iZV~ jOadtNC},ŏ4<RdL'Sԥ9s>ǡa,Jm{|˨CLN]3>^| vM.\5|xU wnsI,!Τ-gI<4 nۣ 1f4GOJ~F\VD؜A<)IaR$1tX6{y _?x$ ;6\KC=;!keؖpMhBT1X9]l A&-q4 ž|Yj:/; PÓ͂bdٱILMQjb =)7M7@ >G'5j4abwTm |9٠9!mʻ@ w+ۀVJh-7iĽ>;$ \V&·/>:=C+:joxFjobpH;;x'#%nȋJ$9nYioP.eӠ1Ypr>̦ oH ^Bs@jr)l+kl7QWSg]-u9[ӈՑIU.d+)/'Ъ jFw$@20HV~$˛')^ЫI-l}V z'&h`űbi.2b J+W"lR*d f$He bɔ)D{2E= u yS/,oe@t6Gځ+Fk 4UZ됰{-"ikKpq ώ u9yG; bYR~S&@tmļԀMBHygCju/.Ykس4c|:(>*6mBjڐ&lM4X.mv6\[lS)o"jegGʸ4@|d:tjH.*ъ x=ŷj 4$e%z FN8џEgD׎K( _kҥȩF~JeY.i7!%5O߫cm'lŶN6ܔl$ɊfD9hP3;okJ-RY~uʒ{?$Zܶ;۱yՏ$:ڣK;r;zCbwād >vvQ 6at܆BDȗ*XMTcKVGӪ. 
dipy-0.13.0/doc/devel/gitwash/set_up_fork.rst000066400000000000000000000036441317371701200211350ustar00rootroot00000000000000
.. _set-up-fork:

==================
Set up your fork
==================

First you follow the instructions for :ref:`forking`.

Overview
========

::

   git clone git@github.com:your-user-name/dipy.git
   cd dipy
   git remote add upstream git://github.com/nipy/dipy.git

In detail
=========

Clone your fork
---------------

#. Clone your fork to the local computer with
   ``git clone git@github.com:your-user-name/dipy.git``

#. Investigate.  Change directory to your new repo: ``cd dipy``.  Then
   ``git branch -a`` to show you all branches.  You'll get something like::

      * master
      remotes/origin/master

   This tells you that you are currently on the ``master`` branch, and that
   you also have a ``remote`` connection to ``origin/master``.  What remote
   repository is ``remote/origin``?  Try ``git remote -v`` to see the URLs
   for the remote.  They will point to your github fork.

   Now you want to connect to the upstream `dipy github`_ repository, so you
   can merge in changes from trunk.

.. _linking-to-upstream:

Linking your repository to the upstream repo
--------------------------------------------

::

   cd dipy
   git remote add upstream git://github.com/nipy/dipy.git

``upstream`` here is just the arbitrary name we're using to refer to the main
`dipy`_ repository at `dipy github`_.

Note that we've used ``git://`` for the URL rather than ``git@``.  The
``git://`` URL is read only.  This means that we can't accidentally (or
deliberately) write to the upstream repo, and we are only going to use it to
merge into our own code.

Just for your own satisfaction, show yourself that you now have a new
'remote', with ``git remote -v show``, giving you something like::

   upstream   git://github.com/nipy/dipy.git (fetch)
   upstream   git://github.com/nipy/dipy.git (push)
   origin     git@github.com:your-user-name/dipy.git (fetch)
   origin     git@github.com:your-user-name/dipy.git (push)

.. include:: links.inc
dipy-0.13.0/doc/devel/gitwash/this_project.inc000066400000000000000000000001701317371701200212420ustar00rootroot00000000000000
.. dipy
.. _`dipy`: http://nipy.org/dipy
.. _`dipy mailing list`: https://mail.python.org/mailman/listinfo/neuroimaging
dipy-0.13.0/doc/devel/index.rst000066400000000000000000000004331317371701200162470ustar00rootroot00000000000000
.. _development:

DIPY development
================

Contents:

.. toctree::
   :maxdepth: 2

   intro
   gitwash/index
   make_release
   commit_codes
   coding_style_guideline
   python3

Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
dipy-0.13.0/doc/devel/intro.rst000066400000000000000000000024611317371701200162760ustar00rootroot00000000000000
==============
Introduction
==============

Dipy_ is always seeking courageous scientists who want to take dMRI analysis
to the next level.  If you share the same vision and you are willing to share
your code, please let us know; we will be happy to help.
The lead developer is Eleftherios Garyfallidis, with support from Ian Nimmo-Smith, Matthew Brett, Bago Amirbekian, Ariel Rokem, Stefan van der Walt and (your name here). See the main documentation for the full list of dipy developers and contributors. The primary development repository is `dipy github`_ Please do contribute. Have a look at :ref:`using-git` for some ideas on how to get going. Have a look at the `nipy development guidelines`_ for our coding habits. In summary, please follow the `numpy coding style`_ - and of course - PEP8_ . Test everything! We are using nose_ ; see the existing code for example tests. If you can please use our :ref:`commit-codes`. But - just pitch in - send us some code - we'll give you feedback if you want it - that way we learn from each other. And - welcome... If you are new to diffusion MRI and you want to learn more here is a simple `video `_ we made for the general public. I hope you enjoy it and apologies for the low resolution. .. include:: ../links_names.inc dipy-0.13.0/doc/devel/make_release.rst000066400000000000000000000312221317371701200175550ustar00rootroot00000000000000.. _release-guide: ********************************* A guide to making a DIPY release ********************************* A guide for developers who are doing a DIPY release .. _release-tools: Release tools ============= There are some release utilities that come with nibabel_. nibabel should install these as the ``nisext`` package, and the testing stuff is understandably in the ``testers`` module of that package. DIPY has Makefile targets for their use. The relevant targets are:: make check-version-info make check-files make sdist-tests The first installs the code from a git archive, from the repository, and for in-place use, and runs the ``get_info()`` function to confirm that installation is working and information parameters are set correctly. The second (``sdist-tests``) makes an sdist source distribution archive, installs it to a temporary directory, and runs the tests of that install. If you have a version of nibabel trunk past February 11th 2011, there will also be a functional make target:: make bdist-egg-tests This builds an egg (which is a zip file), hatches it (unzips the egg) and runs the tests from the resulting directory. .. _release-checklist: Release checklist ================= * Review the open list of `dipy issues`_. Check whether there are outstanding issues that can be closed, and whether there are any issues that should delay the release. Label them ! * Review and update the release notes. Review and update the :file:`Changelog` file. Get a partial list of contributors with something like:: git shortlog -ns 0.6.0.. where ``0.6.0`` was the last release tag name. Then manually go over ``git shortlog 0.6.0..`` to make sure the release notes are as complete as possible and that every contributor was recognized. * Use the opportunity to update the ``.mailmap`` file if there are any duplicate authors listed from ``git shortlog -ns``. * Add any new authors to the ``AUTHORS`` file. Add any new entries to the ``THANKS`` file. * Check the copyright years in ``doc/conf.py`` and ``LICENSE`` * Check the examples - we really need an automated check here. * Check the ``pyx`` file doctests with:: ./tools/doctest_extmods.py dipy We really need an automated run of these using the buildbots, but we haven't done it yet. * Check the ``long_description`` in ``dipy/info.py``. Check it matches the ``README`` in the root directory, maybe with ``vim`` ``diffthis`` command. 
Check all the links are still valid. * Check all the DIPY builds are green on the `nipy buildbot`_ * If you have travis-ci_ building set up you might want to push the code in its current state to a branch that will build, e.g.:: git branch -D pre-release-test # in case branch already exists git co -b pre-release-test * Run the builder and review the output from http://nipy.bic.berkeley.edu/builders/dipy-release-checks This builder does *not* check the outputs - they will likely all be green - you have to check the ``stdio`` output for each step using the web interface. The ``dipy-release-checks`` builder runs these tests:: make distclean python -m compileall . make sdist-tests make bdist-egg-tests make check-version-info make check-files * ``make bdist-egg-tests`` may well fail because of a problem with the script tests; if you have a recent (>= March 31 2013) nibabel ``nisext`` package, you could try instead doing:: python -c 'from nisext.testers import bdist_egg_tests; bdist_egg_tests("dipy", label="not slow and not script_test")' Eventually we should update the ``bdist-egg-tests`` makefile target. * ``make check-version-info`` checks how the commit hash is stored in the installed files. You should see something like this:: {'sys_version': '2.6.6 (r266:84374, Aug 31 2010, 11:00:51) \n[GCC 4.0.1 (Apple Inc. build 5493)]', 'commit_source': 'archive substitution', 'np_version': '1.5.0', 'commit_hash': '25b4125', 'pkg_path': '/var/folders/jg/jgfZ12ZXHwGSFKD85xLpLk+++TI/-Tmp-/tmpGPiD3E/pylib/dipy', 'sys_executable': '/Library/Frameworks/Python.framework/Versions/2.6/Resources/Python.app/Contents/MacOS/Python', 'sys_platform': 'darwin'} /var/folders/jg/jgfZ12ZXHwGSFKD85xLpLk+++TI/-Tmp-/tmpGPiD3E/pylib/dipy/__init__.pyc {'sys_version': '2.6.6 (r266:84374, Aug 31 2010, 11:00:51) \n[GCC 4.0.1 (Apple Inc. build 5493)]', 'commit_source': 'installation', 'np_version': '1.5.0', 'commit_hash': '25b4125', 'pkg_path': '/var/folders/jg/jgfZ12ZXHwGSFKD85xLpLk+++TI/-Tmp-/tmpGPiD3E/pylib/dipy', 'sys_executable': '/Library/Frameworks/Python.framework/Versions/2.6/Resources/Python.app/Contents/MacOS/Python', 'sys_platform': 'darwin'} /Users/mb312/dev_trees/dipy/dipy/__init__.pyc {'sys_version': '2.6.6 (r266:84374, Aug 31 2010, 11:00:51) \n[GCC 4.0.1 (Apple Inc. build 5493)]', 'commit_source': 'repository', 'np_version': '1.5.0', 'commit_hash': '25b4125', 'pkg_path': '/Users/mb312/dev_trees/dipy/dipy', 'sys_executable': '/Library/Frameworks/Python.framework/Versions/2.6/Resources/Python.app/Contents/MacOS/Python', 'sys_platform': 'darwin'} * ``make check-files`` checks if the source distribution is picking up all the library and script files. Look for output at the end about missed files, such as:: Missed script files: /Users/mb312/dev_trees/dipy/bin/nib-dicomfs, /Users/mb312/dev_trees/dipy/bin/nifti1_diagnose.py Fix ``setup.py`` to carry across any files that should be in the distribution. * Clean and compile:: make distclean git clean -fxd python setup.py build_ext --inplace * Make sure all tests pass on your local machine (from the ```` directory):: cd .. nosetests --with-doctest dipy cd dipy # back to the root directory * Check the documentation doctests:: cd doc make doctest cd .. At the moment this generates lots of errors from the autodoc documentation running the doctests in the code, where the doctests pass when run in nose - we should find out why this is at some point, but leave it for now. * Trigger builds of all the binary build testers for DIPY, using the web interface. 
You may need permissions set to do this - contact Matthew or Eleftherios if
  you do.

  At the moment, the useful DIPY binary build testers are:

  * http://nipy.bic.berkeley.edu/builders/dipy-bdist32-35
  * http://nipy.bic.berkeley.edu/builders/dipy-bdist32-27
  * http://nipy.bic.berkeley.edu/builders/dipy-bdist64-27
  * http://nipy.bic.berkeley.edu/builders/dipy-bdist64-35
  * http://nipy.bic.berkeley.edu/builders/dipy-bdist-mpkg-2.6
  * http://nipy.bic.berkeley.edu/builders/dipy-bdist-mpkg-2.7

* The release should now be ready.

Doing the release
=================

The trick here is to get all the testing and pushing to upstream done *before*
you do the final release commit.  There should be only one commit with the
release version number, so you might want to make the release commit on your
local machine, push to pypi_, review, fix, rebase, until all is good.  Then and
only then do you push to upstream on github.

* Make the release commit.  Edit :file:`dipy/info.py` to set
  ``_version_extra`` to ``''``; commit.

* Build the release files::

    make distclean
    git clean -fxd
    make source-release

* Once everything looks good, upload the source release to PyPi.  See
  `setuptools intro`_::

    python setup.py register
    python setup.py sdist --formats=gztar,zip upload

* Remember you'll need your ``~/.pypirc`` file set up right for this to work.
  See `setuptools intro`_.  The file should look something like this::

    [distutils]
    index-servers = pypi

    [pypi]
    username:your.pypi.username
    password:your-password

    [server-login]
    username:your.pypi.username
    password:your-password

* Check how everything looks on pypi - the description, the packages.  If
  necessary delete the release and try again if it doesn't look right.

* Make an annotated tag for the release with tag of form ``0.6.0``::

    git tag -am 'Second public release' 0.6.0

* Set up maintenance / development branches

  If this is a full release you need to set up two branches, one for further
  substantial development (often called 'trunk') and another for maintenance
  releases.

* Branch to maintenance::

    git co -b maint/0.6.x

  Set ``_version_extra`` back to ``.dev`` and bump ``_version_micro`` by 1.
  Thus the maintenance series will have version numbers like - say -
  '0.6.1.dev' until the next maintenance release - say '0.6.1'.  Commit.
  Push with something like ``git push upstream-rw maint/0.6.x --set-upstream``

* Start next development series::

    git co main-master

  then restore ``.dev`` to ``_version_extra``, and bump ``_version_minor`` by
  1.  Thus the development series ('trunk') will have a version number here of
  '0.7.0.dev' and the next full release will be '0.7.0'.

  Next merge the maintenance branch with the "ours" strategy.  This just
  labels the maintenance branch ``info.py`` edits as seen but discarded, so we
  can merge from maintenance in future without getting spurious merge
  conflicts::

    git merge -s ours maint/0.6.x

  Push with something like ``git push upstream-rw main-master:master``

  If this is just a maintenance release from ``maint/0.6.x`` or similar, just
  tag and set the version number to - say - ``0.6.2.dev``.

* Push the tag with ``git push upstream-rw 0.6.0``

Uploading binary builds for the release
=======================================

By far the easiest way to do this is via the buildbots.  In order to do this,
you need first to push the release commit and the release tag to github, so
the buildbots can find the released code and build it.
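As a concrete sketch of that first step, reusing the commands shown above
(this assumes, as in the previous section, that ``upstream-rw`` is your
read / write remote for the main repository, ``main-master`` is your local
development branch, and ``0.6.0`` is the new tag)::

    # push the release commit
    git push upstream-rw main-master:master
    # push the release tag
    git push upstream-rw 0.6.0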
* In order to trigger the binary builds for the release commit, you need to go to the web interface for the binary builder, go to the "Force build" section, enter your username and password for the buildbot web service and enter the commit tag name in the *revision* field. For example, if the tag was ``0.6.0`` then you would enter ``0.6.0`` in the revision field of the form. This builds the exact commit labeled by the tag, which is what we want. * Trigger binary builds for Windows from the buildbots. See builders ``dipy-bdist32-26``, ``dipy-bdist32-27``. The ``exe`` builds will appear in http://nipy.bic.berkeley.edu/dipy-dist . Check that the binary build version numbers are release numbers (``dipy-0.6.0.win32.exe`` rather than ``dipy-0.6.0.dev.win32.exe``). Download the builds and upload to pypi. You can upload the exe files with the *files* interface for the new DIPY release. Obviously you'll need to log in to do this, and you'll need to be an admin for the DIPY pypi project. For reference, if you need to do binary exe builds by hand, use something like:: make distclean git clean -fxd c:\Python26\python.exe setup.py bdist_egg upload c:\Python26\python.exe setup.py bdist_wininst --target-version=2.6 register upload * Trigger binary builds for OSX from the buildbots ``dipy-bdist-mpkg-2.6``, ``dipy-bdist-mpkg-2.7``. ``egg`` and ``mpkg`` builds will appear in http://nipy.bic.berkeley.edu/dipy-dist . Download the eggs and upload to pypi. Upload the dmg files with the *files* interface for the new dipy release. * Building OSX dmgs from the mpkg builds. The buildbot binary builders build ``mpkg`` directories, which are installers for OSX. These need their permissions to be fixed because the installers should install the files as the root user, group ``admin``. The all need to be converted to OSX disk images. Use the ``./tools/build_dmgs.py``, with something like this command line:: ./tools/build_dmgs "dipy-dist/dipy-0.6.0-py*.mpkg" For this to work you'll need several things: * An account on a OSX box with sudo (Admin user) on which to run the script. * ssh access to the buildbot server http://nipy.bic.berkeley.edu (ask Matthew or Eleftherios). * a development version of ``bdist_mpkg`` installed from https://github.com/matthew-brett/bdist_mpkg. You need this second for the script ``reown_mpkg`` that fixes the permissions. Upload the dmg files with the *files* interface for the new dipy release. Other stuff that needs doing for the release ============================================ * Checkout the tagged release, build the html docs and upload them to the github pages website:: make upload You need to checkout the tagged version in order to get the version number correct for the doc build. The version number gets picked up from the ``info.py`` version. * Announce to the mailing lists. With fear and trembling. .. _setuptools intro: http://packages.python.org/an_example_pypi_project/setuptools.html .. include:: ../links_names.inc dipy-0.13.0/doc/devel/python3.rst000066400000000000000000000074401317371701200165510ustar00rootroot00000000000000.. _python3: ############################################ Keeping code compatible with Pythons 2 and 3 ############################################ Dipy supports Python versions from 2.6 to 3.5. In order to maintain code that supports both Python 2 and Python 3 versions, please follow these instructions. 
There is useful advice here:

* http://docs.python.org/3/howto/pyporting.html
* http://python3porting.com/differences.html
* http://ptgmedia.pearsoncmg.com/imprint_downloads/informit/promotions/python/python2python3.pdf

**************
Future imports
**************

For any modules with print statements, and for any modules where you remember,
please put::

    from __future__ import division, print_function, absolute_import

as the first code line of the file, to use Python 3 behavior by default.

*****
Print
*****

In Python 3, ``print`` is a function.  Please use the ``__future__`` import
above, and the function form ``print(something)``, whenever ``print`` is used.

********
Division
********

In Python 2, integer division returns integers, while in Python 3 ``3/2``
returns ``1.5``, not ``1``.  It's very good to remember to put the
``__future__`` import above at the top of the file to make this the default
everywhere.

*************
Moved modules
*************

There are compatibility routines in :mod:`dipy.utils.six`.  You can often get
modules that have moved between the versions with (e.g.)::

    from dipy.utils.six.moves import configparser

See the ``six.py`` code and `the six.py docs `_.

*************
Range, xrange
*************

``range`` returns an iterator in Python 3, so ``xrange`` is redundant, and it
has been removed.  Get ``xrange`` for Python 2, ``range`` for Python 3 with::

    from dipy.utils.six.moves import xrange

Or you might want to stick to ``range`` for Python 2 and Python 3, especially
for small lists where the memory benefit of ``xrange`` is small.

Because ``range`` returns an iterator for Python 3, you may need to wrap some
calls to range with ``list(range(N))`` to make the code compatible with Python
2 and Python 3.

******
Reduce
******

Python 3 removed ``reduce`` from the builtin namespace; this import works for
both Python 2 and Python 3::

    from functools import reduce

*******
Strings
*******

The major difference between Python 2 and Python 3 is string handling.
Strings (``str``) are always unicode, and so::

    my_str = 'A string'

in Python 3 will result in a unicode string.

You also need to be much more explicit when opening files: if you want bytes,
use ``open(fname, "rb")``; if you want unicode, use ``open(fname, "rt")``.  In
the same way you need to be explicit if you want ``import io; io.StringIO`` or
``io.BytesIO`` for your file-like objects containing strings or bytes.

``basestring`` has been removed in Python 3.  To test whether something is a
string, use::

    from dipy.utils.six import string_types

    isinstance(a_variable, string_types)

*************
Next function
*************

In versions of Python from 2.6 and on there is a function ``next`` in the
builtin namespace, that returns the next result from an iterable.  In Python
3, meanwhile, the ``.next()`` method on generators has gone, replaced by
``.__next__()``.  So, prefer ``next(obj)`` to ``obj.next()`` for generators,
and in general when getting the next thing from an iterable.

******
Except
******

You can't get away with ``except ValueError, err`` now, because that raises a
syntax error for Python 3.  Use ``except ValueError as err`` instead.

************
Dictionaries
************

You've lost ``d.has_key("hello")`` for dictionaries; use ``"hello" in d``
instead.

``d.items()`` returns an iterator.  If you need a list, use
``list(d.items())``.

``d.iteritems()`` was removed in Python 3 because it is redundant.  Use
``d.items()`` instead.
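To see how these pieces fit together, here is a small sketch of a module that
runs unchanged on Python 2.7 and Python 3.  It is only an illustration: the
``describe`` function and the sample values are invented for this example, and
only the ``dipy.utils.six`` imports come from the guidelines above::

    from __future__ import division, print_function, absolute_import

    from functools import reduce

    from dipy.utils.six import string_types
    from dipy.utils.six.moves import xrange


    def describe(values):
        # True division everywhere, thanks to the __future__ import.
        mean = reduce(lambda a, b: a + b, values, 0) / len(values)
        counts = {'n': len(values), 'mean': mean}
        # Iterate with items(); wrap in list() only if you need a list.
        for key, value in counts.items():
            print(key, value)           # print is a function
        # 'in' replaces has_key; string_types replaces basestring.
        if 'n' in counts:
            return [v for v in values if not isinstance(v, string_types)]
        return []


    if __name__ == '__main__':
        squares = (x * x for x in xrange(5))
        print(next(squares))            # next(obj), not obj.next()
        try:
            describe([])
        except ZeroDivisionError as err:    # 'as err', not ', err'
            print('empty input:', err)

On both interpreters this prints ``0`` and then the caught
``ZeroDivisionError`` message.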
dipy-0.13.0/doc/developers.rst000066400000000000000000000060111317371701200162070ustar00rootroot00000000000000.. _dipy_developers: Developers ========== The core development team consists of the following individuals: - **Eleftherios Garyfallidis**, University of Sherbrooke, QC, CA - **Ariel Rokem**, University of Washington, WA, USA - **Matthew Brett**, University of California, Berkeley, CA, USA - **Bago Amirbekian**, University of California, San Francisco, CA, USA - **Omar Ocegueda**, Center for Research in Mathematics, Guanajuato, Mexico - **Stefan Van der Walt**, University of California, Berkeley, CA, USA - **Marc-Alexandre Cote**, University of Sherbrooke, QC, CA - **Ian Nimmo-Smith**, MRC Cognition and Brain Sciences Unit, Cambridge, UK - **Maxime Descoteaux**, University of Sherbrooke, QC, CA And here is the rest of the wonderful contributors: - **Mauro Zucchelli**, University of Verona, IT - **Matthieu Dumont**, PAVI, Sherbrooke, QC, CA - **Samuel St-Jean**, University of Sherbrooke, QC, CA - **Gabriel Girard**, University of Sherbrooke, QC, CA - **Michael Paquette**, University of Sherbrooke, QC, CA - **Jean-Christophe Houde**, University of Sherbrooke, QC, CA - **Christopher Nguyen**, University of California, Los Angeles, CA, USA - **Emanuele Olivetti**, NeuroInformatics Laboratory (NILab), Trento, IT - **Yaroslav Halchenco**, PBS Department, Dartmouth, NH, USA - **Emmanuel Caruyer**, University of Pensylvania, USA - **Sylvain Merlet**, INRIA, Sophia-Antipolis, FR - **Erick Ziegler**, Université de Liège, BE - **Kimberly Chan**, Stanford University, CA, USA - **Chantal Tax**, University Medical Center, Utrecht, NL - **Demian Wassermann**, INRIA, Sophia Antipolis, FR - **Gregory R. Lee**, Cincinnati Children's Hospital Medical Center, Cincinnati, OH, US - **Endolith**, New-York, NY, USA - **Matthias Ekman**, Donders Institute for Brain, Cognition and Behaviour, Nijmegen, NL - **Andrew Lawrence** - **Kesshi Jordan**, University of California, San Francisco, CA, USA - **Maria Luisa Mandelli**, University of California, San Francisco, CA, USA - **Adam Rybinski**, Jagiellonian University, Krakow, PL - **Qiyuan Tian**, Stanford University, Stanford, CA, USA - **Rafael Neto Henriques**, Cambridge University, UK - **Stephan Meesters**, Eindhoven University of Technology, NL - **Himanshu Mishra**, Indian Institute of Technology, Karaghpur, IN - **Alexander Gauvin**, University of Sherbrooke, QC, CA - **Oscar Esteban**, Stanford University, CA, US - **Bishakh Ghosh**, National Institute of Technology, Durgapur, IN - **Dimitris Rozakis**, Tomotech, Athens, GR - **Rohan Prinja**, Indian Institute of Technology, Bombay, IN - **Sagun Pai**, Indian Institute of Technology, Bombay, IN - **Vatsala Swaroop**, Mombai, IN - **Shahnawaz Ahmed**, Birla Institute of Technology and Science, Pilani, Goa, IN Boundless collaboration is in the heart of Dipy_. We encourage everyone from anywhere in the world to join the team. You can start sharing your code `here`__. If you want to contribute but you don't know in area to focus, please send us an e-mail. We will be more than happy to help. __ `dipy github`_ .. 
include:: links_names.inc dipy-0.13.0/doc/diffusion.bib000066400000000000000000020313131317371701200157560ustar00rootroot00000000000000@comment{This file has been generated by Pybliographer} @Article{Garyfallidis2009b, Author = {Garyfallidis, Eleftherios and Brett, Matthew and Nimmo-smith, Ian}, Title = {{Fast Dimensionality Reduction for Brain Tractography}}, Journal = {Computer}, Volume = {15}, Number = {6}, Pages = {2009--2009}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Garyfallidis, Brett, Nimmo-smith - 2009 - Fast Dimensionality Reduction for Brain Tractography.pdf:pdf}, year = 2009 } @Article{Ese2006, Author = {Ese, T H}, Title = {{Analysis and Classification of EEG Signals using Probabilistic Models for Brain Computer Interfaces Ecole Polytechnique F ´ ed ´ erale de Lausanne Silvia Chiappa}}, Journal = {Learning}, Volume = {3547}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Ese - 2006 - Analysis and Classification of EEG Signals using Probabilistic Models for Brain Computer Interfaces Ecole Polytechnique F ´ ed ´ erale de Lausanne Silvia Chiappa.pdf:pdf}, year = 2006 } @Article{Oliphant2010, Author = {Oliphant, Travis E}, Title = {{Guide to NumPy}}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Oliphant - 2010 - Guide to NumPy.pdf:pdf}, year = 2010 } @Article{yamamoto2007dtf, Author = {Yamamoto, A. and Miki, Y. and Urayama, S. and Fushimi, Y. and Okada, T. and Hanakawa, T. and Fukuyama, H. and Togashi, K.}, Title = {{Diffusion tensor fiber tractography of the optic radiation: analysis with 6-, 12-, 40-, and 81-directional motion-probing gradients, a preliminary study}}, Journal = {American Journal of Neuroradiology}, Volume = {28}, Number = {1}, Pages = {92}, publisher = {Am Soc Neuroradiology}, year = 2007 } @Article{FW05, Author = {Friman, O. and Westin, C. F.}, Title = {Uncertainty in white matter fiber tractography.}, Journal = {Med Image Comput Comput Assist Interv Int Conf Med Image Comput Comput Assist Interv}, Volume = {8}, Number = {Pt 1}, Pages = {107-14}, abstract = {In this work we address the uncertainty associated with fiber paths obtained in white matter fiber tractography. This uncertainty, which arises for example from noise and partial volume effects, is quantified using a Bayesian modeling framework. The theory for estimating the probability of a connection between two areas in the brain is presented, and a new model of the local water diffusion profile is introduced. 
We also provide a theorem that facilitates the estimation of the parameters in this diffusion model, making the presented method simple to implement.}, authoraddress = {Laboratory of Mathematics in Imaging, Department of Radiology Brigham and Women's Hospital, Harvard Medical School, USA.}, keywords = {*Algorithms ; Artificial Intelligence ; Brain/*anatomy \& histology ; Diffusion Magnetic Resonance Imaging/*methods ; Humans ; Image Enhancement/*methods ; Image Interpretation, Computer-Assisted/*methods ; Imaging, Three-Dimensional/*methods ; Nerve Fibers, Myelinated/*ultrastructure ; Pattern Recognition, Automated/methods ; Reproducibility of Results ; Sensitivity and Specificity}, language = {eng}, medline-crdt = {2006/05/12 09:00}, medline-da = {20060511}, medline-dcom = {20060609}, medline-edat = {2006/05/12 09:00}, medline-fau = {Friman, Ola ; Westin, Carl-Fredrik}, medline-gr = {P41-RR13218/RR/NCRR NIH HHS/United States}, medline-jid = {101249582}, medline-jt = {Medical image computing and computer-assisted intervention : MICCAI ... International Conference on Medical Image Computing and Computer-Assisted Intervention}, medline-lr = {20071114}, medline-mhda = {2006/06/10 09:00}, medline-own = {NLM}, medline-pl = {Germany}, medline-pmid = {16685835}, medline-pst = {ppublish}, medline-pt = {Journal Article ; Research Support, N.I.H., Extramural}, medline-sb = {IM}, medline-so = {Med Image Comput Comput Assist Interv Int Conf Med Image Comput Comput Assist Interv. 2005;8(Pt 1):107-14.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=16685835}, year = 2005 } @Article{BaoPMB2009, Author = {Bao, LJ and Zhu, YM and Liu, WY and Croisille, P. and Pu, ZB and Robini, M. and Magnin, IE}, Title = {{Denoising human cardiac diffusion tensor magnetic resonance images using sparse representation combined with segmentation}}, Journal = {Physics in Medicine and Biology}, Volume = {54}, Number = {6}, Pages = {1435--1456}, abstract = {Cardiac diffusion tensor magnetic resonance imaging (DT-MRI) is noise sensitive, and the noise can induce numerous systematic errors in subsequent parameter calculations. This paper proposes a sparse representation-based method for denoising cardiac DT-MRI images. The method first generates a dictionary of multiple bases according to the features of the observed image. A segmentation algorithm based on nonstationary degree detector is then introduced to make the selection of atoms in the dictionary adapted to the image's features. The denoising is achieved by gradually approximating the underlying image using the atoms selected from the generated dictionary. The results on both simulated image and real cardiac DT-MRI images from ex vivo human hearts show that the proposed denoising method performs better than conventional denoising techniques by preserving image contrast and fine structures.}, year = 2009 } @Article{Baldi, Author = {Baldi, P and Kerkyacharian, G and Matematica, Dipartimento and Tor, Roma}, Title = {{arXiv : 0807 . 5059v1 [ math . ST ] 31 Jul 2008 Adaptive density estimation for directional data using needlets}}, arxivid = {arXiv:0807.5059v1}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Baldi et al. - Unknown - arXiv 0807 . 5059v1 math . 
ST 31 Jul 2008 Adaptive density estimation for directional data using needlets.pdf:pdf}, keywords = {and phrases,density estimation,needlets,spherical and directional data,thresholding} } @Article{Science2008, Author = {Science, Computer and Supervisor, Thesis and Wells, William M and Westin, Carl-fredrik and Orlando, Terry P}, Title = {{Quantitative Analysis of Cerebral White Matter Anatomy from Diffusion MRI by}}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Science et al. - 2008 - Quantitative Analysis of Cerebral White Matter Anatomy from Diffusion MRI by.pdf:pdf}, year = 2008 } @Article{Simon2005NeuroImage, Author = {Simon, Tony J. and Ding, Lijun and Bish, Joel P. and McDonald-McGinn, Donna M. and Zackai, Elaine H. and Geeb, James}, Title = {Volumetric, connective, and morphologic changes in the brains of children with chromosome 22q11.2 deletion syndrome: an integrative study}, Journal = {NeuroImage}, Volume = {25}, Pages = {169-180}, abstract = {Chromosome 22q11.2 deletion syndrome is a highly prevalent genetic disorder whose manifestations include developmental disability and sometimes mental retardation. The few studies that have examined brain morphology in different samples from this population have found similar general patterns, mostly using region of interest measures. We employed voxel-based techniques to concurrently examine specific morphologic changes in multiple brain tissue measures. Results were similar to previous findings of volumetric reductions in the posterior brain. They also extended them in two ways. First, our methods provided greater specificity in the localization of changes detected. Second, the combination of our measures of gray and white matter along with cerebrospinal fluid volume and fractional anisotropy, which indicates the structure of white matter, showed a posterior displacement of and morphologic changes to the corpus callosum in affected children.}, doi = {j.neuroimage.2004.11.018}, file = {attachment\:Simon2005NeuroImage.pdf:attachment\:Simon2005NeuroImage.pdf:PDF}, publisher = {Elsevier}, year = 2005 } @Article{Hagmann2008PLoSBiol, Author = {Hagmann, P and Cammoun, L and Gigandet, X and Meuli, R and Honey, C J and Wedeen, Van J. and Sporns, Olaf }, Title = {Mapping the structural core of human cerebral cortex}, Journal = {PLoS Biol}, Volume = {6}, Number = {7}, Pages = {e159}, abstract = {Structurally segregated and functionally specialized regions of the human cerebral cortex are interconnected by a dense network of cortico-cortical axonal pathways. By using diffusion spectrum imaging, we noninvasively mapped these pathways within and across cortical hemispheres in individual human participants. An analysis of the resulting large-scale structural brain networks reveals a structural core within posterior medial and parietal cerebral cortex, as well as several distinct temporal and frontal modules. Brain regions within the structural core share high degree, strength, and betweenness centrality, and they constitute connector hubs that link all major structural modules. The structural core contains brain regions that form the posterior components of the human default network. Looking both within and outside of core regions, we observed a substantial correspondence between structural connectivity and resting-state functional connectivity measured in the same participants. 
The spatial and topological centrality of the core within cortex suggests an important role in functional integration.}, doi = {doi:10.1371/journal.pbio.0060159}, file = {attachment\:Hagmann2008PLoSBiol.pdf:attachment\:Hagmann2008PLoSBiol.pdf:PDF}, year = 2008 } @Article{menzies2008wma, Author = {Menzies, L. and Williams, G.B. and Chamberlain, S.R. and Ooi, C. and Fineberg, N. and Suckling, J. and Sahakian, B.J. and Robbins, T.W. and Bullmore, E.T.}, Title = {{White matter abnormalities in patients with obsessive-compulsive disorder and their first-degree relatives}}, Journal = {American Journal of Psychiatry}, Volume = {165}, Number = {10}, Pages = {1308}, publisher = {Am Psychiatric Assoc}, year = 2008 } @Article{Gong2008CerebralCortex, Author = {Gong, Gaolang and He, Yong and Concha, Luis and Lebel, Catherine and Gross, Donald W. and Evans, Alan C. and Beaulieu, Christian}, Title = {{Mapping Anatomical Connectivity Patterns of Human Cerebral Cortex Using In Vivo Diffusion Tensor Imaging Tractography}}, Journal = {Cereb. Cortex}, Pages = {bhn102}, abstract = {The characterization of the topological architecture of complex networks underlying the structural and functional organization of the brain is a basic challenge in neuroscience. However, direct evidence for anatomical connectivity networks in the human brain remains scarce. Here, we utilized diffusion tensor imaging deterministic tractography to construct a macroscale anatomical network capturing the underlying common connectivity pattern of human cerebral cortex in a large sample of subjects (80 young adults) and further quantitatively analyzed its topological properties with graph theoretical approaches. The cerebral cortex was divided into 78 cortical regions, each representing a network node, and 2 cortical regions were considered connected if the probability of fiber connections exceeded a statistical criterion. The topological parameters of the established cortical network (binarized) resemble that of a "small-world" architecture characterized by an exponentially truncated power-law distribution. These characteristics imply high resilience to localized damage. Furthermore, this cortical network was characterized by major hub regions in association cortices that were connected by bridge connections following long-range white matter pathways. 
Our results are compatible with previous structural and functional brain networks studies and provide insight into the organizational principles of human brain anatomical networks that underlie functional states.}, doi = {10.1093/cercor/bhn102}, eprint = {http://cercor.oxfordjournals.org/cgi/reprint/bhn102v1.pdf}, file = { attachment\:Gong2008CerebralCortex.pdf: attachment\:Gong2008CerebralCortex.pdf:PDF}, url = {http://cercor.oxfordjournals.org/cgi/content/abstract/bhn102v1}, year = 2008 } @Article{Carlsson2009, Author = {Carlsson, Gunnar and Emoli, Facundo M}, Title = {{Characterization, stability and convergence of hierarchical clustering methods ´}}, Journal = {Methods}, Number = {April}, Pages = {1--23}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Carlsson, Emoli - 2009 - Characterization, stability and convergence of hierarchical clustering methods ´.pdf:pdf}, year = 2009 } @Article{Avram2008NMRBiomed, Author = {Avram, Liat and \{O} zarslan, Evren and Assaf, Yaniv and Bar-Shir, Amnon and Cohen, Yoram and Basser, Peter J.}, Title = {Three-dimensional water diffusion in impermeable cylindrical tubes: theory versus experiments}, Journal = {NMR IN BIOMEDICINE}, Volume = {21}, Pages = {888898}, abstract = {Characterizing diffusion of gases and liquids within pores is important in understanding numerous transport processes and affects a wide range of practical applications. Previous measurements of the pulsed gradient stimulated echo (PGSTE) signal attenuation, E(q), of water within nerves and impermeable cylindrical microcapillary tubes showed it to be exquisitely sensitive to the orientation of the applied wave vector, q, with respect to the tube axis in the high-q regime. Here, we provide a simple three-dimensional model to explain this angular dependence by decomposing the average propagator, which describes the net displacement of water molecules, into components parallel and perpendicular to the tube wall, in which axial diffusion is free and radial diffusion is restricted. The model faithfully predicts the experimental data, not only the observed diffraction peaks in E(q) when the diffusion gradients are approximately normal to the tube wall, but their sudden disappearance when the gradient orientation possesses a small axial component. The model also successfully predicts the dependence of E(q) on gradient pulse duration and on gradient strength as well as tube inner diameter. To account for the deviation from the narrow pulse approximation in the PGSTE sequence, we use Callaghans matrix operator framework, which this study validates experimentally for the first time. We also show how to combine average propagators derived for classical one-dimensional and two-dimensional models of restricted diffusion (e.g. between plates, within cylinders) to construct composite three-dimensional models of diffusion in complex media containing pores (e.g. rectangular prisms and/ or capped cylinders) having a distribution of orientations, sizes, and aspect ratios. This three-dimensional modeling framework should aid in describing diffusion in numerous biological systems and in a myriad of materials sciences applications.}, owner = {ian}, timestamp = {2009.03.05}, year = 2008 } @Article{Barmpoutis2007IEEETransMedImag, Author = {Barmpoutis, A. and Vemuri, B. C. and Shepherd, T. M. and Forder, J. 
R.}, Title = {Tensor splines for interpolation and approximation of \{{D}{T}-{MRI}\} with applications to segmentation of isolated rat hippocampi}, Journal = {IEEE Transactions on Medical Imaging}, Volume = {26}, Number = {11}, Pages = {1537-1546}, abstract = {In this paper, we present novel algorithms for statistically robust interpolation and approximation of diffusion tensors-which are symmetric positive definite (SPD) matrices-and use them in developing a significant extension to an existing probabilistic algorithm for scalar field segmentation, in order to segment diffusion tensor magnetic resonance imaging (DT-MRI) datasets. Using the Riemannian metric on the space of SPD matrices, we present a novel and robust higher order (cubic) continuous tensor product of -splines algorithm to approximate the SPD diffusion tensor fields. The resulting approximations are appropriately dubbed tensor splines. Next, we segment the diffusion tensor field by jointly estimating the label (assigned to each voxel) field, which is modeled by a Gauss Markov measure field (GMMF) and the parameters of each smooth tensor spline model representing the labeled regions. Results of interpolation, approximation, and segmentation are presented for synthetic data and real diffusion tensor fields from an isolated rat hippocampus, along with validation. We also present comparisons of our algorithms with existing methods and show significantly improved results in the presence of noise as well as outliers. }, doi = {10.1109/TMI.2007.903195}, year = 2007 } @Article{Kanaan2006, Author = {Kanaan, Richard a and Shergill, Sukhwinder S and Barker, Gareth J and Catani, Marco and Ng, Virginia W and Howard, Robert and McGuire, Philip K and Jones, Derek K}, Title = {{Tract-specific anisotropy measurements in diffusion tensor imaging.}}, Journal = {Psychiatry research}, Volume = {146}, Number = {1}, Pages = {73--82}, abstract = {Diffusion tensor magnetic resonance imaging (DT-MRI) has been used to examine the microstructure of individual white matter tracts, often in neuropsychiatric conditions without identifiable focal pathology. However, the voxel-based group-mapping and region-of-interest (ROI) approaches used to analyse the data have inherent conceptual and practical difficulties. Taking the example of the genu of the corpus callosum in a sample of schizophrenic patients, we discuss the difficulties in attempting to replicate a voxel-based finding of reduced anisotropy using two ROI methods. Firstly we consider conventional ROIs; secondly, we present a novel tractography-based approach. The problems of both methods are explored, particularly of high variance and ROI definition. The potential benefits of the tractographic method for neuropsychiatric conditions with subtle and diffuse pathology are outlined.}, doi = {10.1016/j.pscychresns.2005.11.002}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Kanaan et al. - 2006 - Tract-specific anisotropy measurements in diffusion tensor imaging..pdf:pdf}, issn = {0165-1781}, keywords = {Adult,Anisotropy,Brain,Brain: pathology,Diffusion Magnetic Resonance Imaging,Female,Humans,Male,Middle Aged,Schizophrenia,Schizophrenia: pathology}, pmid = {16376059}, url = {http://www.ncbi.nlm.nih.gov/pubmed/16376059}, year = 2006 } @Article{MCC+99, Author = {Mori, S. and Crain, B. J. and Chacko, V. P. and van Zijl, P. 
C.}, Title = {Three-dimensional tracking of axonal projections in the brain by magnetic resonance imaging.}, Journal = {Ann Neurol}, Volume = {45}, Number = {2}, Pages = {265-9}, abstract = {The relationship between brain structure and complex behavior is governed by large-scale neurocognitive networks. The availability of a noninvasive technique that can visualize the neuronal projections connecting the functional centers should therefore provide new keys to the understanding of brain function. By using high-resolution three-dimensional diffusion magnetic resonance imaging and a newly designed tracking approach, we show that neuronal pathways in the rat brain can be probed in situ. The results are validated through comparison with known anatomical locations of such fibers.}, authoraddress = {Department of Radiology, Johns Hopkins Medical School, Baltimore, MD, USA.}, keywords = {Animals ; Axons/*physiology ; Brain/*anatomy \& histology ; Magnetic Resonance Imaging/*methods ; Rats}, language = {eng}, medline-crdt = {1999/02/16 00:00}, medline-da = {19990329}, medline-dcom = {19990329}, medline-edat = {1999/02/16}, medline-fau = {Mori, S ; Crain, B J ; Chacko, V P ; van Zijl, P C}, medline-is = {0364-5134 (Print)}, medline-jid = {7707449}, medline-jt = {Annals of neurology}, medline-lr = {20061115}, medline-mhda = {1999/02/16 00:01}, medline-own = {NLM}, medline-pl = {UNITED STATES}, medline-pmid = {9989633}, medline-pst = {ppublish}, medline-pt = {Journal Article ; Research Support, Non-U.S. Gov't}, medline-sb = {IM}, medline-so = {Ann Neurol. 1999 Feb;45(2):265-9.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=9989633}, year = 1999 } @Article{Parker2003, Author = {Parker, Geoffrey J M and Haroon, Hamied a and Wheeler-Kingshott, Claudia a M}, Title = {{A framework for a streamline-based probabilistic index of connectivity (PICo) using a structural interpretation of MRI diffusion measurements.}}, Journal = {Journal of magnetic resonance imaging : JMRI}, Volume = {18}, Number = {2}, Pages = {242--54}, abstract = {PURPOSE: To establish a general methodology for quantifying streamline-based diffusion fiber tracking methods in terms of probability of connection between points and/or regions. MATERIALS AND METHODS: The commonly used streamline approach is adapted to exploit the uncertainty in the orientation of the principal direction of diffusion defined for each image voxel. Running the streamline process repeatedly using Monte Carlo methods to exploit this inherent uncertainty generates maps of connection probability. Uncertainty is defined by interpreting the shape of the diffusion orientation profile provided by the diffusion tensor in terms of the underlying microstructure. RESULTS: Two candidates for describing the uncertainty in the diffusion tensor are proposed and maps of probability of connection to chosen start points or regions are generated in a number of major tracts. 
CONCLUSION: The methods presented provide a generic framework for utilizing streamline methods to generate probabilistic maps of connectivity.}, doi = {10.1002/jmri.10350}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Parker, Haroon, Wheeler-Kingshott - 2003 - A framework for a streamline-based probabilistic index of connectivity (PICo) using a structural interpretation of MRI diffusion measurements..pdf:pdf}, issn = {1053-1807}, keywords = {Anisotropy,Brain,Brain: anatomy \& histology,Diffusion,Diffusion Magnetic Resonance Imaging,Diffusion Magnetic Resonance Imaging: methods,Echo-Planar Imaging,Humans,Models, Statistical,Monte Carlo Method,Probability,Uncertainty}, pmid = {12884338}, url = {http://www.ncbi.nlm.nih.gov/pubmed/12884338}, year = 2003 } @Article{December2006, Author = {December, Draft}, Title = {{A n I n t r o d u c t i o n t o P r o g r a m m i n g f o r M e d i c a l I m a g e A n a l y s i s w i t h T h e V i s u a l i z a t i o n T o o l k i t X e n o p h o n P a p a d e m e t r i s}}, Journal = {Control}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/December - 2006 - A n I n t r o d u c t i o n t o P r o g r a m m i n g f o r M e d i c a l I m a g e A n a l y s i s w i t h T h e V i s u a l i z a t i o n T o o l k i t X e n o p h o n P a p a d e m e t r i s.pdf:pdf}, year = 2006 } @Article{Komodakis2006, Author = {Komodakis, Nikos}, Title = {{Optimization Algorithms for Discrete Markov Random Fields , with Applications to Computer Vision}}, Journal = {Optimization}, Number = {May}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Komodakis - 2006 - Optimization Algorithms for Discrete Markov Random Fields , with Applications to Computer Vision.pdf:pdf}, year = 2006 } @Article{Duru2010a, Author = {Duru, Dilek G\"{o}ksel and Ozkan, Mehmed}, Title = {{Determination of neural fiber connections based on data structure algorithm.}}, Journal = {Computational intelligence and neuroscience}, Volume = {2010}, Pages = {251928}, abstract = {The brain activity during perception or cognition is mostly examined by functional magnetic resonance imaging (fMRI). However, the cause of the detected activity relies on the anatomy. Diffusion tensor magnetic resonance imaging (DTMRI) as a noninvasive modality providing in vivo anatomical information allows determining neural fiber connections which leads to brain mapping. Still a complete map of fiber paths representing the human brain is missing in literature. One of the main drawbacks of reliable fiber mapping is the correct detection of the orientation of multiple fibers within a single imaging voxel. In this study a method based on linear data structures is proposed to define the fiber paths regarding their diffusivity. Another advantage of the proposed method is that the analysis is applied on entire brain diffusion tensor data. 
The implementation results are promising, so that the method will be developed as a rapid fiber tractography algorithm for the clinical use as future study.}, doi = {10.1155/2010/251928}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Duru, Ozkan - 2010 - Determination of neural fiber connections based on data structure algorithm..pdf:pdf}, issn = {1687-5273}, keywords = {Algorithms,Brain,Brain: anatomy \& histology,Diffusion Tensor Imaging,Diffusion Tensor Imaging: methods,Humans,Image Processing, Computer-Assisted,Image Processing, Computer-Assisted: methods,Linear Models,Neural Pathways,Neural Pathways: anatomy \& histology,Uncertainty}, month = jan, pmid = {20069047}, url = {http://www.pubmedcentral.nih.gov/articlerender.fcgi?artid=2801001\&tool=pmcentrez\&rendertype=abstract}, year = 2010 } @Article{Cook2006, Author = {Cook, P A and Bai, Y and Seunarine, K K and Hall, M G and Parker, G J and Alexander, D C}, Title = {{Camino : Open-Source Diffusion-MRI Reconstruction and Processing}}, Journal = {Statistics}, Volume = {14}, Pages = {22858--22858}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Cook et al. - 2006 - Camino Open-Source Diffusion-MRI Reconstruction and Processing.pdf:pdf}, year = 2006 } @Article{CLC+99, Author = {Conturo, T. E. and Lori, N. F. and Cull, T. S. and Akbudak, E. and Snyder, A. Z. and Shimony, J. S. and McKinstry, R. C. and Burton, H. and Raichle, M. E.}, Title = {Tracking neuronal fiber pathways in the living human brain.}, Journal = {Proc Natl Acad Sci U S A}, Volume = {96}, Number = {18}, Pages = {10422-7}, abstract = {Functional imaging with positron emission tomography and functional MRI has revolutionized studies of the human brain. Understanding the organization of brain systems, especially those used for cognition, remains limited, however, because no methods currently exist for noninvasive tracking of neuronal connections between functional regions [Crick, F. \& Jones, E. (1993) Nature (London) 361, 109-110]. Detailed connectivities have been studied in animals through invasive tracer techniques, but these invasive studies cannot be done in humans, and animal results cannot always be extrapolated to human systems. We have developed noninvasive neuronal fiber tracking for use in living humans, utilizing the unique ability of MRI to characterize water diffusion. We reconstructed fiber trajectories throughout the brain by tracking the direction of fastest diffusion (the fiber direction) from a grid of seed points, and then selected tracks that join anatomically or functionally (functional MRI) defined regions. We demonstrate diffusion tracking of fiber bundles in a variety of white matter classes with examples in the corpus callosum, geniculo-calcarine, and subcortical association pathways. Tracks covered long distances, navigated through divergences and tight curves, and manifested topological separations in the geniculo-calcarine tract consistent with tracer studies in animals and retinotopy studies in humans. Additionally, previously undescribed topologies were revealed in the other pathways. This approach enhances the power of modern imaging by enabling study of fiber connections among anatomically and functionally defined brain regions in individual human subjects.}, authoraddress = {Department of Radiology and Neuroimaging Laboratory, Mallinckrodt Institute of Radiology, Washington University School of Medicine, 4525 Scott Avenue, St. Louis, MO 63110, USA. 
tconturo@npg.wustl.edu}, keywords = {Brain/anatomy \& histology/*physiology ; *Brain Mapping ; Humans ; Magnetic Resonance Imaging ; Nerve Fibers/*physiology ; Neural Pathways/physiology ; Neurons/*physiology}, language = {eng}, medline-crdt = {1999/09/01 00:00}, medline-da = {19991007}, medline-dcom = {19991007}, medline-edat = {1999/09/01}, medline-fau = {Conturo, T E ; Lori, N F ; Cull, T S ; Akbudak, E ; Snyder, A Z ; Shimony, J S ; McKinstry, R C ; Burton, H ; Raichle, M E}, medline-gr = {P01 NS06833/NS/NINDS NIH HHS/United States}, medline-is = {0027-8424 (Print)}, medline-jid = {7505876}, medline-jt = {Proceedings of the National Academy of Sciences of the United States of America}, medline-lr = {20081120}, medline-mhda = {1999/09/01 00:01}, medline-oid = {NLM: PMC17904}, medline-own = {NLM}, medline-pl = {UNITED STATES}, medline-pmc = {PMC17904}, medline-pmid = {10468624}, medline-pst = {ppublish}, medline-pt = {Journal Article ; Research Support, Non-U.S. Gov't ; Research Support, U.S. Gov't, P.H.S.}, medline-sb = {IM}, medline-so = {Proc Natl Acad Sci U S A. 1999 Aug 31;96(18):10422-7.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=10468624}, year = 1999 } @Article{Bradski, Author = {Bradski, Gary and Kaehler, Adrian}, Title = {{No Title}}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Bradski, Kaehler - Unknown - No Title.pdf:pdf} } @Article{Qazi_Neuroimage08, Author = {Qazi, A. A. and Radmanesh, A. and O'Donnell, L. and Kindlmann, G. and Peled, S. and Whalen, S. and Westin, C. F. and Golby, A. J.}, Title = {Resolving crossings in the corticospinal tract by two-tensor streamline tractography: {M}ethod and clinical assessment using f{MRI}.}, Journal = {Neuroimage}, abstract = {An inherent drawback of the traditional diffusion tensor model is its limited ability to provide detailed information about multidirectional fiber architecture within a voxel. This leads to erroneous fiber tractography results in locations where fiber bundles cross each other. This may lead to the inability to visualize clinically important tracts such as the lateral projections of the corticospinal tract. In this report, we present a deterministic two-tensor eXtended Streamline Tractography (XST) technique, which successfully traces through regions of crossing fibers. We evaluated the method on simulated and in vivo human brain data, comparing the results with the traditional single-tensor and with a probabilistic tractography technique. By tracing the corticospinal tract and correlating with fMRI-determined motor cortex in both healthy subjects and patients with brain tumors, we demonstrate that two-tensor deterministic streamline tractography can accurately identify fiber bundles consistent with anatomy and previously not detected by conventional single-tensor tractography. When compared to the dense connectivity maps generated by probabilistic tractography, the method is computationally efficient and generates discrete geometric pathways that are simple to visualize and clinically useful. 
Detection of crossing white matter pathways can improve neurosurgical visualization of functionally relevant white matter areas.}, authoraddress = {Department of Radiology, Brigham and Women's Hospital, Harvard Medical School, USA; University of Copenhagen, Denmark.}, language = {ENG}, medline-aid = {S1053-8119(08)00779-9 [pii] ; 10.1016/j.neuroimage.2008.06.034 [doi]}, medline-crdt = {2008/07/29 09:00}, medline-da = {20080811}, medline-dep = {20080708}, medline-edat = {2008/07/29 09:00}, medline-is = {1095-9572 (Electronic)}, medline-jid = {9215515}, medline-jt = {NeuroImage}, medline-mhda = {2008/07/29 09:00}, medline-own = {NLM}, medline-phst = {2008/04/30 [received] ; 2008/06/19 [revised] ; 2008/06/19 [accepted]}, medline-pmid = {18657622}, medline-pst = {aheadofprint}, medline-pt = {JOURNAL ARTICLE}, medline-so = {Neuroimage. 2008 Jul 8.}, medline-stat = {Publisher}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=18657622}, year = 2008 } @Article{Nannen2003, Author = {Nannen, Volker}, Title = {{The Paradox of Overfitting}}, Journal = {Computer}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Nannen - 2003 - The Paradox of Overfitting.pdf:pdf}, year = 2003 } @Article{Klein2007NeuroImage, Author = {Klein, J. C. and Behrens, T. E. and Robson, M. D. and Mackay, C. E. and Higham, D.J. and Johansen-Berg, H.}, Title = {Connectivity-based parcellation of human cortex using diffusion \{{M}{RI}\}: establishing reproducibility, validity and observer independence in \{{B}{A}\} 44/45 and \{{S}{MA}\}/pre-\{{S}{MA}\}}, Journal = {NeuroImage}, Volume = {34}, Number = {1}, Pages = {204-211}, abstract = {The identification of specialized, functional regions of the human cortex is a vital precondition for neuroscience and clinical neurosurgery. Functional imaging modalities are used for their delineation in living subjects, but these methods rely on subject cooperation, and many regions of the human brain cannot be activated specifically. Diffusion tractography is a novel tool to identify such areas in the human brain, utilizing underlying white matter pathways to separate regions of differing specialization. We explore the reproducibility, generalizability and validity of diffusion tractography-based localization in four functional areas across subjects, timepoints and scanners, and validate findings against fMRI and post-mortem cytoarchitectonic data. With reproducibility across modalities, clustering methods, scanners, timepoints, and subjects in the order of 80-90%, we conclude that diffusion tractography represents a useful and objective tool for parcellation of the human cortex into functional regions, enabling studies into individual functional anatomy even when there are no specific activation paradigms available.}, file = {attachment\:Klein2007NeuroImage.pdf:attachment\:Klein2007NeuroImage.pdf:PDF}, year = 2007 } @Article{McNab2008MRM, Author = {Jennifer A. McNab and Karla L. Miller}, Title = {Sensitivity of diffusion weighted steady state free precession to anisotropic diffusion}, Journal = {Magnetic Resonance in Medicine}, Volume = {60}, Number = {2}, Pages = {405-413}, abstract = {Diffusion-weighted steady-state free precession (DW-SSFP) accumulates signal from multiple echoes over several TRs yielding a strong sensitivity to diffusion with short gradient durations and imaging times. 
Although the DW-SSFP signal is well characterized for isotropic, Gaussian diffusion, it is unclear how the DW-SSFP signal propagates in inhomogeneous media such as brain tissue. This article presents a more general analytical expression for the DW-SSFP signal which accommodates Gaussian and non-Gaussian spin displacement probability density functions. This new framework for calculating the DW-SSFP signal is used to investigate signal behavior for a single fiber, crossing fibers, and reflective barriers. DW-SSFP measurements in the corpus callosum of a fixed brain are shown to be in good agreement with theoretical predictions. Further measurements in fixed brain tissue also demonstrate that 3D DW-SSFP out-performs 3D diffusion weighted spin echo in both SNR and CNR efficiency providing a compelling example of its potential to be used for high resolution diffusion tensor imaging.}, owner = {ian}, timestamp = {2009.03.27}, year = 2008 } @Article{Corney2007, Author = {Corney, David and Lotto, R Beau}, Title = {{What are lightness illusions and why do we see them?}}, Journal = {PLoS computational biology}, Volume = {3}, Number = {9}, Pages = {1790--800}, abstract = {Lightness illusions are fundamental to human perception, and yet why we see them is still the focus of much research. Here we address the question by modelling not human physiology or perception directly as is typically the case but our natural visual world and the need for robust behaviour. Artificial neural networks were trained to predict the reflectance of surfaces in a synthetic ecology consisting of 3-D "dead-leaves" scenes under non-uniform illumination. The networks learned to solve this task accurately and robustly given only ambiguous sense data. In addition--and as a direct consequence of their experience--the networks also made systematic "errors" in their behaviour commensurate with human illusions, which includes brightness contrast and assimilation--although assimilation (specifically White's illusion) only emerged when the virtual ecology included 3-D, as opposed to 2-D scenes. Subtle variations in these illusions, also found in human perception, were observed, such as the asymmetry of brightness contrast. These data suggest that "illusions" arise in humans because (i) natural stimuli are ambiguous, and (ii) this ambiguity is resolved empirically by encoding the statistical relationship between images and scenes in past visual experience. Since resolving stimulus ambiguity is a challenge faced by all visual systems, a corollary of these findings is that human illusions must be experienced by all visual animals regardless of their particular neural machinery. The data also provide a more formal definition of illusion: the condition in which the true source of a stimulus differs from what is its most likely (and thus perceived) source. 
As such, illusions are not fundamentally different from non-illusory percepts, all being direct manifestations of the statistical relationship between images and scenes.}, doi = {10.1371/journal.pcbi.0030180}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Corney, Lotto - 2007 - What are lightness illusions and why do we see them.pdf:pdf}, issn = {1553-7358}, keywords = {Artificial Intelligence,Biomimetics,Biomimetics: methods,Humans,Image Interpretation, Computer-Assisted,Image Interpretation, Computer-Assisted: methods,Nerve Net,Nerve Net: physiology,Optical Illusions,Optical Illusions: physiology,Photometry,Photometry: methods,Visual Perception,Visual Perception: physiology}, pmid = {17907795}, url = {http://www.ncbi.nlm.nih.gov/pubmed/17907795}, year = 2007 } @Article{Commowick2008, Author = {Commowick, O and Arsigny, V and Isambert, A and Costa, J and Dhermain, F and Bidault, F and Bondiau, P-Y and Ayache, N and Malandain, G}, Title = {{An efficient locally affine framework for the smooth registration of anatomical structures.}}, Journal = {Medical image analysis}, Volume = {12}, Number = {4}, Pages = {427--41}, abstract = {Intra-subject and inter-subject nonlinear registration based on dense transformations requires the setting of many parameters, mainly for regularization. This task is a major issue, as the global quality of the registration will depend on it. Setting these parameters is, however, very hard, and they may have to be tuned for each patient when processing data acquired by different centers or using different protocols. Thus, we present in this article a method to introduce more coherence in the registration by using fewer degrees of freedom than with a dense registration. This is done by registering the images only on user-defined areas, using a set of affine transformations, which are optimized together in a very efficient manner. Our framework also ensures a smooth and coherent transformation thanks to a new regularization of the affine components. Finally, we ensure an invertible transformation thanks to the Log-Euclidean polyaffine framework. This allows us to get a more robust and very efficient registration method, while obtaining good results as explained below. We performed a qualitative and quantitative evaluation of the obtained results on two applications: first on atlas-based brain segmentation, comparing our results with a dense registration algorithm. Then the second application for which our framework is particularly well suited concerns bone registration in the lower-abdomen area. We obtain in this case a better positioning of the femoral heads than with a dense registration. For both applications, we show a significant improvement in computation time, which is crucial for clinical applications.}, doi = {10.1016/j.media.2008.01.002}, file = {::}, issn = {1361-8423}, keywords = {Algorithms,Brain,Brain: anatomy \& histology,Diagnostic Imaging,Diagnostic Imaging: methods,Humans,Image Processing, Computer-Assisted,Radiotherapy Planning, Computer-Assisted,Radiotherapy Planning, Computer-Assisted: methods,Sensitivity and Specificity}, month = aug, pmid = {18325825}, url = {http://www.ncbi.nlm.nih.gov/pubmed/18325825}, year = 2008 } @Article{Kerkyacharian2007a, Author = {Kerkyacharian, G{\'e}rard and Petrushev, Pencho and Picard, Dominique and Willer, Thomas}, Title = {{Needlet algorithms for estimation in inverse problems}}, Journal = {Electron. J.
Stat}, Volume = {1}, Pages = {30--76}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Kerkyacharian et al. - 2007 - Needlet algorithms for estimation in inverse problems.pdf:pdf}, year = 2007 } @InProceedings{Leow2008ISBI, Author = {Leow, Alex D. and Zhu, Siwei and McMahon, Katie L. and {de Zubicaray}, Greig I. and Meredith, G. Matthew and Wright, Margaret and Thompson, Paul M.}, Title = {The Tensor Distribution Function}, BookTitle = {5th IEEE International Symposium on Biomedical Imaging: From Nano to Macro}, Pages = {FR-P2a (poster)}, abstract = {Diffusion weighted MR imaging is a powerful tool that can be employed to study white matter microstructure by examining the 3D displacement profile of water molecules in brain tissue. By applying diffusion-sensitizing gradients along a minimum of 6 directions, second-order tensors can be computed to model dominant diffusion processes. However, it has been shown that conventional DTI is not sufficient to resolve crossing fiber tracts. More recently, High Angular Resolution Diffusion Imaging (HARDI) seeks to address this issue by employing more than 6 gradient directions. In this paper, we introduce the Tensor Distribution Function (TDF), a probability function defined on the space of symmetric and positive definite matrices. Here, fiber crossing is modeled as an ensemble of Gaussian diffusion processes with weights specified by the TDF. Once this optimal TDF is determined, ODF can easily be computed by analytic integration of the resulting displacement probability function. Moreover, principal fiber directions can also be directly derived from the TDF.}, file = {attachment\:Leow2008ISBI.pdf:attachment\:Leow2008ISBI.pdf:PDF}, year = 2008 } @Article{Wainwright, Author = {Wainwright, Martin}, Title = {{Graphical models and variational methods: Message-passing, convex relaxations, and all that}}, Journal = {Electrical Engineering}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Wainwright - Unknown - Graphical models and variational methods Message-passing , convex relaxations , and all that.pdf:pdf} } @Article{Tuch2004, Author = {Tuch, DS}, Title = {{Q-ball imaging}}, Journal = {Magnetic Resonance in Medicine}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Tuch - 2004 - Q-ball imaging.pdf:pdf}, url = {http://noodle.med.yale.edu/\~{}mjack/papers/tuch-2004.pdf}, year = 2004 } @Article{RHW+03, Author = {Reese, T. G. and Heid, O. and Weisskoff, R. M. and Wedeen, V. J.}, Title = {Reduction of eddy-current-induced distortion in diffusion {MRI} using a twice-refocused spin echo.}, Journal = {Magn Reson Med}, Volume = {49}, Number = {1}, Pages = {177-82}, abstract = {Image distortion due to field gradient eddy currents can create image artifacts in diffusion-weighted MR images. These images, acquired by measuring the attenuation of NMR signal due to directionally dependent diffusion, have recently been shown to be useful in the diagnosis and assessment of acute stroke and in mapping of tissue structure. This work presents an improvement on the spin-echo (SE) diffusion sequence that displays less distortion and consequently improves image quality. Adding a second refocusing pulse provides better image quality with less distortion at no cost in scanning efficiency or effectiveness, and allows more flexible diffusion gradient timing.
By adjusting the timing of the diffusion gradients, eddy currents with a single exponential decay constant can be nulled, and eddy currents with similar decay constants can be greatly reduced. This new sequence is demonstrated in phantom measurements and in diffusion anisotropy images of normal human brain.}, authoraddress = {Department of Radiology, Massachusetts General Hospital, Boston, Massachusetts, USA. reese@nmr.MGH.harvard.edu}, keywords = {*Artifacts ; Brain/anatomy \& histology/pathology ; Echo-Planar Imaging/methods ; Humans ; Magnetic Resonance Imaging/*methods ; Phantoms, Imaging ; Stroke/diagnosis}, language = {eng}, medline-aid = {10.1002/mrm.10308 [doi]}, medline-ci = {Copyright 2003 Wiley-Liss, Inc.}, medline-crdt = {2003/01/02 04:00}, medline-da = {20030101}, medline-dcom = {20030422}, medline-edat = {2003/01/02 04:00}, medline-fau = {Reese, T G ; Heid, O ; Weisskoff, R M ; Wedeen, V J}, medline-gr = {R01 MH64044/MH/NIMH NIH HHS/United States}, medline-is = {0740-3194 (Print)}, medline-jid = {8505245}, medline-jt = {Magnetic resonance in medicine : official journal of the Society of Magnetic Resonance in Medicine / Society of Magnetic Resonance in Medicine}, medline-lr = {20071115}, medline-mhda = {2003/04/23 05:00}, medline-own = {NLM}, medline-pl = {United States}, medline-pmid = {12509835}, medline-pst = {ppublish}, medline-pt = {Journal Article ; Research Support, U.S. Gov't, P.H.S.}, medline-sb = {IM}, medline-so = {Magn Reson Med. 2003 Jan;49(1):177-82.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=12509835}, year = 2003 } @Article{hyvarinen2000ica, Author = {Hyv{\"a}rinen, A. and Oja, E.}, Title = {{Independent component analysis: algorithms and applications}}, Journal = {Neural Networks}, Volume = {13}, Number = {4-5}, Pages = {411--430}, publisher = {Elsevier}, year = 2000 } @Article{Friedman2008, Author = {Friedman, Jerome and Hastie, Trevor}, Title = {{Regularization Paths for Generalized Linear Models via Coordinate Descent}}, Pages = {1--22}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Friedman, Hastie - 2008 - Regularization Paths for Generalized Linear Models via Coordinate Descent.pdf:pdf}, year = 2008 } @conference{lee2007trajectory, author = {Lee, J.G. and Han, J. and Whang, K.Y.}, booktitle = {Proceedings of the 2007 ACM SIGMOD international conference on Management of data}, organization = {ACM}, pages = {604}, title = {{Trajectory clustering: a partition-and-group framework}}, year = 2007 } @Misc{Ipython2008, Author = {{The IPython Development Team}}, Title = {{IPython Documentation}}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Ipython, Team - 2008 - IPython Documentation.pdf:pdf}, year = 2008 } @Book{DiffMRIBook, Editor = {Heidi Johansen-Berg and Timothy E.J. Behrens}, Title = {Diffusion {MRI}}, Publisher = {Academic Press}, year = 2009 } @Article{Guo2005, Author = {Guo, D. and Shamai, S.
and Verdu, S.}, Title = {{Mutual Information and Minimum Mean-Square Error in Gaussian Channels}}, Journal = {IEEE Transactions on Information Theory}, Volume = {51}, Number = {4}, Pages = {1261--1282}, doi = {10.1109/TIT.2005.844072}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Guo, Shamai, Verdu - 2005 - Mutual Information and Minimum Mean-Square Error in Gaussian Channels.pdf:pdf}, issn = {0018-9448}, month = apr, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=1412024}, year = 2005 } @Article{Mishra2007, Author = {Mishra, Arabinda and Lu, Yonggang and Choe, Ann S and Aldroubi, Akram and Gore, John C and Anderson, Adam W and Ding, Zhaohua}, Title = {{An image-processing toolset for diffusion tensor tractography.}}, Journal = {Magnetic resonance imaging}, Volume = {25}, Number = {3}, Pages = {365--76}, abstract = {Diffusion tensor imaging (DTI)-based fiber tractography holds great promise in delineating neuronal fiber tracts and, hence, providing connectivity maps of the neural networks in the human brain. An array of image-processing techniques has to be developed to turn DTI tractography into a practically useful tool. To this end, we have developed a suite of image-processing tools for fiber tractography with improved reliability. This article summarizes the main technical developments we have made to date, which include anisotropic smoothing, anisotropic interpolation, Bayesian fiber tracking and automatic fiber bundling. A primary focus of these techniques is the robustness to noise and partial volume averaging, the two major hurdles to reliable fiber tractography. Performance of these techniques has been comprehensively examined with simulated and in vivo DTI data, demonstrating improvements in the robustness and reliability of DTI tractography.}, doi = {10.1016/j.mri.2006.10.006}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Mishra et al. - 2007 - An image-processing toolset for diffusion tensor tractography..pdf:pdf}, issn = {0730-725X}, keywords = {Algorithms,Artificial Intelligence,Brain,Brain: anatomy \& histology,Diffusion Magnetic Resonance Imaging,Diffusion Magnetic Resonance Imaging: methods,Humans,Image Enhancement,Image Enhancement: methods,Image Interpretation, Computer-Assisted,Image Interpretation, Computer-Assisted: methods,Nerve Net,Nerve Net: anatomy \& histology,Neural Pathways,Neural Pathways: anatomy \& histology,Reproducibility of Results,Sensitivity and Specificity,Software}, pmid = {17371726}, url = {http://www.ncbi.nlm.nih.gov/pubmed/17371726}, year = 2007 } @conference{deriche1990dcm, author = {Deriche, R. and Faugeras, O.}, booktitle = {Pattern Recognition, 1990. Proceedings., 10th International Conference on}, title = {{2-D curve matching using high curvature points: application tostereo vision}}, volume = {1}, year = 1990 } @Article{Vogiatzis, Author = {Vogiatzis, George}, Title = {{Visual Estimation of Shape , Reflectance and Illumination}}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Vogiatzis - Unknown - Visual Estimation of Shape , Reflectance and Illumination.pdf:pdf} } @Article{vernooij2007fda, Author = {Vernooij, M W and Smits, M. 
and Wielopolski, P A and Houston, G C and Krestin, G P and van der Lugt, A.}, Title = {{Fiber density asymmetry of the arcuate fasciculus in relation to functional hemispheric language lateralization in both right-and left-handed healthy subjects: A combined fMRI and DTI study}}, Journal = {Neuroimage}, Volume = {35}, Number = {3}, Pages = {1064--1076}, file = {attachment\:vernooij_arcuate_fasciculus_2007.pdf:attachment\:vernooij_arcuate_fasciculus_2007.pdf:PDF}, publisher = {Elsevier}, year = 2007 } @Article{KanaanPsych2006, Author = {Kanaan, R. A. and Shergill, S. S. and Barker, G. J. and Catani, M. and Ng, V. W. and Howard, R. and McGuire, P. K. and Jones, D. K.}, Title = {Tract-specific anisotropy measurements in diffusion tensor imaging.}, Journal = {Psychiatry Res}, Volume = {146}, Number = {1}, Pages = {73-82}, abstract = {Diffusion tensor magnetic resonance imaging (DT-MRI) has been used to examine the microstructure of individual white matter tracts, often in neuropsychiatric conditions without identifiable focal pathology. However, the voxel-based group-mapping and region-of-interest (ROI) approaches used to analyse the data have inherent conceptual and practical difficulties. Taking the example of the genu of the corpus callosum in a sample of schizophrenic patients, we discuss the difficulties in attempting to replicate a voxel-based finding of reduced anisotropy using two ROI methods. Firstly we consider conventional ROIs; secondly, we present a novel tractography-based approach. The problems of both methods are explored, particularly of high variance and ROI definition. The potential benefits of the tractographic method for neuropsychiatric conditions with subtle and diffuse pathology are outlined.}, authoraddress = {King's College London, Institute of Psychiatry, London, UK. r.kanaan@iop.kcl.ac.uk}, keywords = {Adult ; Anisotropy ; Brain/*pathology ; *Diffusion Magnetic Resonance Imaging ; Female ; Humans ; Male ; Middle Aged ; Schizophrenia/*pathology}, language = {eng}, medline-aid = {S0925-4927(05)00197-6 [pii] ; 10.1016/j.pscychresns.2005.11.002 [doi]}, medline-crdt = {2005/12/27 09:00}, medline-da = {20060227}, medline-dcom = {20060425}, medline-dep = {20051220}, medline-edat = {2005/12/27 09:00}, medline-fau = {Kanaan, Richard A ; Shergill, Sukhwinder S ; Barker, Gareth J ; Catani, Marco ; Ng, Virginia W ; Howard, Robert ; McGuire, Philip K ; Jones, Derek K}, medline-gr = {Wellcome Trust/United Kingdom}, medline-is = {0165-1781 (Print)}, medline-jid = {7911385}, medline-jt = {Psychiatry research}, medline-lr = {20080417}, medline-mhda = {2006/04/28 09:00}, medline-own = {NLM}, medline-phst = {2005/05/24 [received] ; 2005/09/13 [revised] ; 2005/11/03 [accepted] ; 2005/12/20 [aheadofprint]}, medline-pl = {Ireland}, medline-pmid = {16376059}, medline-pst = {ppublish}, medline-pt = {Journal Article ; Research Support, Non-U.S. Gov't}, medline-sb = {IM}, medline-so = {Psychiatry Res. 2006 Jan 30;146(1):73-82. Epub 2005 Dec 20.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=16376059}, year = 2006 } @Article{Mallo, Author = {Mallo, O and Peikert, R and Sigg, C and Sadlo, F}, Title = {{Illuminated lines revisited}}, Journal = {In Proceedings of IEEE Visualization}, Volume = {pages}, Pages = {19--26}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Mallo et al. - Unknown - Illuminated lines revisited.pdf:pdf} } @Article{HM96, Author = {Haselgrove, J. C. 
and Moore, J. R.}, Title = {Correction for distortion of echo-planar images used to calculate the apparent diffusion coefficient.}, Journal = {Magn Reson Med}, Volume = {36}, Number = {6}, Pages = {960-4}, abstract = {An algorithm for correcting the distortions that occur in diffusion-weighted echo-planar images due to the strong diffusion-sensitizing gradients is presented. The dominant distortions may be considered to be only changes of scale coupled with a shear and linear translation in the phase-encoding direction. It is then possible to correct for them by using an algorithm in which each line of the image in the phase-encoding direction is considered in turn, with only one parameter (the scale) to be found by searching.}, authoraddress = {Department of Radiology, Children's Hospital of Philadelphia, PA 19104, USA.}, keywords = {*Algorithms ; Brain/pathology ; Echo-Planar Imaging/*methods ; Humans ; Image Enhancement/*methods ; Sensitivity and Specificity}, language = {eng}, medline-crdt = {1996/12/01 00:00}, medline-da = {19970225}, medline-dcom = {19970225}, medline-edat = {1996/12/01}, medline-fau = {Haselgrove, J C ; Moore, J R}, medline-is = {0740-3194 (Print)}, medline-jid = {8505245}, medline-jt = {Magnetic resonance in medicine : official journal of the Society of Magnetic Resonance in Medicine / Society of Magnetic Resonance in Medicine}, medline-lr = {20041117}, medline-mhda = {1996/12/01 00:01}, medline-own = {NLM}, medline-pl = {UNITED STATES}, medline-pmid = {8946363}, medline-pst = {ppublish}, medline-pt = {Journal Article}, medline-sb = {IM}, medline-so = {Magn Reson Med. 1996 Dec;36(6):960-4.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=8946363}, year = 1996 } @Article{Kume2005, Author = {Kume, A.}, Title = {{Saddlepoint approximations for the Bingham and Fisher-Bingham normalising constants}}, Journal = {Biometrika}, Volume = {92}, Number = {2}, Pages = {465--476}, doi = {10.1093/biomet/92.2.465}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Kume - 2005 - Saddlepoint approximations for the Bingham and Fisher-Bingham normalising constants.pdf:pdf}, issn = {0006-3444}, month = jun, url = {http://biomet.oxfordjournals.org/cgi/doi/10.1093/biomet/92.2.465}, year = 2005 } @Article{Koev2006, Author = {Koev, Plamen and Edelman, Alan}, Title = {{The Efficient Evaluation of the Hypergeometric Function of a Matrix Argument}}, Journal = {Mathematics of Computation}, Volume = {75}, Number = {254}, Pages = {833--846}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Koev, Edelman - 2006 - OF THE HYPERGEOMETRIC FUNCTION OF A MATRIX ARGUMENT.pdf:pdf}, keywords = {and phrases,c 2006 american mathematical,eigenvalues of random matrices,grant dms-0314286,hypergeometric function of a,in part by nsf,jack function,matrix argument,polynomial,society,this work was supported,zonal}, year = 2006 } @conference{corouge2004towards, author = {Corouge, I. and Gouttard, S.
and Gerig, G.}, booktitle = {International Symposium on Biomedical Imaging}, organization = {Citeseer}, pages = {344--347}, title = {{Towards a shape model of white matter fiber bundles using diffusion tensor MRI}}, year = 2004 } @Article{Koles1991a, Author = {Koles, Z J}, Title = {{The quantitative extraction and topographic mapping of the abnormal components in the clinical EEG.}}, Journal = {Electroencephalography and clinical neurophysiology}, Volume = {79}, Number = {6}, Pages = {440--7}, abstract = {A method is described which seems to be effective for extracting the abnormal components from the clinical EEG. The approach involves the use of a set a spatial patterns which are common to recorded and 'normal' EEGs and which can account for maximally different proportions of the combined variances in both EEGs. These spatial factors are used to decompose the EEG into orthogonal temporal wave forms which can be judged by the expert electroencephalographer to be abnormal, normal or of artifactual origin. The original EEG is then reconstructed using only the abnormal components and principal component analysis is used to present the spatial topography of the abnormal components. The effectiveness of the method is discussed along with its value for localization of abnormal sources. It is suggested, in conclusion, that the approach described may be optimal for interpretation of the clinical EEG since it allows what is best in terms of quantitative analysis of the EEG to be combined with the best that is available in terms of expert qualitative analysis.}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Koles - 1991 - The quantitative extraction and topographic mapping of the abnormal components in the clinical EEG..pdf:pdf}, issn = {0013-4694}, keywords = {Brain,Brain Mapping,Brain: physiology,Electroencephalography,Electroencephalography: methods,Humans,Signal Processing, Computer-Assisted}, month = dec, pmid = {1721571}, url = {http://www.ncbi.nlm.nih.gov/pubmed/1721571}, year = 1991 } @Article{Kim, Author = {Kim, Min-soo}, Title = {{A Particle-and-Density Based Evolutionary Clustering Method for Dynamic Networks}}, Number = {1}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Kim - Unknown - A Particle-and-Density Based Evolutionary Clustering Method for Dynamic Networks.pdf:pdf} } @Article{Marinucci2008, Author = {Marinucci, D and Pietrobon, D and Balbi, A and Baldi, P and Cabella, P and Kerkyacharian, G and Natoli, P and Picard, D and Vittorio, N}, Title = {{Spherical Needlets for CMB Data Analysis}}, Volume = {000}, Number = {February}, arxivid = {arXiv:0707.0844v1}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Marinucci et al. - 2008 - Spherical Needlets for CMB Data Analysis.pdf:pdf}, year = 2008 } @Article{ODonnell_IEEETMI07, Author = {O'Donnell, L. J. and Westin, C. F.}, Title = {Automatic tractography segmentation using a high-dimensional white matter atlas.}, Journal = {IEEE Trans Med Imaging}, Volume = {26}, Number = {11}, Pages = {1562-75}, abstract = {We propose a new white matter atlas creation method that learns a model of the common white matter structures present in a group of subjects. We demonstrate that our atlas creation method, which is based on group spectral clustering of tractography, discovers structures corresponding to expected white matter anatomy such as the corpus callosum, uncinate fasciculus, cingulum bundles, arcuate fasciculus, and corona radiata. 
The white matter clusters are augmented with expert anatomical labels and stored in a new type of atlas that we call a high-dimensional white matter atlas. We then show how to perform automatic segmentation of tractography from novel subjects by extending the spectral clustering solution, stored in the atlas, using the Nystrom method. We present results regarding the stability of our method and parameter choices. Finally we give results from an atlas creation and automatic segmentation experiment. We demonstrate that our automatic tractography segmentation identifies corresponding white matter regions across hemispheres and across subjects, enabling group comparison of white matter anatomy.}, authoraddress = {Golby Laboratory, Department of Neurosurgery, Brigham and Women's Hospital, Harvard Medical School, Boston, MA 02115, USA. lauren@csail.mit.edu}, keywords = {Algorithms ; Artificial Intelligence ; Computer Simulation ; Corpus Callosum/*anatomy \& histology ; Diffusion Magnetic Resonance Imaging/*methods ; Humans ; Image Enhancement/*methods ; Image Interpretation, Computer-Assisted/*methods ; Imaging, Three-Dimensional/*methods ; Models, Anatomic ; Models, Neurological ; Nerve Fibers, Myelinated/*ultrastructure ; Pattern Recognition, Automated/*methods ; Reproducibility of Results ; Sensitivity and Specificity ; Subtraction Technique}, language = {eng}, medline-aid = {10.1109/TMI.2007.906785 [doi]}, medline-crdt = {2007/11/29 09:00}, medline-da = {20071128}, medline-dcom = {20080122}, medline-edat = {2007/11/29 09:00}, medline-fau = {O'Donnell, Lauren J ; Westin, Carl-Fredrik}, medline-gr = {P41-RR13218/RR/NCRR NIH HHS/United States ; P41-RR15241/RR/NCRR NIH HHS/United States ; R01-AG20012/AG/NIA NIH HHS/United States ; R01-MH074794/MH/NIMH NIH HHS/United States ; U24-RR021382/RR/NCRR NIH HHS/United States ; U41-RR019703/RR/NCRR NIH HHS/United States}, medline-is = {0278-0062 (Print)}, medline-jid = {8310780}, medline-jt = {IEEE transactions on medical imaging}, medline-mhda = {2008/01/23 09:00}, medline-own = {NLM}, medline-pl = {United States}, medline-pmid = {18041271}, medline-pst = {ppublish}, medline-pt = {Journal Article ; Research Support, N.I.H., Extramural}, medline-sb = {IM}, medline-so = {IEEE Trans Med Imaging. 2007 Nov;26(11):1562-75.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=18041271}, year = 2007 } @Article{Bihan2001, Author = {Le Bihan, Denis and Mangin, J. F. and Poupon, C.}, Title = {{Diffusion tensor imaging: concepts and applications}}, Journal = {Journal of Magnetic Resonance Imaging}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Bihan, Mangin, Poupon - 2001 - Diffusion tensor imaging concepts and applications.pdf:pdf}, url = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.114.9156\&rep=rep1\&type=pdf}, year = 2001 } @Article{LWT+03, Author = {Lazar, M. and Weinstein, D. M. and Tsuruda, J. S. and Hasan, K. M. and Arfanakis, K. and Meyerand, M. E. and Badie, B. and Rowley, H. A. and Haughton, V. and Field, A. and Alexander, A. L.}, Title = {White matter tractography using diffusion tensor deflection.}, Journal = {Hum Brain Mapp}, Volume = {18}, Number = {4}, Pages = {306-21}, abstract = {Diffusion tensor MRI provides unique directional diffusion information that can be used to estimate the patterns of white matter connectivity in the human brain. In this study, the behavior of an algorithm for white matter tractography is examined.
The algorithm, called TEND, uses the entire diffusion tensor to deflect the estimated fiber trajectory. Simulations and imaging experiments on in vivo human brains were performed to investigate the behavior of the tractography algorithm. The simulations show that the deflection term is less sensitive than the major eigenvector to image noise. In the human brain imaging experiments, estimated tracts were generated in corpus callosum, corticospinal tract, internal capsule, corona radiata, superior longitudinal fasciculus, inferior longitudinal fasciculus, fronto-occipital fasciculus, and uncinate fasciculus. This approach is promising for mapping the organizational patterns of white matter in the human brain as well as mapping the relationship between major fiber trajectories and the location and extent of brain lesions.}, authoraddress = {Department of Physics, University of Utah, Salt Lake City, Utah, USA.}, keywords = {Algorithms ; Brain Mapping/*methods ; Corpus Callosum/physiology ; Humans ; Nerve Fibers, Myelinated/*physiology ; Neural Pathways/physiology ; Pyramidal Tracts/physiology}, language = {eng}, medline-aid = {10.1002/hbm.10102 [doi]}, medline-ci = {Copyright 2003 Wiley-Liss, Inc.}, medline-crdt = {2003/03/13 04:00}, medline-da = {20030312}, medline-dcom = {20030530}, medline-edat = {2003/03/13 04:00}, medline-fau = {Lazar, Mariana ; Weinstein, David M ; Tsuruda, Jay S ; Hasan, Khader M ; Arfanakis, Konstantinos ; Meyerand, M Elizabeth ; Badie, Benham ; Rowley, Howard A ; Haughton, Victor ; Field, Aaron ; Alexander, Andrew L}, medline-gr = {MH62015/MH/NIMH NIH HHS/United States ; P30 CA42014/CA/NCI NIH HHS/United States}, medline-is = {1065-9471 (Print)}, medline-jid = {9419065}, medline-jt = {Human brain mapping}, medline-lr = {20071114}, medline-mhda = {2003/05/31 05:00}, medline-own = {NLM}, medline-pl = {United States}, medline-pmid = {12632468}, medline-pst = {ppublish}, medline-pt = {Comparative Study ; Journal Article ; Research Support, U.S. Gov't, P.H.S.}, medline-sb = {IM}, medline-so = {Hum Brain Mapp. 2003 Apr;18(4):306-21.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=12632468}, year = 2003 } @Article{Heil, Author = {Heil, Christopher}, Title = {{No Title}}, Journal = {Proofs}, Number = {1}, Pages = {2--5}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Heil - Unknown - No Title.pdf:pdf} } @Article{Rules2004, Author = {Rules, Association}, Title = {{Outline of the Course 1 . Introduction and Terminology 2 . Data Warehousing ( sketch ) Statement of the Problem}}, Pages = {155--187}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Rules - 2004 - Outline of the Course 1 . Introduction and Terminology 2 . 
Data Warehousing ( sketch ) Statement of the Problem.pdf:pdf}, year = 2004 } @Article{Papadakis1999, Author = {Papadakis, NG and Xing, D and Houston, GC and Smith, JM}, Title = {{A study of rotationally invariant and symmetric indices of diffusion anisotropy}}, Journal = {Magnetic resonance \ldots}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0730725X99000296}, year = 1999 } @Article{Drepper2007, Author = {Drepper, Ulrich and Hat, Red}, Title = {{What Every Programmer Should Know About Memory}}, Journal = {Changes}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Drepper, Hat - 2007 - What Every Programmer Should Know About Memory.pdf:pdf}, year = 2007 } @Article{WWS+08, Author = {Wedeen, V. J. and Wang, R. P. and Schmahmann, J. D. and Benner, T. and Tseng, W. Y. and Dai, G. and Pandya, D. N. and Hagmann, P. and D'Arceuil, H. and de Crespigny, A. J.}, Title = {Diffusion spectrum magnetic resonance imaging ({DSI}) tractography of crossing fibers.}, Journal = {Neuroimage}, Volume = {41}, Number = {4}, Pages = {1267-77}, abstract = {MRI tractography is the mapping of neural fiber pathways based on diffusion MRI of tissue diffusion anisotropy. Tractography based on diffusion tensor imaging (DTI) cannot directly image multiple fiber orientations within a single voxel. To address this limitation, diffusion spectrum MRI (DSI) and related methods were developed to image complex distributions of intravoxel fiber orientation. Here we demonstrate that tractography based on DSI has the capacity to image crossing fibers in neural tissue. DSI was performed in formalin-fixed brains of adult macaque and in the brains of healthy human subjects. Fiber tract solutions were constructed by a streamline procedure, following directions of maximum diffusion at every point, and analyzed in an interactive visualization environment (TrackVis). We report that DSI tractography accurately shows the known anatomic fiber crossings in optic chiasm, centrum semiovale, and brainstem; fiber intersections in gray matter, including cerebellar folia and the caudate nucleus; and radial fiber architecture in cerebral cortex. In contrast, none of these examples of fiber crossing and complex structure was identified by DTI analysis of the same data sets. These findings indicate that DSI tractography is able to image crossing fibers in neural tissue, an essential step toward non-invasive imaging of connectional neuroanatomy.}, authoraddress = {Department of Radiology, MGH Martinos Center for Biomedical Imaging, Harvard Medical School, Charlestown, MA 02129, USA. 
van@nmr.mgh.harvard.edu}, keywords = {Adult ; Algorithms ; Animals ; Brain/anatomy \& histology ; Diffusion Magnetic Resonance Imaging/*methods ; Female ; Humans ; Image Processing, Computer-Assisted/methods ; Macaca fascicularis ; Male ; Middle Aged ; Nerve Fibers/*physiology ; Neural Pathways/*anatomy \& histology/*physiology}, language = {eng}, medline-aid = {S1053-8119(08)00253-X [pii] ; 10.1016/j.neuroimage.2008.03.036 [doi]}, medline-crdt = {2008/05/23 09:00}, medline-da = {20080616}, medline-dcom = {20080829}, medline-dep = {20080408}, medline-edat = {2008/05/23 09:00}, medline-fau = {Wedeen, V J ; Wang, R P ; Schmahmann, J D ; Benner, T ; Tseng, W Y I ; Dai, G ; Pandya, D N ; Hagmann, P ; D'Arceuil, H ; de Crespigny, A J}, medline-gr = {1R01 MH 64044/MH/NIMH NIH HHS/United States ; 1R01 MH67980/MH/NIMH NIH HHS/United States ; 1R01EB00790/EB/NIBIB NIH HHS/United States ; 1R01NS401285/NS/NINDS NIH HHS/United States ; 1S10RR016811-01/RR/NCRR NIH HHS/United States ; P41RR14075/RR/NCRR NIH HHS/United States}, medline-is = {1053-8119 (Print)}, medline-jid = {9215515}, medline-jt = {NeuroImage}, medline-mhda = {2008/08/30 09:00}, medline-own = {NLM}, medline-phst = {2007/11/30 [received] ; 2008/03/14 [revised] ; 2008/03/17 [accepted] ; 2008/04/08 [aheadofprint]}, medline-pl = {United States}, medline-pmid = {18495497}, medline-pst = {ppublish}, medline-pt = {Journal Article ; Research Support, N.I.H., Extramural ; Research Support, Non-U.S. Gov't}, medline-sb = {IM}, medline-so = {Neuroimage. 2008 Jul 15;41(4):1267-77. Epub 2008 Apr 8.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=18495497}, year = 2008 } @Article{Indyk2003, Author = {Indyk, Piotr and Venkatasubramanian, Suresh}, Title = {{Approximate congruence in nearly linear time}}, Journal = {Computational Geometry}, Volume = {24}, Pages = {115--128}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Indyk, Venkatasubramanian - 2003 - Approximate congruence in nearly linear time.pdf:pdf}, keywords = {bottleneck distance,computational geometry,hall,metric entropy,pattern matching,point set matching,s}, year = 2003 } @Article{Arsigny2009, Author = {Arsigny, Vincent and Commowick, Olivier and Ayache, Nicholas and Pennec, Xavier}, Title = {{A Fast and Log-Euclidean Polyaffine Framework for Locally Linear Registration}}, Journal = {Journal of Mathematical Imaging and Vision}, Volume = {33}, Number = {2}, Pages = {222--238}, doi = {10.1007/s10851-008-0135-9}, file = {::}, issn = {0924-9907}, keywords = {arsigny,ayache,commowick,diffeomorphisms,ing,locally affine transformations,log-euclidean,medical imag-,n,non-rigid registration,o,ode,pennec,polyaffine transformations,v,x}, month = jan, url = {http://www.springerlink.com/index/10.1007/s10851-008-0135-9}, year = 2009 } @Article{Close2009, Author = {Close, Thomas G and Tournier, Jacques-Donald and Calamante, Fernando and Johnston, Leigh a and Mareels, Iven and Connelly, Alan}, Title = {{A software tool to generate simulated white matter structures for the assessment of fibre-tracking algorithms.}}, Journal = {NeuroImage}, Volume = {47}, Number = {4}, Pages = {1288--300}, abstract = {The assessment of Diffusion-Weighted MRI (DW-MRI) fibre-tracking algorithms has been limited by the lack of an appropriate 'gold standard'. Practical limitations of alternative methods and physical models have meant that numerical simulations have become the method of choice in practice. 
However, previous numerical phantoms have consisted of separate fibres embedded in homogeneous backgrounds, which do not capture the true nature of white matter. In this paper we describe a method that is able to randomly generate numerical structures consisting of densely packed bundles of fibres, which are much more representative of human white matter, and simulate the DW-MR images that would arise from them under many imaging conditions. User-defined parameters may be adjusted to produce structures with a range of complexities that spans the levels we would expect to find in vivo. These structures are shown to contain many different features that occur in human white matter and which could confound fibre-tracking algorithms, such as tract kissing and crossing. Furthermore, combinations of such features can be sampled by the random generation of many different structures with consistent levels of complexity. The proposed software provides means for quantitative assessment via direct comparison between tracking results and the exact location of the generated fibres. This should greatly improve our understanding of algorithm performance and therefore prove an important tool for fibre tracking development.}, doi = {10.1016/j.neuroimage.2009.03.077}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Close et al. - 2009 - A software tool to generate simulated white matter structures for the assessment of fibre-tracking algorithms..pdf:pdf}, issn = {1095-9572}, keywords = {Algorithms,Brain,Brain: anatomy \& histology,Computer Simulation,Humans,Image Enhancement,Image Enhancement: methods,Image Interpretation, Computer-Assisted,Image Interpretation, Computer-Assisted: methods,Magnetic Resonance Imaging,Magnetic Resonance Imaging: methods,Models, Anatomic,Models, Neurological,Nerve Fibers, Myelinated,Nerve Fibers, Myelinated: ultrastructure,Pattern Recognition, Automated,Pattern Recognition, Automated: methods,Reproducibility of Results,Sensitivity and Specificity,Software}, pmid = {19361565}, publisher = {Elsevier Inc.}, url = {http://www.ncbi.nlm.nih.gov/pubmed/19361565}, year = 2009 } @Misc{TheMendeleySupportTeam2010, Author = {{The Mendeley Support Team}}, Title = {{Getting Started with Mendeley}}, abstract = {A quick introduction to Mendeley. Learn how Mendeley creates your personal digital library, how to organize and annotate documents, how to collaborate and share with colleagues, and how to generate citations and bibliographies.}, address = {London}, booktitle = {Mendeley Desktop}, file = {:usr/share/doc/mendeleydesktop/FAQ.pdf:pdf}, keywords = {Mendeley,how-to,user manual}, pages = {1--14}, publisher = {Mendeley Ltd.}, url = {http://www.mendeley.com}, year = 2010 } @conference{pickalov2006tra, author = {Pickalov, V. and Basser, P.J.}, booktitle = {3rd IEEE International Symposium on Biomedical Imaging: Nano to Macro, 2006}, pages = {710--713}, title = {{3d tomographic reconstruction of the average propagator from mri data}}, year = 2006 } @Article{PCC+08, Author = {Perrin, M. and Cointepas, Y. and Cachia, A. and Poupon, C. and Thirion, B. and Riviere, D. and Cathier, P. and El Kouby, V. and Constantinesco, A. and Le Bihan, D. and Mangin, J. 
F.}, Title = {Connectivity-{B}ased {P}arcellation of the {C}ortical {M}antle {U}sing q-{B}all {D}iffusion {I}maging.}, Journal = {Int J Biomed Imaging}, Volume = {2008}, Pages = {368406}, abstract = {This paper exploits the idea that each individual brain region has a specific connection profile to create parcellations of the cortical mantle using MR diffusion imaging. The parcellation is performed in two steps. First, the cortical mantle is split at a macroscopic level into 36 large gyri using a sulcus recognition system. Then, for each voxel of the cortex, a connection profile is computed using a probabilistic tractography framework. The tractography is performed from q fields using regularized particle trajectories. Fiber ODF are inferred from the q-balls using a sharpening process focusing the weight around the q-ball local maxima. A sophisticated mask of propagation computed from a T1-weighted image perfectly aligned with the diffusion data prevents the particles from crossing the cortical folds. During propagation, the particles father child particles in order to improve the sampling of the long fascicles. For each voxel, intersection of the particle trajectories with the gyri lead to a connectivity profile made up of only 36 connection strengths. These profiles are clustered on a gyrus by gyrus basis using a K-means approach including spatial regularization. The reproducibility of the results is studied for three subjects using spatial normalization.}, authoraddress = {NeuroSpin Institut d'Imagerie BioMedicale, Commissariat l'Energie Atomique (CEA), Gif-sur-Yvette 91191, France.}, language = {eng}, medline-aid = {10.1155/2008/368406 [doi]}, medline-crdt = {2008/04/11 09:00}, medline-da = {20080410}, medline-edat = {2008/04/11 09:00}, medline-fau = {Perrin, Muriel ; Cointepas, Yann ; Cachia, Arnaud ; Poupon, Cyril ; Thirion, Bertrand ; Riviere, Denis ; Cathier, Pascal ; El Kouby, Vincent ; Constantinesco, Andre ; Le Bihan, Denis ; Mangin, Jean-Francois}, medline-is = {1687-4188 (Print)}, medline-jid = {101250756}, medline-jt = {International journal of biomedical imaging}, medline-mhda = {2008/04/11 09:00}, medline-oid = {NLM: PMC2288697}, medline-own = {NLM}, medline-phst = {2007/09/01 [received] ; 2007/11/30 [revised] ; 2007/12/16 [accepted]}, medline-pl = {United States}, medline-pmc = {PMC2288697}, medline-pmid = {18401457}, medline-pst = {ppublish}, medline-pt = {Journal Article}, medline-so = {Int J Biomed Imaging. 2008;2008:368406.}, medline-stat = {In-Data-Review}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=18401457}, year = 2008 } @Article{Tsiaras2009, Author = {Tsiaras, Vassilis L}, Title = {{Algorithms for the Analysis and Visualization of Biomedical Networks}}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Tsiaras - 2009 - Algorithms for the Analysis and Visualization of Biomedical Networks.pdf:pdf}, year = 2009 } @Article{Maaten2008, Author = {van der Maaten, L. and Hinton, G.}, Title = {{Visualizing Data using t-SNE}}, Journal = {Journal of Machine Learning Research}, url = {http://scholar.google.co.uk/scholar?q=hinton t-sne\&oe=utf-8\&rls=com.ubuntu:en-GB:official\&client=firefox-a\&um=1\&ie=UTF-8\&sa=N\&hl=en\&tab=ws\#2}, year = 2008 } @Article{NedjatiGilani2008ISMRM, Author = {Nedjati-Gilani, S. and Parker, G. J. and Alexander, D. C.}, Title = {Regularized super-resolution for diffusion {MRI}}, Journal = {Proc. Intl. Soc. Mag. Reson.
Med.}, Volume = {16}, Pages = {41}, abstract = {We present a new regularized super-resolution method, which finds fibre orientations and volume fractions on a sub-voxel scale and helps distinguish various fibre configurations such as fanning, bending and partial volume effects. We treat the task as a general inverse problem, which we solve by regularization and optimization, and run our method on human brain data.}, file = {attachment\:NedjatiGilani2008ISMRM.pdf:attachment\:NedjatiGilani2008ISMRM.pdf:PDF}, year = 2008 } @Article{Jones1999, Author = {Jones, D K and Horsfield, M a and Simmons, a}, Title = {{Optimal strategies for measuring diffusion in anisotropic systems by magnetic resonance imaging.}}, Journal = {Magnetic resonance in medicine : official journal of the Society of Magnetic Resonance in Medicine / Society of Magnetic Resonance in Medicine}, Volume = {42}, Number = {3}, Pages = {515--25}, abstract = {The optimization of acquisition parameters for precise measurement of diffusion in anisotropic systems is described. First, an algorithm is presented that minimizes the bias inherent in making measurements with a fixed set of gradient vector directions by spreading out measurements in 3-dimensional gradient vector space. Next, it is shown how the set of b-matrices and echo time can be optimized for estimating the diffusion tensor and its scalar invariants. The standard deviation in the estimate of the tensor trace in a water phantom was reduced by more than 40\% and the artefactual anisotropy was reduced by more than 60\% when using the optimized scheme compared with a more conventional scheme for the same scan time, and marked improvements are demonstrated in the human brain with the optimized sequences. Use of these optimal schemes results in reduced scan times, increased precision, or improved resolution in diffusion tensor images. Magn Reson Med 42:515-525, 1999.}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Jones, Horsfield, Simmons - 1999 - Optimal strategies for measuring diffusion in anisotropic systems by magnetic resonance imaging..pdf:pdf}, issn = {0740-3194}, keywords = {Adult,Algorithms,Anisotropy,Brain,Brain: anatomy \& histology,Diffusion,Humans,Linear Models,Magnetic Resonance Imaging,Magnetic Resonance Imaging: methods,Models, Structural,Phantoms, Imaging,Water}, month = sep, pmid = {10467296}, url = {http://www.ncbi.nlm.nih.gov/pubmed/10467296}, year = 1999 } @Article{Szeliski2006, Author = {Szeliski, Richard}, Title = {{Image Alignment and Stitching: A Tutorial}}, Journal = {Foundations and Trends® in Computer Graphics and Vision}, Volume = {2}, Number = {1}, Pages = {1--104}, doi = {10.1561/0600000009}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Szeliski - 2006 - Image Alignment and Stitching A Tutorial.pdf:pdf}, issn = {1572-2740}, url = {http://www.nowpublishers.com/product.aspx?product=CGV\&doi=0600000009}, year = 2006 } @Article{Maddah_IEEEBI2008, Author = {Maddah, M. and Zollei, L. and Grimson, W. E. and Westin, C. F. and Wells, W. M.}, Title = {A {M}athematical {F}ramework for {I}ncorporating {A}natomical {K}nowledge in {DT}-{MRI} {A}nalysis.}, Journal = {Proc IEEE Int Symp Biomed Imaging}, Volume = {4543943}, Pages = {105-108}, abstract = {We propose a Bayesian approach to incorporate anatomical information in the clustering of fiber trajectories. An expectation-maximization (EM) algorithm is used to cluster the trajectories, in which an atlas serves as the prior on the labels. 
The atlas guides the clustering algorithm and makes the resulting bundles anatomically meaningful. In addition, it provides the seed points for the tractography and initial settings of the EM algorithm. The proposed approach provides a robust and automated tool for tract-oriented analysis both in a single subject and over a population.}, authoraddress = {Computer Science and Artificial Intelligence Laboratory, Massachusetts Institute of Technology, Cambridge, MA 02139, USA.}, language = {ENG}, medline-aid = {10.1109/ISBI.2008.4540943 [doi]}, medline-crdt = {2009/02/13 09:00}, medline-da = {20090305}, medline-edat = {2009/02/13 09:00}, medline-gr = {P41 RR013218-09/NCRR NIH HHS/United States ; R01 MH074794-02/NIMH NIH HHS/United States ; R01 NS051826-04/NINDS NIH HHS/United States ; U41 RR019703-03/NCRR NIH HHS/United States ; U54 EB005149-04/NIBIB NIH HHS/United States}, medline-is = {1945-7928 (Print)}, medline-jid = {101492570}, medline-jt = {Proceedings / IEEE International Symposium on Biomedical Imaging: from nano to macro. IEEE International Symposium on Biomedical Imaging}, medline-mhda = {2009/02/13 09:00}, medline-mid = {NIHMS88086}, medline-own = {NLM}, medline-pmc = {PMC2638065}, medline-pmid = {19212449}, medline-pst = {ppublish}, medline-pt = {JOURNAL ARTICLE}, medline-so = {Proc IEEE Int Symp Biomed Imaging. 2008;4543943:105-108.}, medline-stat = {Publisher}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=19212449}, year = 2008 } @Article{Santana2010, Author = {Santana, Roberto and Bielza, Concha and Larra, Pedro}, Title = {{Classification of MEG data using a combined machine learning approach Problem definition}}, Journal = {Challenge}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Santana, Bielza, Larra - 2010 - Classification of MEG data using a combined machine learning approach Problem definition.pdf:pdf}, year = 2010 } @Article{Kohn2009, Author = {K\"{o}hn, Alexander and Klein, Jan and Weiler, Florian and Peitgen, Heinz-Otto}, Title = {{A GPU-based fiber tracking framework using geometry shaders}}, Journal = {Proceedings of SPIE}, Pages = {72611J--72611J--10}, doi = {10.1117/12.812219}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/K\"{o}hn et al. - 2009 - A GPU-based fiber tracking framework using geometry shaders.pdf:pdf}, keywords = {diffusion tensor imaging,fiber tracking,gpu,visualization}, publisher = {Spie}, url = {http://link.aip.org/link/PSISDG/v7261/i1/p72611J/s1\&Agg=doi}, year = 2009 } @Misc{okada2006dtf, Author = {Okada, T. and Miki, Y. and Fushimi, Y. and Hanakawa, T. and Kanagaki, M. and Yamamoto, A. and Urayama, S. and Fukuyama, H. and Hiraoka, M. and Togashi, K.}, Title = {{Diffusion-Tensor Fiber Tractography: Intraindividual Comparison of 3.0-T and 1.5-T MR Imaging 1}}, journal = {Radiology}, number = {2}, pages = {668--678}, publisher = {RSNA}, volume = {238}, year = 2006 } @Article{Mittmann2010, Author = {Mittmann, Adiel and Nobrega, Tiago H C and Comunello, Eros and Pinto, Juliano P O and Dellani, Paulo R and Stoeter, Peter and von Wangenheim, Aldo}, Title = {{Performing Real-Time Interactive Fiber Tracking.}}, Journal = {Journal of digital imaging : the official journal of the Society for Computer Applications in Radiology}, abstract = {Fiber tracking is a technique that, based on a diffusion tensor magnetic resonance imaging dataset, locates the fiber bundles in the human brain. 
Because it is a computationally expensive process, the interactivity of current fiber tracking tools is limited. We propose a new approach, which we termed real-time interactive fiber tracking, which aims at providing a rich and intuitive environment for the neuroradiologist. In this approach, fiber tracking is executed automatically every time the user acts upon the application. Particularly, when the volume of interest from which fiber trajectories are calculated is moved on the screen, fiber tracking is executed, even while it is being moved. We present our fiber tracking tool, which implements the real-time fiber tracking concept by using the video card's graphics processing units to execute the fiber tracking algorithm. Results show that real-time interactive fiber tracking is feasible on computers equipped with common, low-cost video cards.}, doi = {10.1007/s10278-009-9266-9}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Mittmann et al. - 2010 - Performing Real-Time Interactive Fiber Tracking..pdf:pdf}, issn = {1618-727X}, keywords = {11 which finds,diffusion tensor imaging,fiber tracking,fiber trajectories by following,graphics processing units,of them being the,real-time applications,streamline method,the main diffusion}, month = feb, pmid = {20155382}, url = {http://www.ncbi.nlm.nih.gov/pubmed/20155382}, year = 2010 } @Article{Kindlmann2007, Author = {Kindlmann, Gordon and Tricoche, Xavier and Westin, Carl-Fredrik}, Title = {{Delineating white matter structure in diffusion tensor MRI with anisotropy creases.}}, Journal = {Medical image analysis}, Volume = {11}, Number = {5}, Pages = {492--502}, abstract = {Geometric models of white matter architecture play an increasing role in neuroscientific applications of diffusion tensor imaging, and the most popular method for building them is fiber tractography. For some analysis tasks, however, a compelling alternative may be found in the first and second derivatives of diffusion anisotropy. We extend to tensor fields the notion from classical computer vision of ridges and valleys, and define anisotropy creases as features of locally extremal tensor anisotropy. Mathematically, these are the loci where the gradient of anisotropy is orthogonal to one or more eigenvectors of its Hessian. We propose that anisotropy creases provide a basis for extracting a skeleton of the major white matter pathways, in that ridges of anisotropy coincide with interiors of fiber tracts, and valleys of anisotropy coincide with the interfaces between adjacent but distinctly oriented tracts. The crease extraction algorithm we present generates high-quality polygonal models of crease surfaces, which are further simplified by connected-component analysis. 
We demonstrate anisotropy creases on measured diffusion MRI data, and visualize them in combination with tractography to confirm their anatomic relevance.}, doi = {10.1016/j.media.2007.07.005}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Kindlmann, Tricoche, Westin - 2007 - Delineating white matter structure in diffusion tensor MRI with anisotropy creases..pdf:pdf}, issn = {1361-8415}, keywords = {Algorithms,Anisotropy,Artificial Intelligence,Brain,Brain: cytology,Cluster Analysis,Diffusion Magnetic Resonance Imaging,Diffusion Magnetic Resonance Imaging: methods,Humans,Image Enhancement,Image Enhancement: methods,Image Interpretation, Computer-Assisted,Image Interpretation, Computer-Assisted: methods,Imaging, Three-Dimensional,Imaging, Three-Dimensional: methods,Nerve Fibers, Myelinated,Nerve Fibers, Myelinated: ultrastructure,Neural Pathways,Neural Pathways: cytology,Pattern Recognition, Automated,Pattern Recognition, Automated: methods,Reproducibility of Results,Sensitivity and Specificity}, pmid = {17804278}, url = {http://www.ncbi.nlm.nih.gov/pubmed/17804278}, year = 2007 } @Article{Koles1991, Author = {Koles, Z J}, Title = {{The quantitative extraction and topographic mapping of the abnormal components in the clinical EEG.}}, Journal = {Electroencephalography and clinical neurophysiology}, Volume = {79}, Number = {6}, Pages = {440--7}, abstract = {A method is described which seems to be effective for extracting the abnormal components from the clinical EEG. The approach involves the use of a set a spatial patterns which are common to recorded and 'normal' EEGs and which can account for maximally different proportions of the combined variances in both EEGs. These spatial factors are used to decompose the EEG into orthogonal temporal wave forms which can be judged by the expert electroencephalographer to be abnormal, normal or of artifactual origin. The original EEG is then reconstructed using only the abnormal components and principal component analysis is used to present the spatial topography of the abnormal components. The effectiveness of the method is discussed along with its value for localization of abnormal sources. It is suggested, in conclusion, that the approach described may be optimal for interpretation of the clinical EEG since it allows what is best in terms of quantitative analysis of the EEG to be combined with the best that is available in terms of expert qualitative analysis.}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Koles - 1991 - The quantitative extraction and topographic mapping of the abnormal components in the clinical EEG..pdf:pdf}, issn = {0013-4694}, keywords = {Brain,Brain Mapping,Brain: physiology,Electroencephalography,Electroencephalography: methods,Humans,Signal Processing, Computer-Assisted}, month = dec, pmid = {1721571}, url = {http://www.ncbi.nlm.nih.gov/pubmed/1721571}, year = 1991 } @Article{andersson2002mbm, Author = {Andersson, J.L.R. 
and Skare, S.}, Title = {{A model-based method for retrospective correction of geometric distortions in diffusion-weighted EPI}}, Journal = {Neuroimage}, Volume = {16}, Number = {1}, Pages = {177--199}, publisher = {Elsevier Inc.}, year = 2002 } @Article{behrens2005rca, Author = {Behrens, T E and Johansen-Berg, H.}, Title = {{Relating connectional architecture to grey matter function using diffusion imaging.}}, Journal = {Philos Trans R Soc Lond B Biol Sci}, Volume = {360}, Number = {1457}, Pages = {903--11}, file = {attachment\:behrens_dti_connectivity_function_2005.pdf:attachment\:behrens_dti_connectivity_function_2005.pdf:PDF}, year = 2005 } @Article{Hall2009, Author = {Hall, Matt G and Alexander, Daniel C}, Title = {{Convergence and parameter choice for Monte-Carlo simulations of diffusion MRI.}}, Journal = {IEEE transactions on medical imaging}, Volume = {28}, Number = {9}, Pages = {1354--64}, abstract = {This paper describes a general and flexible Monte- Carlo simulation framework for diffusing spins that generates realistic synthetic data for diffusion magnetic resonance imaging. Similar systems in the literature consider only simple substrates and their authors do not consider convergence and parameter optimization. We show how to run Monte-Carlo simulations within complex irregular substrates. We compare the results of the Monte-Carlo simulation to an analytical model of restricted diffusion to assess precision and accuracy of the generated results. We obtain an optimal combination of spins and updates for a given run time by trading off number of updates in favor of number of spins such that precision and accuracy of sythesized data are both optimized. Further experiments demonstrate the system using a tissue environment that current analytic models cannot capture. This tissue model incorporates swelling, abutting, and deformation. Swelling-induced restriction in the extracellular space due to the effects of abutting cylinders leads to large departures from the predictions of the analytical model, which does not capture these effects. 
This swelling-induced restriction may be an important mechanism in explaining the changes in apparent diffusion constant observed in the aftermath of acute ischemic stroke.}, doi = {10.1109/TMI.2009.2015756}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Hall, Alexander - 2009 - Convergence and parameter choice for Monte-Carlo simulations of diffusion MRI..pdf:pdf}, issn = {1558-0062}, keywords = {Algorithms,Brain Edema,Brain Edema: pathology,Brain Ischemia,Brain Ischemia: pathology,Computer Simulation,Diffusion Magnetic Resonance Imaging,Diffusion Magnetic Resonance Imaging: methods,Humans,Monte Carlo Method,Reproducibility of Results,Stroke,Stroke: pathology}, month = sep, pmid = {19273001}, url = {http://www.ncbi.nlm.nih.gov/pubmed/19273001}, year = 2009 } @Article{Perbet, Author = {Perbet, Frank}, Title = {{Correlated Probabilistic Trajectories for Pedestrian Motion Detection}}, Journal = {Image (Rochester, N.Y.)}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Perbet - Unknown - Correlated Probabilistic Trajectories for Pedestrian Motion Detection.pdf:pdf} } @Article{Vazirani1994, Author = {Vazirani, Vijay V}, Title = {{MAXIMUM MATCHING ALGORITHM}}, Journal = {Combinatorica}, Volume = {14}, Number = {i}, Pages = {71--109}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Vazirani - 1994 - MAXIMUM MATCHING ALGORITHM.pdf:pdf}, year = 1994 } @Article{boykov2004ecm, Author = {Boykov, Y. and Kolmogorov, V.}, Title = {{An experimental comparison of min-cut/max-flow algorithms for energy minimization in vision}}, Journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence}, Volume = {26}, Number = {9}, Pages = {1124--1137}, year = 2004 } @Article{Fillard2009a, Author = {Fillard, Pierre and Poupon, Cyril}, Title = {{A Novel Global Tractography Algorithm based on an Adaptive Spin Glass Model}}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Fillard, Poupon - 2009 - A Novel Global Tractography Algorithm based on an Adaptive Spin Glass Model.pdf:pdf}, year = 2009 } @Article{0266-5611-19-5-303, Author = {Jansons, Kalvis M and Alexander, Daniel C}, Title = {Persistent angular structure: new insights from diffusion magnetic resonance imaging data}, Journal = {Inverse Problems}, Volume = {19}, Number = {5}, Pages = {1031-1046}, abstract = {We determine a statistic called the (radially) persistent angular structure (PAS) from samples of the Fourier transform of a three-dimensional function. The method has applications in diffusion magnetic resonance imaging (MRI), which samples the Fourier transform of the probability density function of particle displacements. The PAS is then a representation of the relative mobility of particles in each direction. In PAS-MRI, we compute the PAS in each voxel of an image. This technique has biomedical applications, where it reveals the orientations of microstructural fibres, such as white-matter fibres in the brain. Scanner time is a significant factor in determining the amount of data available in clinical brain scans. Here, we use measurements acquired for diffusion-tensor MRI, which is a routine diffusion imaging technique, but extract richer information. In particular, PAS-MRI can resolve the orientations of crossing fibres.We test PAS-MRI on human brain data and on synthetic data. 
The human brain data set comes from a standard acquisition scheme for diffusion-tensor MRI in which the samples in each voxel lie on a sphere in Fourier space.}, url = {http://stacks.iop.org/0266-5611/19/1031}, year = 2003 } @Article{ODonnell_AJNR06, Author = {O'Donnell, L. J. and Kubicki, M. and Shenton, M. E. and Dreusicke, M. H. and Grimson, W. E. and Westin, C. F.}, Title = {A method for clustering white matter fiber tracts.}, Journal = {AJNR Am J Neuroradiol}, Volume = {27}, Number = {5}, Pages = {1032-6}, abstract = {BACKGROUND/PURPOSE: Despite its potential for visualizing white matter fiber tracts in vivo, diffusion tensor tractography has found only limited applications in clinical research in which specific anatomic connections between distant regions need to be evaluated. We introduce a robust method for fiber clustering that guides the separation of anatomically distinct fiber tracts and enables further estimation of anatomic connectivity between distant brain regions. METHODS: Line scanning diffusion tensor images (LSDTI) were acquired on a 1.5T magnet. Regions of interest for several anatomically distinct fiber tracts were manually drawn; then, white matter tractography was performed by using the Runge-Kutta method to interpolate paths (fiber traces) following the major directions of diffusion, in which traces were seeded only within the defined regions of interest. Next, a fully automatic procedure was applied to fiber traces, grouping them according to a pairwise similarity function that takes into account the shapes of the fibers and their spatial locations. RESULTS: We demonstrated the ability of the clustering algorithm to separate several fiber tracts which are otherwise difficult to define (left and right fornix, uncinate fasciculus and inferior occipitofrontal fasciculus, and corpus callosum fibers). CONCLUSION: This method successfully delineates fiber tracts that can be further analyzed for clinical research purposes. Hypotheses regarding specific fiber connections and their abnormalities in various neuropsychiatric disorders can now be tested.}, authoraddress = {MIT Computer Science and AI Lab, Cambridge, MA 02139, USA.}, keywords = {Adolescent ; Adult ; Brain/*anatomy \& histology ; *Diffusion Magnetic Resonance Imaging/methods ; Humans ; Middle Aged}, language = {eng}, medline-aid = {27/5/1032 [pii]}, medline-crdt = {2006/05/12 09:00}, medline-da = {20060511}, medline-dcom = {20061030}, medline-edat = {2006/05/12 09:00}, medline-fau = {O'Donnell, L J ; Kubicki, M ; Shenton, M E ; Dreusicke, M H ; Grimson, W E L ; Westin, C F}, medline-gr = {1-R01-NS051826-01/NS/NINDS NIH HHS/United States ; K02 MH 01110/MH/NIMH NIH HHS/United States ; P41 RR13218/RR/NCRR NIH HHS/United States ; R03 MH 068464-02/MH/NIMH NIH HHS/United States ; U54 EB005149/EB/NIBIB NIH HHS/United States}, medline-is = {0195-6108 (Print)}, medline-jid = {8003708}, medline-jt = {AJNR. American journal of neuroradiology}, medline-lr = {20080214}, medline-mhda = {2006/10/31 09:00}, medline-own = {NLM}, medline-pl = {United States}, medline-pmid = {16687538}, medline-pst = {ppublish}, medline-pt = {Journal Article ; Research Support, N.I.H., Extramural ; Research Support, Non-U.S. Gov't ; Research Support, U.S. Gov't, Non-P.H.S.}, medline-sb = {IM}, medline-so = {AJNR Am J Neuroradiol. 
2006 May;27(5):1032-6.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=16687538}, year = 2006 } @Article{Perrin2008, Author = {Perrin, Muriel and Cointepas, Yann and Cachia, Arnaud and Poupon, Cyril and Thirion, Bertrand and Rivi\`{e}re, Denis and Cathier, Pascal and {El Kouby}, Vincent and Constantinesco, Andr\'{e} and {Le Bihan}, Denis and Mangin, Jean-Fran\c{c}ois}, Title = {{Connectivity-Based Parcellation of the Cortical Mantle Using q-Ball Diffusion Imaging.}}, Journal = {International journal of biomedical imaging}, Volume = {2008}, Pages = {368406}, abstract = {This paper exploits the idea that each individual brain region has a specific connection profile to create parcellations of the cortical mantle using MR diffusion imaging. The parcellation is performed in two steps. First, the cortical mantle is split at a macroscopic level into 36 large gyri using a sulcus recognition system. Then, for each voxel of the cortex, a connection profile is computed using a probabilistic tractography framework. The tractography is performed from q fields using regularized particle trajectories. Fiber ODF are inferred from the q-balls using a sharpening process focusing the weight around the q-ball local maxima. A sophisticated mask of propagation computed from a T1-weighted image perfectly aligned with the diffusion data prevents the particles from crossing the cortical folds. During propagation, the particles father child particles in order to improve the sampling of the long fascicles. For each voxel, intersection of the particle trajectories with the gyri lead to a connectivity profile made up of only 36 connection strengths. These profiles are clustered on a gyrus by gyrus basis using a K-means approach including spatial regularization. The reproducibility of the results is studied for three subjects using spatial normalization.}, doi = {10.1155/2008/368406}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Perrin et al. - 2008 - Connectivity-Based Parcellation of the Cortical Mantle Using q-Ball Diffusion Imaging..pdf:pdf}, issn = {1687-4188}, pmid = {18401457}, url = {http://www.ncbi.nlm.nih.gov/pubmed/18401457}, year = 2008 } @PhdThesis{maddah2008quantitative, Author = {Maddah, M.}, Title = {{Quantitative Analysis of Cerebral White Matter Anatomy from Diffusion MRI}}, School = {Citeseer}, year = 2008 } @Article{Schmahmann2007, Author = {Schmahmann, Jeremy D and Pandya, Deepak N and Wang, Ruopeng and Dai, Guangping and Arceuil, Helen E D and Crespigny, Alex J De and Wedeen, Van J}, Title = {{Association fibre pathways of the brain : parallel observations from diffusion spectrum imaging and autoradiography}}, Journal = {Brain}, Pages = {630--653}, doi = {10.1093/brain/awl359}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Schmahmann et al. 
- 2007 - Association fibre pathways of the brain parallel observations from diffusion spectrum imaging and autoradiography.pdf:pdf}, keywords = {abbreviations,af ¼ arcuate fasciculus,ass ¼ superior limb,callosum,cb ¼ cingulum bundle,cc ¼ corpus,cs ¼ central sulcus,diffusion tensor imaging,disconnection,dsi ¼ diffusion spectrum,dti ¼ diffusion tensor,dwi ¼ diffusion weighted,emc ¼ extreme capsule,epi ¼ echoplanar imaging,fibre bundles,fof ¼ fronto-occipital fasciculus,ilf ¼ inferior longitudinal,image,imaging,isotope,of the arcuate sulcus,tract tracing,tractography}, year = 2007 } @Article{PPC+05, Author = {Perrin, M. and Poupon, C. and Cointepas, Y. and Rieul, B. and Golestani, N. and Pallier, C. and Riviere, D. and Constantinesco, A. and Le Bihan, D. and Mangin, J. F.}, Title = {Fiber tracking in q-ball fields using regularized particle trajectories.}, Journal = {Inf Process Med Imaging}, Volume = {19}, Pages = {52-63}, abstract = {Most of the approaches dedicated to fiber tracking from diffusion-weighted MR data rely on a tensor model. However, the tensor model can only resolve a single fiber orientation within each imaging voxel. New emerging approaches have been proposed to obtain a better representation of the diffusion process occurring in fiber crossing. In this paper, we adapt a tracking algorithm to the q-ball representation, which results from a spherical Radon transform of high angular resolution data. This algorithm is based on a Monte-Carlo strategy, using regularized particle trajectories to sample the white matter geometry. The method is validated using a phantom of bundle crossing made up of haemodialysis fibers. The method is also applied to the detection of the auditory tract in three human subjects.}, authoraddress = {Service Hospitalier Frederic Joliot, CEA, 91401 Orsay, France. perrin@shfj.cea.fr}, keywords = {Algorithms ; *Artificial Intelligence ; Brain/*cytology ; Diffusion Magnetic Resonance Imaging/*methods ; Humans ; Image Enhancement/methods ; Image Interpretation, Computer-Assisted/*methods ; Imaging, Three-Dimensional/*methods ; Nerve Fibers, Myelinated/*ultrastructure ; Pattern Recognition, Automated/*methods ; Reproducibility of Results ; Sensitivity and Specificity}, language = {eng}, medline-crdt = {2007/03/16 09:00}, medline-da = {20070314}, medline-dcom = {20070406}, medline-edat = {2007/03/16 09:00}, medline-fau = {Perrin, M ; Poupon, C ; Cointepas, Y ; Rieul, B ; Golestani, N ; Pallier, C ; Riviere, D ; Constantinesco, A ; Le Bihan, D ; Mangin, J F}, medline-is = {1011-2499 (Print)}, medline-jid = {9216871}, medline-jt = {Information processing in medical imaging : proceedings of the ... conference}, medline-mhda = {2007/04/07 09:00}, medline-own = {NLM}, medline-pl = {Germany}, medline-pmid = {17354684}, medline-pst = {ppublish}, medline-pt = {Journal Article}, medline-sb = {IM}, medline-so = {Inf Process Med Imaging. 2005;19:52-63.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=17354684}, year = 2005 } @Article{Neji2008a, Author = {Neji, Radhou\`{e}ne and Gilles, Jean-fran\c{c}ois Deux and Mezri, Fleury and Georg, Maatouk}, Title = {{A Kernel-based Approach to Diffusion Tensor and Fiber Clustering in the Human Skeletal Muscle}}, Journal = {October}, Number = {October}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Neji et al. 
- 2008 - A Kernel-based Approach to Diffusion Tensor and Fiber Clustering in the Human Skeletal Muscle.pdf:pdf}, year = 2008 } @Article{Tournier2007, Author = {Tournier, J-Donald and Calamante, Fernando and Connelly, Alan}, Title = {{Robust determination of the fibre orientation distribution in diffusion MRI: non-negativity constrained super-resolved spherical deconvolution.}}, Journal = {NeuroImage}, Volume = {35}, Number = {4}, Pages = {1459--72}, abstract = {Diffusion-weighted (DW) MR images contain information about the orientation of brain white matter fibres that potentially can be used to study human brain connectivity in vivo using tractography techniques. Currently, the diffusion tensor model is widely used to extract fibre directions from DW-MRI data, but fails in regions containing multiple fibre orientations. The spherical deconvolution technique has recently been proposed to address this limitation. It provides an estimate of the fibre orientation distribution (FOD) by assuming the DW signal measured from any fibre bundle is adequately described by a single response function. However, the deconvolution is ill-conditioned and susceptible to noise contamination. This tends to introduce artefactual negative regions in the FOD, which are clearly physically impossible. In this study, the introduction of a constraint on such negative regions is proposed to improve the conditioning of the spherical deconvolution. This approach is shown to provide FOD estimates that are robust to noise whilst preserving angular resolution. The approach also permits the use of super-resolution, whereby more FOD parameters are estimated than were actually measured, improving the angular resolution of the results. The method provides much better defined fibre orientation estimates, and allows orientations to be resolved that are separated by smaller angles than previously possible. This should allow tractography algorithms to be designed that are able to track reliably through crossing fibre regions.}, doi = {10.1016/j.neuroimage.2007.02.016}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Tournier, Calamante, Connelly - 2007 - Robust determination of the fibre orientation distribution in diffusion MRI non-negativity constrained super-resolved spherical deconvolution..pdf:pdf}, issn = {1053-8119}, keywords = {Algorithms,Brain,Brain: cytology,Computer Simulation,Data Interpretation, Statistical,Diffusion Magnetic Resonance Imaging,Humans,Image Processing, Computer-Assisted,Models, Statistical,Nerve Fibers,Nerve Fibers: physiology,Reproducibility of Results}, pmid = {17379540}, url = {http://www.ncbi.nlm.nih.gov/pubmed/17379540}, year = 2007 } @Article{Descoteaux2009, Author = {Descoteaux, Maxime and Deriche, Rachid and Kn\"{o}sche, Thomas R and Anwander, Alfred}, Title = {{Deterministic and probabilistic tractography based on complex fibre orientation distributions.}}, Journal = {IEEE transactions on medical imaging}, Volume = {28}, Number = {2}, Pages = {269--86}, abstract = {We propose an integral concept for tractography to describe crossing and splitting fibre bundles based on the fibre orientation distribution function (ODF) estimated from high angular resolution diffusion imaging (HARDI). We show that in order to perform accurate probabilistic tractography, one needs to use a fibre ODF estimation and not the diffusion ODF. 
We use a new fibre ODF estimation obtained from a sharpening deconvolution transform (SDT) of the diffusion ODF reconstructed from q-ball imaging (QBI). This SDT provides new insight into the relationship between the HARDI signal, the diffusion ODF, and the fibre ODF. We demonstrate that the SDT agrees with classical spherical deconvolution and improves the angular resolution of QBI. Another important contribution of this paper is the development of new deterministic and new probabilistic tractography algorithms using the full multidirectional information obtained through use of the fibre ODF. An extensive comparison study is performed on human brain datasets comparing our new deterministic and probabilistic tracking algorithms in complex fibre crossing regions. Finally, as an application of our new probabilistic tracking, we quantify the reconstruction of transcallosal fibres intersecting with the corona radiata and the superior longitudinal fasciculus in a group of eight subjects. Most current diffusion tensor imaging (DTI)-based methods neglect these fibres, which might lead to incorrect interpretations of brain functions.}, doi = {10.1109/TMI.2008.2004424}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Descoteaux et al. - 2009 - Deterministic and probabilistic tractography based on complex fibre orientation distributions..pdf:pdf}, issn = {1558-0062}, keywords = {Algorithms,Brain,Brain: anatomy \& histology,Diffusion Magnetic Resonance Imaging,Diffusion Magnetic Resonance Imaging: methods,Echo-Planar Imaging,Echo-Planar Imaging: methods,Humans,Image Enhancement,Image Enhancement: methods,Image Processing, Computer-Assisted,Image Processing, Computer-Assisted: methods,Models, Neurological,Models, Statistical,Nerve Fibers,Nerve Fibers: ultrastructure,Normal Distribution,Reproducibility of Results,Sensitivity and Specificity}, month = feb, pmid = {19188114}, url = {http://www.ncbi.nlm.nih.gov/pubmed/19188114}, year = 2009 } @Article{zhang1997birch, Author = {Zhang, T. and Ramakrishnan, R. and Livny, M.}, Title = {{BIRCH: A new data clustering algorithm and its applications}}, Journal = {Data Mining and Knowledge Discovery}, Volume = {1}, Number = {2}, Pages = {141--182}, publisher = {Springer}, year = 1997 } @Article{Leemans2005MagResMed, Author = {Leemans, A. and Sijbers, J. and Verhoye, M. and {Van der Linden}, A. and {Van Dyck}, D. }, Title = {Mathematical framework for simulating diffusion tensor \{{M}{R}\} neural fiber bundles}, Journal = {Magnetic Resonance in Medicine}, Volume = {53}, Number = {4}, Pages = {944-953}, doi = {10.1002/mrm.20418}, file = {attachment\:Leemans2005MagResMed.pdf:attachment\:Leemans2005MagResMed.pdf:PDF}, publisher = {Wiley-Liss, Inc.}, url = {http://dx.doi.org/10.1002/mrm.20418}, year = 2005 } @Article{Jones2002, Author = {Jones, Derek K. and Basser, Peter J.}, Title = {{Diffusion-tensor MRI: theory, experimental design and data analysis - a technical review}}, Journal = {NMR in Biomedicine}, Volume = {15}, Number = {7-8}, Pages = {456--467}, abstract = {This article treats the theoretical underpinnings of diffusion-tensor magnetic resonance imaging (DT-MRI), as well as experimental design and data analysis issues. We review the mathematical model underlying DT-MRI, discuss the quantitative parameters that are derived from the measured effective diffusion tensor, and describe artifacts thet arise in typical DT-MRI acquisitions. 
We also discuss difficulties in identifying appropriate models to describe water diffusion in heterogeneous tissues, as well as in interpreting experimental data obtained in such tissues. Finally, we describe new statistical methods that have been developed to analyse DT-MRI data, and their potential uses in clinical and multi-site studies. Copyright {\copyright} 2002 John Wiley \& Sons, Ltd.}, doi = {10.1002/nbm.783}, shorttitle = {Diffusion-tensor MRI}, url = {http://dx.doi.org/10.1002/nbm.783}, year = 2002 } @Article{Mining1997, Author = {Zhang, T. and Ramakrishnan, R. and Livny, M.}, Title = {{BIRCH: A New Data Clustering Algorithm and Its Applications}}, Journal = {Data Mining and Knowledge Discovery}, Volume = {1}, Number = {2}, Pages = {141--182}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Mining, Discovery - 1997 - BIRCH A New Data Clustering Algorithm and Its Applications.pdf:pdf}, keywords = {data classification and compression,data clustering,incremental algorithm,very large databases}, year = 1997 } @Article{Chan, Author = {Chan, Cy and Drensky, Vesselin and Edelman, Alan and Kan, Raymond and Koev, Plamen}, Title = {{On Computing Schur Functions and Series Thereof}}, Journal = {Journal of Algebraic Combinatorics}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Chan et al. - Unknown - On Computing Schur Functions and Series Thereof.pdf:pdf}, keywords = {computing,hypergeometric function of a,matrix argument,schur function} } @Article{MKW+08, Author = {Maddah, M. and Kubicki, M. and Wells, W. M. and Westin, C. F. and Shenton, M. E. and Grimson, W. E.}, Title = {Findings in schizophrenia by tract-oriented {DT}-{MRI} analysis.}, Journal = {Med Image Comput Comput Assist Interv Int Conf Med Image Comput Comput Assist Interv}, Volume = {11}, Number = {Pt 1}, Pages = {917-24}, abstract = {This paper presents a tract-oriented analysis of diffusion tensor (DT) images of the human brain. We demonstrate that unlike the commonly used ROI-based methods for population studies, our technique is sensitive to the local variation of diffusivity parameters along the fiber tracts. We show the strength of the proposed approach in identifying the differences in schizophrenic data compared to controls. Statistically significant drops in fractional anisotropy are observed along the genu and bilaterally in the splenium, as well as an increase in principal eigenvalue in uncinate fasciculus. This is the first tract-oriented clinical study in which an anatomical atlas is used to guide the algorithm.}, authoraddress = {Computer Science and Artificial Intelligence Laboratory, Massachusetts Institute of Technology, Cambridge, MA, USA. mmaddah@mit.edu}, keywords = {Algorithms ; *Artificial Intelligence ; Brain Diseases/*diagnosis ; Diffusion Magnetic Resonance Imaging/*methods ; Female ; Humans ; Image Enhancement/methods ; Image Interpretation, Computer-Assisted/*methods ; Male ; Nerve Fibers, Myelinated/*pathology ; Pattern Recognition, Automated/*methods ; Reproducibility of Results ; Schizophrenia/*diagnosis ; Sensitivity and Specificity}, language = {eng}, medline-crdt = {2008/11/05 09:00}, medline-da = {20081104}, medline-dcom = {20081209}, medline-edat = {2008/11/05 09:00}, medline-fau = {Maddah, Mahnaz ; Kubicki, Marek ; Wells, William M ; Westin, Carl-Fredrik ; Shenton, Martha E ; Grimson, W Eric L}, medline-jid = {101249582}, medline-jt = {Medical image computing and computer-assisted intervention : MICCAI ... 
International Conference on Medical Image Computing and Computer-Assisted Intervention}, medline-mhda = {2008/12/17 09:00}, medline-own = {NLM}, medline-pl = {Germany}, medline-pmid = {18979833}, medline-pst = {ppublish}, medline-pt = {Evaluation Studies ; Journal Article}, medline-sb = {IM}, medline-so = {Med Image Comput Comput Assist Interv Int Conf Med Image Comput Comput Assist Interv. 2008;11(Pt 1):917-24.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=18979833}, year = 2008 } @Article{MaddahMIA2008, Author = {Maddah, M. and Grimson, W. E. and Warfield, S. K. and Wells, W. M.}, Title = {A unified framework for clustering and quantitative analysis of white matter fiber tracts.}, Journal = {Med Image Anal}, Volume = {12}, Number = {2}, Pages = {191-202}, abstract = {We present a novel approach for joint clustering and point-by-point mapping of white matter fiber pathways. Knowledge of the point correspondence along the fiber pathways is not only necessary for accurate clustering of the trajectories into fiber bundles, but also crucial for any tract-oriented quantitative analysis. We employ an expectation-maximization (EM) algorithm to cluster the trajectories in a gamma mixture model context. The result of clustering is the probabilistic assignment of the fiber trajectories to each cluster, an estimate of the cluster parameters, i.e. spatial mean and variance, and point correspondences. The fiber bundles are modeled by the mean trajectory and its spatial variation. Point-by-point correspondence of the trajectories within a bundle is obtained by constructing a distance map and a label map from each cluster center at every iteration of the EM algorithm. This offers a time-efficient alternative to pairwise curve matching of all trajectories with respect to each cluster center. The proposed method has the potential to benefit from an anatomical atlas of fiber tracts by incorporating it as prior information in the EM algorithm. The algorithm is also capable of handling outliers in a principled way. The presented results confirm the efficiency and effectiveness of the proposed framework for quantitative analysis of diffusion tensor MRI.}, authoraddress = {Computer Science and Artificial Intelligence Laboratory, Massachusetts Institute of Technology, 32 Vassar Street, Cambridge, USA. 
mmaddah@mit.edu}, keywords = {Algorithms ; *Artificial Intelligence ; Brain/*anatomy \& histology ; *Cluster Analysis ; Diffusion Magnetic Resonance Imaging/*methods ; Humans ; Image Enhancement/methods ; Image Interpretation, Computer-Assisted/*methods ; Imaging, Three-Dimensional/methods ; Likelihood Functions ; Models, Biological ; Models, Statistical ; Nerve Fibers, Myelinated/*ultrastructure ; Pattern Recognition, Automated/*methods ; Reproducibility of Results ; Sensitivity and Specificity}, language = {eng}, medline-aid = {S1361-8415(07)00099-0 [pii] ; 10.1016/j.media.2007.10.003 [doi]}, medline-crdt = {2008/01/09 09:00}, medline-da = {20080416}, medline-dcom = {20080520}, medline-dep = {20071025}, medline-edat = {2008/01/09 09:00}, medline-fau = {Maddah, Mahnaz ; Grimson, W Eric L ; Warfield, Simon K ; Wells, William M}, medline-gr = {P30 HD018655/HD/NICHD NIH HHS/United States ; P30 HD018655-26/HD/NICHD NIH HHS/United States ; P41 RR013218/RR/NCRR NIH HHS/United States ; P41 RR013218-010001/RR/NCRR NIH HHS/United States ; P41 RR013218-010002/RR/NCRR NIH HHS/United States ; P41 RR013218-010010/RR/NCRR NIH HHS/United States ; R01 RR021885/RR/NCRR NIH HHS/United States ; R01 RR021885-01A1/RR/NCRR NIH HHS/United States ; R01 RR021885-02/RR/NCRR NIH HHS/United States ; R03 CA126466/CA/NCI NIH HHS/United States ; R03 CA126466-01A1/CA/NCI NIH HHS/United States ; R03 CA126466-02/CA/NCI NIH HHS/United States ; R21 MH067054/MH/NIMH NIH HHS/United States ; R21 MH067054-01A1/MH/NIMH NIH HHS/United States ; R21 MH067054-02/MH/NIMH NIH HHS/United States ; U41 RR019703/RR/NCRR NIH HHS/United States ; U54 EB005149/EB/NIBIB NIH HHS/United States}, medline-is = {1361-8423 (Electronic)}, medline-jid = {9713490}, medline-jt = {Medical image analysis}, medline-lr = {20090406}, medline-mhda = {2008/05/21 09:00}, medline-mid = {NIHMS49862}, medline-oid = {NLM: NIHMS49862 ; NLM: PMC2615202}, medline-own = {NLM}, medline-phst = {2006/11/18 [received] ; 2007/10/02 [revised] ; 2007/10/02 [accepted] ; 2007/10/25 [aheadofprint]}, medline-pl = {Netherlands}, medline-pmc = {PMC2615202}, medline-pmid = {18180197}, medline-pst = {ppublish}, medline-pt = {Journal Article ; Research Support, N.I.H., Extramural ; Research Support, Non-U.S. Gov't}, medline-sb = {IM}, medline-so = {Med Image Anal. 2008 Apr;12(2):191-202. Epub 2007 Oct 25.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=18180197}, year = 2008 } @Article{Tuch2005, Author = {Tuch, David S and Wisco, Jonathan J and Khachaturian, Mark H and Vanduffel, Wim and Ekstrom, Leeland B and K\"{o}tter, Rolf}, Title = {{Q-ball imaging of macaque white matter architecture}}, Journal = {Philosophical Transactions of the Royal Society B: Biological Sciences}, Volume = {360}, Number = {1457}, Pages = {869--879}, doi = {10.1098/rstb.2005.1651}, keywords = {connectivity,diffusion magnetic resonance imaging,high angular resolution diffusion,imaging,macaque,tractography,white matter}, year = 2005 } @Article{George2009, Author = {Kyriazis, George and Le Pennec, Erwan and Petrushev, Pencho and Picard, Dominique}, Title = {{Inversion of noisy Radon transform by SVD based needlets}}, Pages = {1--35}, arxivid = {arXiv:0809.3332v2}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/George et al. - 2009 - Inversion of noisy Radon transform by SVD based needlets arXiv 0809 . 3332v2 math . 
ST 17 Aug 2009.pdf:pdf}, year = 2009 } @Article{Hagmann2007PLoSONE, Author = {Hagmann, Patric and Kurant, Maciej and Gigandet, Xavier and Thiran, Patrick and Wedeen, Van J. and Meuli, Reto and Thiran, Jean-Philippe }, Title = {Mapping human whole-brain structural networks with diffusion {MRI}.}, Journal = {PLoS ONE}, Volume = {2}, Number = {7}, Pages = {e597}, abstract = {Understanding the large-scale structural network formed by neurons is a major challenge in system neuroscience. A detailed connectivity map covering the entire brain would therefore be of great value. Based on diffusion MRI, we propose an efficient methodology to generate large, comprehensive and individual white matter connectional datasets of the living or dead, human or animal brain. This non-invasive tool enables us to study the basic and potentially complex network properties of the entire brain. For two human subjects we find that their individual brain networks have an exponential node degree distribution and that their global organization is in the form of a small world.}, doi = {10.1371/journal.pone.0000597}, file = {attachment\:Hagmann2007PLoSONE.pdf:attachment\:Hagmann2007PLoSONE.pdf:PDF}, year = 2007 } @Article{torrey1956bed, Author = {Torrey, H.C.}, Title = {{Bloch equations with diffusion terms}}, Journal = {Physical Review}, Volume = {104}, Number = {3}, Pages = {563--565}, publisher = {APS}, year = 1956 } @Article{Perrin2005PhilTransRoySoc, Author = {Perrin, Muriel and Poupon, Cyril and Rieul, Bernard and Leroux, Patrick and Constantinesco, Andr and Mangin, Jean-Franois and LeBihan, Denis}, Title = {Validation of q-ball imaging with a diffusion fibre-crossing phantom on a clinical scanner}, Journal = {Philosophical Transactions of the Royal Society B: Biological Sciences}, Volume = {360}, Number = {1457}, Pages = {881-91}, abstract = {Magnetic resonance (MR) diffusion imaging provides a valuable tool used for inferring structural anisotropy of brain white matter connectivity from diffusion tensor imaging. Recently, several high angular resolution diffusion models were introduced in order to overcome the inadequacy of the tensor model for describing fibre crossing within a single voxel. Among them, q-ball imaging (QBI), inherited from the q-space method, relies on a spherical Radon transform providing a direct relationship between the diffusion-weighted MR signal and the orientation distribution function (ODF). Experimental validation of these methods in a model system is necessary to determine the accuracy of the methods and to optimize them. A diffusion phantom made up of two textile rayon fibre (comparable in diameter to axons) bundles, crossing at $90^o$, was designed and dedicated to ex vivo q-ball validation on a clinical scanner. Normalized ODFs were calculated inside regions of interest corresponding to monomodal and bimodal configurations of underlying structures. Threedimensional renderings of ODFs revealed monomodal shapes for voxels containing single-fibre population and bimodal patterns for voxels located within the crossing area. Principal orientations were estimated from ODFs and were compared with a priori structural fibre directions, validating efficiency of QBI for depicting fibre crossing. 
In the homogeneous regions, QBI detected the fibre angle with an accuracy of $19^o$ and in the fibre-crossing region with an accuracy of $30^o$.}, doi = {10.1098/rstb.2005.1650}, file = {attachment\:Perrin2005PhilTransRoySoc.pdf:attachment\:Perrin2005PhilTransRoySoc.pdf:PDF}, url = {http://journals.royalsociety.org/content/mldn6494e2xf23ta}, year = 2005 } @Article{Frey2008, Author = {Frey, S and Campbell, JSW and Pike, GB}, Title = {\ldots human language pathways with high angular resolution diffusion fiber tractography}, Journal = {Journal of Neuroscience}, url = {http://neuro.cjb.net/cgi/content/abstract/28/45/11435}, year = 2008 } @Article{Stejskal1965JChemPhys, Author = {E. O. Stejskal and J. E. Tanner}, Title = {Spin Diffusion Measurements: Spin Echoes in the Presence of a Time-Dependent Field Gradient}, Journal = {The Journal of Chemical Physics}, Volume = {42}, Number = {1}, Pages = {288-292}, doi = {10.1063/1.1695690}, publisher = {AIP}, url = {http://link.aip.org/link/?JCP/42/288/1}, year = 1965 } @Article{Reisert, Author = {Reisert, Marco and Mader, Irina and Kiselev, Valerij}, Title = {{Global Reconstruction of Neuronal Fibres}}, Journal = {Lecture Notes in Computer Science}, Pages = {1--12}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Reisert, Mader, Kiselev - Unknown - Global Reconstruction of Neuronal Fibres.pdf:pdf} } @Article{Koles1998, Author = {Koles, Z J and Soong, a C}, Title = {{EEG source localization: implementing the spatio-temporal decomposition approach.}}, Journal = {Electroencephalography and clinical neurophysiology}, Volume = {107}, Number = {5}, Pages = {343--52}, abstract = {OBJECTIVES: The spatio-temporal decomposition (STD) approach was used to localize the sources of simulated electroencephalograms (EEGs) to gain experience with the approach for analyzing real data. METHODS: The STD approach used is similar to the multiple signal classification method (MUSIC) in that it requires the signal subspace containing the sources of interest to be isolated in the EEG measurement space. It is different from MUSIC in that it allows more general methods of spatio-temporal decomposition to be used that may be better suited to the background EEG. RESULTS: If the EEG data matrix is not corrupted by noise, the STD approach can be used to locate multiple dipole sources of the EEG one at a time without a priori knowledge of the number of active sources in the signal space. In addition, the common-spatial-patterns method of spatio-temporal decomposition is superior to the eigenvector decomposition for localizing activity that is ictal in nature. CONCLUSIONS: The STD approach appears to be able to provide a means of localizing the equivalent dipole sources of realistic brain sources and that, even under difficult noise conditions and only 2 or 3 s of available EEG, the precision of the localization can be as low as a few mm.}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Koles, Soong - 1998 - EEG source localization implementing the spatio-temporal decomposition approach..pdf:pdf}, issn = {0013-4694}, keywords = {Artifacts,Brain,Brain Mapping,Brain Mapping: methods,Brain: physiology,Computer Simulation,Electrodes,Electroencephalography,Electroencephalography: instrumentation,Evaluation Studies as Topic,Humans,Models, Neurological,Time Factors}, month = nov, pmid = {9872437}, url = {http://www.ncbi.nlm.nih.gov/pubmed/9872437}, year = 1998 } @Article{roebroeck2008hrd, Author = {Roebroeck, A. and Galuske, R. 
and Formisano, E. and Chiry, O. and Bratzke, H. and Ronen, I. and Kim, D. and Goebel, R.}, Title = {{High-resolution diffusion tensor imaging and tractography of the human optic chiasm at 9.4 T}}, Journal = {Neuroimage}, Volume = {39}, Number = {1}, Pages = {157--168}, publisher = {Elsevier}, year = 2008 } @Article{To, Author = {To, Introduction}, Title = {{INTRODUCTION TO PROBABILITY}}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/To - Unknown - INTRODUCTION TO PROBABILITY.pdf:pdf} } @Article{Catani2008, Author = {Catani, M and Mesulam, M}, Title = {{The arcuate fasciculus and the disconnection theme in language and aphasia: \ldots}}, Journal = {Cortex}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0010945208001111}, year = 2008 } @Article{Walter2010, Author = {Walter, Thomas and Shattuck, David W and Baldock, Richard and Bastin, Mark E and Carpenter, Anne E and Duce, Suzanne and Ellenberg, Jan and Fraser, Adam and Hamilton, Nicholas and Pieper, Steve and Ragan, Mark A and Schneider, Jurgen E and Tomancak, Pavel and H\'{e}rich\'{e}, Jean-Karim}, Title = {{Visualization of image data from cells to organisms}}, Journal = {Nature Methods}, Volume = {7}, Number = {3s}, Pages = {S26--S41}, doi = {10.1038/nmeth.1431}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Walter et al. - 2010 - Visualization of image data from cells to organisms.pdf:pdf}, issn = {1548-7091}, publisher = {Nature Publishing Group}, url = {http://dx.doi.org/10.1038/nmeth.1431}, year = 2010 } @Article{Jbabdi2007, Author = {Jbabdi, S and Woolrich, M W and Andersson, J L and Behrens, T E}, Title = {{A Bayesian framework for global tractography}}, Journal = {NeuroImage}, Volume = {37}, Pages = {116--129}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Jbabdi et al. - 2007 - A Bayesian framework for global tractography.pdf:pdf}, year = 2007 } @Article{Lewis2005, Author = {Lewis, David A and Hashimoto, Takanori and Volk, David W}, Title = {{Cortical inhibitory neurons and schizophrenia.}}, Journal = {Nature reviews. Neuroscience}, Volume = {6}, Number = {4}, Pages = {312--24}, abstract = {Impairments in certain cognitive functions, such as working memory, are core features of schizophrenia. Convergent findings indicate that a deficiency in signalling through the TrkB neurotrophin receptor leads to reduced GABA (gamma-aminobutyric acid) synthesis in the parvalbumin-containing subpopulation of inhibitory GABA neurons in the dorsolateral prefrontal cortex of individuals with schizophrenia. Despite both pre- and postsynaptic compensatory responses, the resulting alteration in perisomatic inhibition of pyramidal neurons contributes to a diminished capacity for the gamma-frequency synchronized neuronal activity that is required for working memory function. 
These findings reveal specific targets for therapeutic interventions to improve cognitive function in individuals with schizophrenia.}, doi = {10.1038/nrn1648}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Lewis, Hashimoto, Volk - 2005 - Cortical inhibitory neurons and schizophrenia..pdf:pdf}, issn = {1471-003X}, keywords = {Animals,Cerebral Cortex,Cerebral Cortex: cytology,Cerebral Cortex: physiology,Cerebral Cortex: physiopathology,Humans,Nerve Net,Nerve Net: pathology,Nerve Net: physiopathology,Neural Inhibition,Neural Inhibition: physiology,Neurons,Neurons: cytology,Neurons: physiology,Schizophrenia,Schizophrenia: pathology,Schizophrenia: physiopathology}, month = apr, pmid = {15803162}, url = {http://www.ncbi.nlm.nih.gov/pubmed/15803162}, year = 2005 } @Article{Kenkre1997JMagRes, Author = {V. M. Kenkre and Eiichi Fukushima and D. Sheltraw}, Title = {Simple Solutions of the Torrey-Bloch Equations in the NMR Study of Molecular Diffusion}, Journal = {Journal of Magnetic Resonance}, Volume = {128}, Number = {1}, Pages = {62 - 69}, abstract = {A simple technique for solving the Torrey-Bloch equations appearing in the calculation of the NMR signal under gradient fields is presented. It is applicable to arbitrary time dependence of the gradient field to arbitrary initial distribution of spins, and to spin motion on discrete lattices as well as in the continuum under conditions of unrestricted diffusion. Known results are recovered as particular cases and new results are presented. The discrete lattice results are shown to be similar to known results for restricted diffusion in the continuum. Also presented is a surprising equivalence between results for a simple two-site hopping model and earlier expressions for the NMR signal for spins undergoing restricted diffusion in a continuum.}, doi = {DOI: 10.1006/jmre.1997.1216}, issn = {1090-7807}, url = {http://www.sciencedirect.com/science/article/B6WJX-45KN26H-6/2/817cb1d5d119831cc0ccf5284d324a37}, year = 1997 } @Article{Ashburner2000NeuroImage, Author = {Ashburner, John and Friston, Karl J.}, Title = {Voxel-Based Morphometry - The Methods}, Journal = {NeuroImage}, Volume = {11}, Pages = {805-821}, abstract = {At its simplest, voxel-based morphometry (VBM) involves a voxel-wise comparison of the local concentration of gray matter between two groups of subjects. The procedure is relatively straightforward and involves spatially normalizing high-resolution images from all the subjects in the study into the same stereotactic space. This is followed by segmenting the gray matter from the spatially normalized images and smoothing the gray-matter segments. Voxel-wise parametric statistical tests which compare the smoothed gray-matter images from the two groups are performed. Corrections for multiple comparisons are made using the theory of Gaussian random fields. This paper describes the steps involved in VBM, with particular emphasis on segmenting gray matter from MR images with nonuniformity artifact. 
We provide evaluations of the assumptions that underpin the method, including the accuracy of the segmentation and the assumptions made about the statistical distribution of the data.-}, doi = {10.1006/nimg.2000.0582}, file = {attachment\:Ashburner2000NeuroImage.pdf:attachment\:Ashburner2000NeuroImage.pdf:PDF}, publisher = {Elsevier}, year = 2000 } @Article{Ding2003a, Author = {Ding, Zhaohua and Gore, John C and Anderson, Adam W}, Title = {{Classification and quantification of neuronal fiber pathways using diffusion tensor MRI.}}, Journal = {Magnetic resonance in medicine : official journal of the Society of Magnetic Resonance in Medicine / Society of Magnetic Resonance in Medicine}, Volume = {49}, Number = {4}, Pages = {716--21}, abstract = {Quantitative characterization of neuronal fiber pathways in vivo is of significant neurological and clinical interest. Using the capability of MR diffusion tensor imaging to determine the local orientations of neuronal fibers, novel algorithms were developed to bundle neuronal fiber pathways reconstructed in vivo with diffusion tensor images and to quantify various physical and geometric properties of fiber bundles. The reliability of the algorithms was examined with reproducibility tests. Illustrative results show that consistent physical and geometric measurements of novel properties of neuronal tissue can be obtained, which offer considerable potential for the quantitative study of fiber pathways in vivo.}, doi = {10.1002/mrm.10415}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Ding, Gore, Anderson - 2003 - Classification and quantification of neuronal fiber pathways using diffusion tensor MRI..pdf:pdf}, issn = {0740-3194}, keywords = {Algorithms,Brain Mapping,Brain Mapping: methods,Humans,Image Processing, Computer-Assisted,Image Processing, Computer-Assisted: methods,Magnetic Resonance Imaging,Magnetic Resonance Imaging: methods,Nerve Fibers,Nerve Fibers: classification,Neural Pathways,Neural Pathways: anatomy \& histology,Reproducibility of Results}, pmid = {12652543}, url = {http://www.ncbi.nlm.nih.gov/pubmed/12652543}, year = 2003 } @Article{powell2005mtp, Author = {Powell, HWR and Parker, GJM and Alexander, DC and Symms, MR and Boulby, PA and Wheeler-Kingshott, CAM and Barker, GJ and Koepp, MJ and Duncan, JS}, Title = {{MR tractography predicts visual field defects following temporal lobe resection}}, Journal = {Neurology}, Volume = {65}, Number = {4}, Pages = {596--599}, publisher = {AAN Enterprises}, year = 2005 } @Article{lawes2008abs, Author = {Lawes, I. N. C. and Barrick, T.R. and Murugam, V. and Spierings, N. and Evans, D.R. and Song, M. and Clark, C. A.}, Title = {{Atlas-based segmentation of white matter tracts of the human brain using diffusion tensor tractography and comparison with classical dissection.}}, Journal = {Neuroimage}, Volume = {39}, Pages = {62--79}, file = {attachment\:lawes_dti_atlas-based_segmentation_2008.pdf:attachment\:lawes_dti_atlas-based_segmentation_2008.pdf:PDF}, year = 2008 } @Article{Tanaka1999, Author = {Tanaka, Hidefumi}, Title = {{Circular asymmetry of the paleomagnetic directions observed at low latitude volcanic sites}}, Journal = {Simulation}, Number = {4}, Pages = {1279--1286}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Tanaka - 1999 - Circular asymmetry of the paleomagnetic directions observed at low latitude volcanic sites.pdf:pdf}, year = 1999 } @Article{MaddahIPMI2007, Author = {Maddah, M. and Wells, 3rd, W. M. 
and Warfield, S. K. and Westin, C. F. and Grimson, W. E.}, Title = {Probabilistic clustering and quantitative analysis of white matter fiber tracts.}, Journal = {Inf Process Med Imaging}, Volume = {20}, Pages = {372-83}, abstract = {A novel framework for joint clustering and point-by-point mapping of white matter fiber pathways is presented. Accurate clustering of the trajectories into fiber bundles requires point correspondence determined along the fiber pathways. This knowledge is also crucial for any tract-oriented quantitative analysis. We employ an expectation-maximization (EM) algorithm to cluster the trajectories in a Gamma mixture model context. The result of clustering is the probabilistic assignment of the fiber trajectories to each cluster, an estimate of the cluster parameters, and point correspondences along the trajectories. Point-by-point correspondence of the trajectories within a bundle is obtained by constructing a distance map and a label map from each cluster center at every iteration of the EM algorithm. This offers a time-efficient alternative to pairwise curve matching of all trajectories with respect to each cluster center. Probabilistic assignment of the trajectories to clusters is controlled by imposing a minimum threshold on the membership probabilities, to remove outliers in a principled way. The presented results confirm the efficiency and effectiveness of the proposed framework for quantitative analysis of diffusion tensor MRI.}, authoraddress = {Computer Science and Artificial Intelligence Laboratory, Massachusetts Institute of Technology, Cambridge, MA 02139, USA. mmaddah@mit.edu}, keywords = {Algorithms ; Artificial Intelligence ; Brain/*cytology ; Cluster Analysis ; Diffusion Magnetic Resonance Imaging/*methods ; Humans ; Image Enhancement/methods ; Image Interpretation, Computer-Assisted/*methods ; Imaging, Three-Dimensional/*methods ; Models, Neurological ; Models, Statistical ; Nerve Fibers, Myelinated/*ultrastructure ; Neural Pathways/*cytology ; Pattern Recognition, Automated/*methods ; Reproducibility of Results ; Sensitivity and Specificity}, language = {eng}, medline-crdt = {2007/07/19 09:00}, medline-da = {20070718}, medline-dcom = {20070831}, medline-edat = {2007/07/19 09:00}, medline-fau = {Maddah, Mahnaz ; Wells, William M 3rd ; Warfield, Simon K ; Westin, Carl-Fredrik ; Grimson, W Eric L}, medline-gr = {P30 HD018655/HD/NICHD NIH HHS/United States ; P41 RR013218/RR/NCRR NIH HHS/United States ; R01 RR021885/RR/NCRR NIH HHS/United States ; R03 CA126466/CA/NCI NIH HHS/United States ; R21 MH067054/MH/NIMH NIH HHS/United States ; U41 RR019703/RR/NCRR NIH HHS/United States ; U54 EB005149/EB/NIBIB NIH HHS/United States}, medline-is = {1011-2499 (Print)}, medline-jid = {9216871}, medline-jt = {Information processing in medical imaging : proceedings of the ... conference}, medline-lr = {20071203}, medline-mhda = {2007/09/01 09:00}, medline-own = {NLM}, medline-pl = {Germany}, medline-pmid = {17633714}, medline-pst = {ppublish}, medline-pt = {Evaluation Studies ; Journal Article ; Research Support, N.I.H., Extramural ; Research Support, Non-U.S. Gov't ; Research Support, U.S. Gov't, Non-P.H.S.}, medline-sb = {IM}, medline-so = {Inf Process Med Imaging. 2007;20:372-83.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=17633714}, year = 2007 } @Article{descoteaux2009deterministic, Author = {Descoteaux, M. and Deriche, R. and Knoesche, T. 
and Anwander, A.}, Title = {{Deterministic and probabilistic tractography based on complex fibre orientation distributions}}, Journal = {IEEE Trans Med Imaging}, Volume = {28}, Number = {2}, Pages = {269--86}, year = 2009 } @Article{Smith2006NeuroImage, Author = {Smith, Stephen M. and Jenkinson, Mark and Johansen-Berg, Heidi and Rueckert, Daniel and Nichols, Thomas E. and Mackay, Clare E. and Watkins, Kate E. and Ciccarelli, Olga and Cader, Zaheer and Matthews, Paul M. and Behrens, Timothy E.J.}, Title = {Tract-based spatial statistics: Voxelwise analysis of multi-subject diffusion data}, Journal = {NeuroImage}, Volume = {31}, Pages = {1487-1505}, abstract = {There has been much recent interest in using magnetic resonance diffusion imaging to provide information about anatomical connectivity in the brain, by measuring the anisotropic diffusion of water in white matter tracts. One of the measures most commonly derived from diffusion data is fractional anisotropy (FA), which quantifies how strongly directional the local tract structure is. Many imaging studies are starting to use FA images in voxelwise statistical analyses, in order to localise brain changes related to development, degeneration and disease. However, optimal analysis is compromised by the use of standard registration algorithms; there has not to date been a satisfactory solution to the question of how to align FA images from multiple subjects in a way that allows for valid conclusions to be drawn from the subsequent voxelwise analysis. Furthermore, the arbitrariness of the choice of spatial smoothing extent has not yet been resolved. In this paper, we present a new method that aims to solve these issues via (a) carefully tuned non-linear registration, followed by (b) projection onto an alignment-invariant tract representation (the mean FA skeleton). We refer to this new approach as Tract-Based Spatial Statistics (TBSS). TBSS aims to improve the sensitivity, objectivity and interpretability of analysis of multi-subject diffusion imaging studies. We describe TBSS in detail and present example TBSS results from several diffusion imaging studies.}, file = {attachment\:Smith2006NeuroImage.pdf:attachment\:Smith2006NeuroImage.pdf:PDF}, publisher = {Elsevier}, year = 2006 } @Article{SAM+05, Author = {Sherbondy, A. and Akers, D. and Mackenzie, R. and Dougherty, R. and Wandell, B.}, Title = {Exploring connectivity of the brain's white matter with dynamic queries.}, Journal = {IEEE Trans Vis Comput Graph}, Volume = {11}, Number = {4}, Pages = {419-30}, abstract = {Diffusion Tensor Imaging (DTI) is a magnetic resonance imaging method that can be used to measure local information about the structure of white matter within the human brain. Combining DTI data with the computational methods of MR tractography, neuroscientists can estimate the locations and sizes of nerve bundles (white matter pathways) that course through the human brain. Neuroscientists have used visualization techniques to better understand tractography data, but they often struggle with the abundance and complexity of the pathways. In this paper, we describe a novel set of interaction techniques that make it easier to explore and interpret such pathways. Specifically, our application allows neuroscientists to place and interactively manipulate box or ellipsoid-shaped regions to selectively display pathways that pass through specific anatomical areas. 
These regions can be used in coordination with a simple and flexible query language which allows for arbitrary combinations of these queries using Boolean logic operators. A representation of the cortical surface is provided for specifying queries of pathways that may be relevant to gray matter structures and for displaying activation information obtained from functional magnetic resonance imaging. By precomputing the pathways and their statistical properties, we obtain the speed necessary for interactive question-and-answer sessions with brain researchers. We survey some questions that researchers have been asking about tractography data and show how our system can be used to answer these questions efficiently.}, authoraddress = {Department of Electrical Engineering, James H. Clark Center, 318 Campus Dr., Room S324, Stanford University, Stanford, CA 94305, USA. Sherbond@stanford.edu}, keywords = {Algorithms ; Animals ; Brain/*cytology ; *Computer Graphics ; Computer Simulation ; Diffusion Magnetic Resonance Imaging/*methods ; Humans ; Image Enhancement/*methods ; Image Interpretation, Computer-Assisted/*methods ; Imaging, Three-Dimensional/methods ; Models, Neurological ; Nerve Fibers, Myelinated/*ultrastructure ; Nerve Net/cytology ; Neural Pathways/*cytology ; Numerical Analysis, Computer-Assisted ; Online Systems ; *User-Computer Interface}, language = {eng}, medline-aid = {10.1109/TVCG.2005.59 [doi]}, medline-crdt = {2005/09/06 09:00}, medline-da = {20050905}, medline-dcom = {20050923}, medline-edat = {2005/09/06 09:00}, medline-fau = {Sherbondy, Anthony ; Akers, David ; Mackenzie, Rachel ; Dougherty, Robert ; Wandell, Brian}, medline-is = {1077-2626 (Print)}, medline-jid = {9891704}, medline-jt = {IEEE transactions on visualization and computer graphics}, medline-mhda = {2005/09/24 09:00}, medline-own = {NLM}, medline-pl = {United States}, medline-pmid = {16138552}, medline-pst = {ppublish}, medline-pt = {Evaluation Studies ; Journal Article}, medline-sb = {IM}, medline-so = {IEEE Trans Vis Comput Graph. 2005 Jul-Aug;11(4):419-30.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=16138552}, year = 2005 } @Article{ODonnell_MICCAI09, Author = {O'Donnell, L. J. and Westin, C. F. and Golby, A. J.}, Title = {Tract-based morphometry for white matter group analysis.}, Journal = {Neuroimage}, Volume = {45}, Number = {3}, Pages = {832-44}, abstract = {We introduce an automatic method that we call tract-based morphometry, or TBM, for measurement and analysis of diffusion MRI data along white matter fiber tracts. Using subject-specific tractography bundle segmentations, we generate an arc length parameterization of the bundle with point correspondences across all fibers and all subjects, allowing tract-based measurement and analysis. In this paper we present a quantitative comparison of fiber coordinate systems from the literature and we introduce an improved optimal match method that reduces spatial distortion and improves intra- and inter-subject variability of FA measurements. We propose a method for generating arc length correspondences across hemispheres, enabling a TBM study of interhemispheric diffusion asymmetries in the arcuate fasciculus (AF) and cingulum bundle (CB). The results of this study demonstrate that TBM can detect differences that may not be found by measuring means of scalar invariants in entire tracts, such as the mean diffusivity (MD) differences found in AF. 
We report TBM results of higher fractional anisotropy (FA) in the left hemisphere in AF (caused primarily by lower lambda(3), the smallest eigenvalue of the diffusion tensor, in the left AF), and higher left hemisphere FA in CB (related to higher lambda(1), the largest eigenvalue of the diffusion tensor, in the left CB). By mapping the significance levels onto the tractography trajectories for each structure, we demonstrate the anatomical locations of the interhemispheric differences. The TBM approach brings analysis of DTI data into the clinically and neuroanatomically relevant framework of the tract anatomy.}, authoraddress = {Department of Neurosurgery, Brigham and Women's Hospital, Harvard Medical School, Boston MA, USA. odonnell@bwh.harvard.edu}, language = {eng}, medline-aid = {S1053-8119(08)01282-2 [pii] ; 10.1016/j.neuroimage.2008.12.023 [doi]}, medline-crdt = {2009/01/22 09:00}, medline-da = {20090309}, medline-dep = {20081225}, medline-edat = {2009/01/22 09:00}, medline-fau = {O'Donnell, Lauren J ; Westin, Carl-Fredrik ; Golby, Alexandra J}, medline-gr = {K08NS048063/NS/NINDS NIH HHS/United States ; P41RR13218/RR/NCRR NIH HHS/United States ; P41RR15241/RR/NCRR NIH HHS/United States ; R01AG20012/AG/NIA NIH HHS/United States ; R01MH074794/MH/NIMH NIH HHS/United States ; U41RR019703/RR/NCRR NIH HHS/United States ; U54EB005149/EB/NIBIB NIH HHS/United States}, medline-is = {1095-9572 (Electronic)}, medline-jid = {9215515}, medline-jt = {NeuroImage}, medline-mhda = {2009/01/22 09:00}, medline-own = {NLM}, medline-phst = {2008/08/18 [received] ; 2008/11/13 [revised] ; 2008/12/08 [accepted] ; 2008/12/25 [aheadofprint]}, medline-pl = {United States}, medline-pmid = {19154790}, medline-pst = {ppublish}, medline-pt = {Journal Article ; Research Support, N.I.H., Extramural}, medline-sb = {IM}, medline-so = {Neuroimage. 2009 Apr 15;45(3):832-44. Epub 2008 Dec 25.}, medline-stat = {In-Process}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=19154790}, year = 2009 } @Article{Kuo, Author = {Kuo, L W and Chen, J H and Wedeen, V J and Tseng, W Y}, Title = {{Optimization of diffusion spectrum imaging and q-ball imaging on clinical MRI system}}, Journal = {Neuroimage}, Volume = {41}, Number = {1}, Pages = {7--18}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Kuo et al. - Unknown - Optimization of diffusion spectrum imaging and q-ball imaging on clinical MRI system.pdf:pdf}, year = 2008 } @Article{candes2008ics, Author = {Cand{\`e}s, E.J. and Wakin, M.B.}, Title = {{An introduction to compressive sampling}}, Journal = {IEEE Signal Processing Magazine}, Volume = {25}, Number = {2}, Pages = {21--30}, publisher = {New York, NY: Institute of Electrical \& Electronic Engineers, c1991-}, year = 2008 } @Article{Poldrack2008, Author = {Poldrack, Russell A and Fletcher, Paul C and Henson, Richard N and Worsley, Keith J and Brett, Matthew and Nichols, Thomas E}, Title = {{Guidelines for reporting an fMRI study.}}, Journal = {NeuroImage}, Volume = {40}, Number = {2}, Pages = {409--14}, abstract = {In this editorial, we outline a set of guidelines for the reporting of methods and results in functional magnetic resonance imaging studies and provide a checklist to assist authors in preparing manuscripts that meet these guidelines.}, doi = {10.1016/j.neuroimage.2007.11.048}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Poldrack et al.
- 2008 - Guidelines for reporting an fMRI study..pdf:pdf}, issn = {1053-8119}, keywords = {Guidelines as Topic,Magnetic Resonance Imaging,Publishing,Publishing: standards}, pmid = {18191585}, url = {http://www.ncbi.nlm.nih.gov/pubmed/18191585}, year = 2008 } @Article{Miki2007, Author = {Miki, Y and Urayama, S and Fushimi, Y and Okada, T and Hanakawa, T and Fukuyama, H}, Title = {{Diffusion Tensor Fiber Tractography of the Optic Radiation : Analysis with 6- , 12- , 40- , and 81-}}, Journal = {Ajnr. American Journal Of Neuroradiology}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Miki et al. - 2007 - Diffusion Tensor Fiber Tractography of the Optic Radiation Analysis with 6- , 12- , 40- , and 81-.pdf:pdf}, year = 2007 } @Article{Glasser2008, Author = {Glasser, MF and Rilling, JK}, Title = {{DTI tractography of the human brain's language pathways}}, Journal = {Cerebral Cortex}, url = {http://cercor.oxfordjournals.org/cgi/content/abstract/bhn011}, year = 2008 } @Article{Wedeen, Author = {Wedeen, V and Wang, R and Schmahmann, J and Benner, T and Tseng, W and Dai, G and Pandya, D and Hagmann, P and D'Arceuil, H and de Crespigny, A}, Title = {{Diffusion spectrum magnetic resonance imaging (DSI) tractography of crossing fibers}}, Journal = {NeuroImage}, Volume = {41}, Number = {4}, Pages = {1267--1277}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Wedeen et al. - Unknown - de Crespigny, Diffusion spectrum magnetic resonance imaging (dsi) tractography of crossing fibers,.pdf:pdf}, year = 2008 } @Article{Nannen2003c, Author = {Nannen, Volker and Groningen, Rijksuniversiteit}, Title = {{The Paradox of Overfitting}}, Journal = {Artificial Intelligence}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Nannen, Groningen - 2003 - The Paradox of Overfitting.pdf:pdf}, year = 2003 } @Article{Chen2009, Author = {Chen, Wei and Ding, Zi'ang and Zhang, Song and MacKay-Brandt, Anna and Correia, Stephen and Qu, Huamin and Crow, John Allen and Tate, David F and Yan, Zhicheng and Peng, Qunsheng}, Title = {{A novel interface for interactive exploration of DTI fibers.}}, Journal = {IEEE transactions on visualization and computer graphics}, Volume = {15}, Number = {6}, Pages = {1433--40}, abstract = {Visual exploration is essential to the visualization and analysis of densely sampled 3D DTI fibers in biological specimens, due to the high geometric, spatial, and anatomical complexity of fiber tracts. Previous methods for DTI fiber visualization use zooming, color-mapping, selection, and abstraction to deliver the characteristics of the fibers. However, these schemes mainly focus on the optimization of visualization in the 3D space where cluttering and occlusion make grasping even a few thousand fibers difficult. This paper introduces a novel interaction method that augments the 3D visualization with a 2D representation containing a low-dimensional embedding of the DTI fibers. This embedding preserves the relationship between the fibers and removes the visual clutter that is inherent in 3D renderings of the fibers. This new interface allows the user to manipulate the DTI fibers as both 3D curves and 2D embedded points and easily compare or validate his or her results in both domains. The implementation of the framework is GPU based to achieve real-time interaction.
The framework was applied to several tasks, and the results show that our method reduces the user's workload in recognizing 3D DTI fibers and permits quick and accurate DTI fiber selection.}, doi = {10.1109/TVCG.2009.112}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Chen et al. - 2009 - A novel interface for interactive exploration of DTI fibers..pdf:pdf}, issn = {1077-2626}, keywords = {Algorithms,Animals,Brain,Brain: anatomy \& histology,Cluster Analysis,Computer Graphics,Diffusion Magnetic Resonance Imaging,Diffusion Magnetic Resonance Imaging: methods,Heart,Heart: anatomy \& histology,Hindlimb,Models, Biological,Myofibrils,Nerve Fibers,Swine,User-Computer Interface}, pmid = {19834218}, url = {http://www.ncbi.nlm.nih.gov/pubmed/19834218}, year = 2009 } @Article{ZLW+03, Author = {Zhai, G. and Lin, W. and Wilber, K. P. and Gerig, G. and Gilmore, J. H.}, Title = {Comparisons of regional white matter diffusion in healthy neonates and adults performed with a 3.0-{T} head-only {MR} imaging unit.}, Journal = {Radiology}, Volume = {229}, Number = {3}, Pages = {673-81}, abstract = {PURPOSE: To evaluate the normal brains of adults and neonates for regional and age-related differences in apparent diffusion coefficient (ADC) and fractional anisotropy (FA). MATERIALS AND METHODS: Eight healthy adults and 20 healthy neonates were examined with a 3.0-T head-only magnetic resonance (MR) imaging unit by using a single-shot diffusion-tensor sequence. Trace ADC maps, FA maps, directional maps of the putative directions of white matter (WM) tracts, and fiber-tracking maps were obtained. Regions of interest-eight in WM and one in gray matter (GM)-were predefined for the ADC and FA measurements. The Student t test was used to compare FA and ADC between adults and neonates, whereas the Tukey multiple-comparison test was used to compare FA and ADC in different brain regions in the adult and neonate groups. RESULTS: A global elevation in ADC (P <.001) in both GM and WM and a reduction in FA (P <.001) in WM were observed in neonates as compared with these values in adults. In addition, significant regional variations in FA and ADC were observed in both groups. Regional variations in FA and ADC were less remarkable in adults, whereas neonates had consistently higher FA values and lower ADC values in the central WM as compared with these values in the peripheral WM. Fiber tracking revealed only major WM tracts in the neonates but fibers extending to the peripheral WM in the adults. 
CONCLUSION: There were regional differences in FA and ADC values in the neonates; such variations were less remarkable in the adults.}, authoraddress = {Department of Biomedical Engineering, University of North Carolina at Chapel Hill, CB \#7515, Chapel Hill, NC 27599, USA.}, keywords = {Adult ; Age Factors ; Brain/*anatomy \& histology ; Diffusion Magnetic Resonance Imaging/*instrumentation ; Humans ; Infant, Newborn ; ROC Curve}, language = {eng}, medline-aid = {10.1148/radiol.2293021462 [doi] ; 229/3/673 [pii]}, medline-crdt = {2003/12/06 05:00}, medline-da = {20031205}, medline-dcom = {20040112}, medline-edat = {2003/12/06 05:00}, medline-fau = {Zhai, Guihua ; Lin, Weili ; Wilber, Kathy P ; Gerig, Guido ; Gilmore, John H}, medline-gr = {HD03110/HD/NICHD NIH HHS/United States ; MH 33127/MH/NIMH NIH HHS/United States ; R01 NS 37312/NS/NINDS NIH HHS/United States}, medline-is = {0033-8419 (Print)}, medline-jid = {0401260}, medline-jt = {Radiology}, medline-lr = {20071114}, medline-mhda = {2004/01/13 05:00}, medline-own = {NLM}, medline-pl = {United States}, medline-pmid = {14657305}, medline-pst = {ppublish}, medline-pt = {Comparative Study ; Journal Article ; Research Support, U.S. Gov't, P.H.S.}, medline-sb = {AIM ; IM}, medline-so = {Radiology. 2003 Dec;229(3):673-81.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=14657305}, year = 2003 } @Article{Voineskos_Neuroimage09, Author = {Voineskos, A. N. and O'Donnell, L. J. and Lobaugh, N. J. and Markant, D. and Ameis, S. H. and Niethammer, M. and Mulsant, B. H. and Pollock, B. G. and Kennedy, J. L. and Westin, C. F. and Shenton, M. E.}, Title = {Quantitative examination of a novel clustering method using magnetic resonance diffusion tensor tractography.}, Journal = {Neuroimage}, Volume = {45}, Number = {2}, Pages = {370-6}, abstract = {MR diffusion tensor imaging (DTI) can measure and visualize organization of white matter fibre tracts in vivo. DTI is a relatively new imaging technique, and new tools developed for quantifying fibre tracts require evaluation. The purpose of this study was to compare the reliability of a novel clustering approach with a multiple region of interest (MROI) approach in both healthy and disease (schizophrenia) populations. DTI images were acquired in 20 participants (n=10 patients with schizophrenia: 56+/-15 years; n=10 controls: 51+/-20 years) (1.5 T GE system) with diffusion gradients applied in 23 non-collinear directions, repeated three times. Whole brain seeding and creation of fibre tracts were then performed. Interrater reliability of the clustering approach, and the MROI approach, were each evaluated and the methods compared. There was high spatial (voxel-based) agreement within and between the clustering and MROI methods. Fractional anisotropy, trace, and radial and axial diffusivity values showed high intraclass correlation (p<0.001 for all tracts) for each approach. Differences in scalar indices of diffusion between the clustering and MROI approach were minimal. The excellent interrater reliability of the clustering method and high agreement with the MROI method, quantitatively and spatially, indicates that the clustering method can be used with confidence. 
The clustering method avoids biases of ROI drawing and placement, and, not limited by a priori predictions, may be a more robust and efficient way to identify and measure white matter tracts of interest.}, authoraddress = {Geriatric Mental Health Program, Centre for Addiction and Mental Health, Department of Psychiatry, University of Toronto, Canada.}, language = {eng}, medline-aid = {S1053-8119(08)01281-0 [pii] ; 10.1016/j.neuroimage.2008.12.028 [doi]}, medline-crdt = {2009/01/23 09:00}, medline-da = {20090223}, medline-dep = {20081229}, medline-edat = {2009/01/23 09:00}, medline-fau = {Voineskos, Aristotle N ; O'Donnell, Lauren J ; Lobaugh, Nancy J ; Markant, Doug ; Ameis, Stephanie H ; Niethammer, Marc ; Mulsant, Benoit H ; Pollock, Bruce G ; Kennedy, James L ; Westin, Carl Fredrik ; Shenton, Martha E}, medline-gr = {1P50 MH08272/MH/NIMH NIH HHS/United States ; P41 RR13218/RR/NCRR NIH HHS/United States ; R01 MH 50740/MH/NIMH NIH HHS/United States ; R01 MH074794/MH/NIMH NIH HHS/United States ; U41-RR019703/RR/NCRR NIH HHS/United States ; U54GM072977-01/GM/NIGMS NIH HHS/United States}, medline-is = {1095-9572 (Electronic)}, medline-jid = {9215515}, medline-jt = {NeuroImage}, medline-mhda = {2009/01/23 09:00}, medline-mid = {NIHMS85018}, medline-oid = {NLM: NIHMS85018 [Available on 04/01/10] ; NLM: PMC2646811 [Available on 04/01/10]}, medline-own = {NLM}, medline-phst = {2008/08/25 [received] ; 2008/11/05 [revised] ; 2008/12/08 [accepted] ; 2008/12/29 [aheadofprint]}, medline-pl = {United States}, medline-pmc = {PMC2646811}, medline-pmcr = {2010/04/01}, medline-pmid = {19159690}, medline-pst = {ppublish}, medline-pt = {Journal Article ; Research Support, N.I.H., Extramural ; Research Support, Non-U.S. Gov't ; Research Support, U.S. Gov't, Non-P.H.S.}, medline-sb = {IM}, medline-so = {Neuroimage. 2009 Apr 1;45(2):370-6. Epub 2008 Dec 29.}, medline-stat = {In-Process}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=19159690}, year = 2009 } @Article{Hagmann2006Radiographics, Author = {Hagmann, Patric and Jonasson, Lisa and Maeder, Philippe and Thiran, Jean-Philippe and Wedeen, Van J. and Meuli, Reto}, Title = {Understanding diffusion \{{M}{R}\} imaging techniques: \{{F}\}rom scalar diffusion-weighted imaging to diffusion tensor imaging and beyond}, Journal = {Radiographics}, Volume = {26}, Number = {suppl_1}, Pages = {S205-223}, abstract = {The complex structural organization of the white matter of the brain can be depicted in vivo in great detail with advanced diffusion magnetic resonance (MR) imaging schemes. Diffusion MR imaging techniques are increasingly varied, from the simplest and most commonly used technique-the mapping of apparent diffusion coefficient values-to the more complex, such as diffusion tensor imaging, q-ball imaging, diffusion spectrum imaging, and tractography. The type of structural information obtained differs according to the technique used. To fully understand how diffusion MR imaging works, it is helpful to be familiar with the physical principles of water diffusion in the brain and the conceptual basis of each imaging technique. 
Knowledge of the technique-specific requirements with regard to hardware and acquisition time, as well as the advantages, limitations, and potential interpretation pitfalls of each technique, is especially useful.}, doi = {10.1148/rg.26si065510}, eprint = {http://radiographics.rsnajnls.org/cgi/reprint/26/suppl_1/S205.pdf}, file = {attachment\:Hagmann2006Radiographics.pdf:attachment\:Hagmann2006Radiographics.pdf:PDF}, url = {http://radiographics.rsnajnls.org/cgi/content/abstract/26/suppl_1/S205}, year = 2006 } @Misc{Mendeley2009, Author = {Mendeley}, Title = {{Getting Started with Mendeley}}, address = {London}, annote = {Double click on the entry on the left to view the PDF.}, booktitle = {Mendeley Desktop}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Mendeley - 2009 - Getting Started with Mendeley.pdf:pdf}, keywords = {Mendeley}, publisher = {Mendeley Ltd.}, url = {http://www.mendeley.com}, year = 2009 } @Article{Schmid2010, Author = {Schmid, Benjamin and Schindelin, Johannes and Cardona, Albert and Longair, Mark and Heisenberg, Martin}, Title = {{A high-level 3D visualization API for Java and ImageJ.}}, Journal = {BMC bioinformatics}, Volume = {11}, Number = {1}, Pages = {274}, abstract = {ABSTRACT: BACKGROUND: Current imaging methods such as Magnetic Resonance Imaging (MRI), Confocal microscopy, Electron Microscopy (EM) or Selective Plane Illumination Microscopy (SPIM) yield three-dimensional (3D) data sets in need of appropriate computational methods for their analysis. The reconstruction, segmentation and registration are best approached from the 3D representation of the data set. RESULTS: Here we present a platform-independent framework based on Java and Java 3D for accelerated rendering of biological images. Our framework is seamlessly integrated into ImageJ, a free image processing package with a vast collection of community-developed biological image analysis tools. Our framework enriches the ImageJ software libraries with methods that greatly reduce the complexity of developing image analysis tools in an interactive 3D visualization environment. In particular, we provide high-level access to volume rendering, volume editing, surface extraction, and image annotation. The ability to rely on a library that removes the low-level details enables concentrating software development efforts on the algorithm implementation parts. CONCLUSIONS: Our framework enables biomedical image software development to be built with 3D visualization capabilities with very little effort. We offer the source code and convenient binary packages along with extensive documentation at http://3dviewer.neurofly.de.}, doi = {10.1186/1471-2105-11-274}, issn = {1471-2105}, month = may, pmid = {20492697}, url = {http://www.ncbi.nlm.nih.gov/pubmed/20492697}, year = 2010 } @Article{Durrleman2009, Author = {Durrleman, Stanley and Fillard, Pierre and Pennec, Xavier and Trouv\'{e}, Alain and Ayache, Nicholas}, Title = {{A statistical model of white matter fiber bundles based on currents.}}, Journal = {Information processing in medical imaging : proceedings of the ... conference}, Volume = {21}, Pages = {114--25}, abstract = {The purpose of this paper is to measure the variability of a population of white matter fiber bundles without imposing unrealistic geometrical priors. 
In this respect, modeling fiber bundles as currents seems particularly relevant, as it gives a metric between bundles which relies neither on point nor on fiber correspondences and which is robust to fiber interruption. First, this metric is included in a diffeomorphic registration scheme which consistently aligns sets of fiber bundles. In particular, we show that aligning directly fiber bundles may solve the aperture problem which appears when fiber mappings are constrained by tensors only. Second, the measure of variability of a population of fiber bundles is based on a statistical model which considers every bundle as a random diffeomorphic deformation of a common template plus a random non-diffeomorphic perturbation. Thus, the variability is decomposed into a geometrical part and a "texture" part. Our results on real data show that both parts may contain interesting anatomical features.}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Durrleman et al. - 2009 - A statistical model of white matter fiber bundles based on currents..pdf:pdf}, issn = {1011-2499}, keywords = {Algorithms,Artificial Intelligence,Brain,Brain: anatomy \& histology,Cluster Analysis,Computer Simulation,Diffusion Magnetic Resonance Imaging,Diffusion Magnetic Resonance Imaging: methods,Humans,Image Enhancement,Image Enhancement: methods,Image Interpretation, Computer-Assisted,Image Interpretation, Computer-Assisted: methods,Imaging, Three-Dimensional,Imaging, Three-Dimensional: methods,Models, Neurological,Models, Statistical,Nerve Fibers, Myelinated,Nerve Fibers, Myelinated: ultrastructure,Pattern Recognition, Automated,Pattern Recognition, Automated: methods,Reproducibility of Results,Sensitivity and Specificity}, month = jan, pmid = {19694257}, url = {http://www.ncbi.nlm.nih.gov/pubmed/19694257}, year = 2009 } @Article{Wedeen2008, Author = {Wedeen, VJ and Wang, RP and Schmahmann, JD and Benner, T}, Title = {{Diffusion spectrum magnetic resonance imaging (DSI) tractography of crossing fibers}}, Journal = {Neuroimage}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Wedeen et al. - Unknown - de Crespigny, Diffusion spectrum magnetic resonance imaging (dsi) tractography of crossing fibers,.pdf:pdf}, url = {http://linkinghub.elsevier.com/retrieve/pii/S105381190800253X}, year = 2008 } @Article{Sotiras2009, Author = {Sotiras, Aristeidis and Neji, Radhou\`{e}ne and Nikos, Jean-fran\c{c}ois Deux and Mezri, Komodakis}, Title = {{Diffusion Tensor Registration Using Probability Kernels and Discrete Optimization}}, Journal = {Computer}, Number = {May}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Sotiras et al. - 2009 - Diffusion Tensor Registration Using Probability Kernels and Discrete Optimization.pdf:pdf}, year = 2009 } @Article{Basser1994, Author = {Basser, PJ and Mattiello, J and LeBihan, D}, Title = {{MR diffusion tensor spectroscopy and imaging}}, Journal = {Biophysical journal}, Volume = {66}, Number = {1}, Pages = {259--267}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Basser, Mattiello, LeBihan - 1994 - MR diffusion tensor spectroscopy and imaging.pdf:pdf}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0006349594807751}, year = 1994 } @Article{wakana2004ftba, Author = {Wakana, S. and Jiang, H. and Nagae-Poetscher, L. and van Zijl, P.
and Mori, S.}, Title = {Fiber tract-based atlas of human white matter anatomy}, Journal = {Radiology}, Volume = {230}, Pages = {77-87}, file = {attachment\:wakana_fiber_tract-based_atlas_2004.pdf:attachment\:wakana_fiber_tract-based_atlas_2004.pdf:PDF}, publisher = {RSNA}, year = 2004 } @Article{Joy, Author = {Joy, Kenneth I}, Title = {{Numerical Methods for Particle Tracing in Vector Fields}}, Journal = {Science}, Pages = {1--7}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Joy - Unknown - Numerical Methods for Particle Tracing in Vector Fields.pdf:pdf} } @Article{Staempfli2006NeuroImage, Author = {Staempfli, P. and Jaermann, T. and Crelier, G.R. and Kollias, S. and Valavanis, A. and Boesiger, P.}, Title = {Resolving fiber crossing using advanced fast marching tractography based on diffusion tensor imaging}, Journal = {NeuroImage}, Volume = {30}, Number = {1}, Pages = {110-120}, abstract = {Magnetic resonance diffusion tensor tractography is a powerful tool for the non-invasive depiction of the white matter architecture in the human brain. However, due to limitations in the underlying tensor model, the technique is often unable to reconstruct correct trajectories in heterogeneous fiber arrangements, such as axonal crossings. A novel tractography method based on fast marching (FM) is proposed which is capable of resolving fiber crossings and also permits trajectories to branch. It detects heterogeneous fiber arrangements by incorporating information from the entire diffusion tensor. The FM speed function is adapted to the local tensor characteristics, allowing in particular to maintain the front evolution direction in crossing situations. In addition, the FM's discretization error is reduced by increasing the number of considered possible front evolution directions. The performance of the technique is demonstrated in artificial data and in the healthy human brain. Comparisons with standard FM tractography and conventional line propagation algorithms show that, in the presence of interfering structures, the proposed method is more accurate in reconstructing trajectories. The in vivo results illustrate that the elucidated major white matter pathways are consistent with known anatomy and that multiple crossings and tract branching are handled correctly.}, file = {attachment\:Staempfli2006NeuroImage.pdf:attachment\:Staempfli2006NeuroImage.pdf:PDF}, url = {http://www.sciencedirect.com/science/article/B6WNP-4HD8DK8-3/2/c67092fe40d5854eaa7e5e78808d9983}, year = 2006 } @Article{Aksoy2008MRM, Author = {Aksoy, Murat and Liu, Chunlei and Moseley, Michael E. and Bammer, Roland}, Title = {Single-Step Nonlinear Diffusion Tensor Estimation in the Presence of Microscopic and Macroscopic Motion}, Journal = {Magnetic Resonance in Medicine}, Volume = {59}, Pages = {1138--1150}, abstract = {Patient motion can cause serious artifacts in diffusion tensor imaging (DTI), diminishing the reliability of the estimated diffusion tensor information. Studies in this field have so far been limited mainly to the correction of miniscule physiological motion. In order to correct for gross patient motion it is not sufficient to correct for misregistration between successive shots; the change in the diffusion-encoding direction must also be accounted for. This becomes particularly important for multishot sequences, whereby, in the presence of motion, each shot is encoded with a different diffusion weighting.
In this study a general mathematical framework to correct for gross patient motion present in a multishot and multicoil DTI scan is presented. A signal model is presented that includes the effect of rotational and translational motion in the patient frame of reference. This model was used to create a nonlinear leastsquares formulation, from which the diffusion tensors were obtained using a nonlinear conjugate gradient algorithm. Applications to both phantom simulations and in vivo studies showed that in the case of gross motion the proposed algorithm performs superiorly compared to conventional methods used for tensor estimation.}, owner = {ian}, timestamp = {2009.03.04}, year = 2008 } @Article{IturriaMedina2007NeuroImage, Author = {Iturria-Medina, Y. and Canales-Rodr{\'\i}guez, EJ and Melie-Garc{\'\i}a, L. and Vald{\'e}s-Hern{\'a}ndez, PA and Mart{\'\i}nez-Montes, E. and Alem{\'a}n-G{\'o}mez, Y. and S{\'a}nchez-Bornot, J M}, Title = {Characterizing brain anatomical connections using diffusion weighted \{{M}{RI}\} and graph theory}, Journal = {Neuroimage}, Volume = {36}, Number = {3}, Pages = {645-660}, abstract = {A new methodology based on Diffusion Weighted Magnetic Resonance Imaging (DW-MRI) and Graph Theory is presented for characterizing the anatomical connections between brain gray matter areas. In a first step, brain voxels are modeled as nodes of a non-directed graph in which the weight of an arc linking two neighbor nodes is assumed to be proportional to the probability of being connected by nervous fibers. This probability is estimated by means of probabilistic tissue segmentation and intravoxel white matter orientational distribution function, obtained from anatomical MRI and DW-MRI, respectively. A new tractography algorithm for finding white matter routes is also introduced. This algorithm solves the most probable path problem between any two nodes, leading to the assessment of probabilistic brain anatomical connection maps. In a second step, for assessing anatomical connectivity between K gray matter structures, the previous graph is redefined as a K+1 partite graph by partitioning the initial nodes set in K non-overlapped gray matter subsets and one subset clustering the remaining nodes. Three different measures are proposed for quantifying anatomical connections between any pair of gray matter subsets: Anatomical Connection Strength (ACS), Anatomical Connection Density (ACD) and Anatomical Connection Probability (ACP). This methodology was applied to both artificial and actual human data. Results show that nervous fiber pathways between some regions of interest were reconstructed correctly. Additionally, mean connectivity maps of ACS, ACD and ACP between 71 gray matter structures for five healthy subjects are presented.}, file = {attachment\:IturriaMedina2007NeuroImage.pdf:attachment\:IturriaMedina2007NeuroImage.pdf:PDF}, publisher = {Elsevier}, year = 2007 } @Misc{hyvarinen1998fim, Author = {Hyvarinen, A. 
and Oja, E.}, Title = {{The Fast-ICA MATLAB package}}, year = 1998 } @Article{Rosen2008, Author = {Rosen, Bruce}, Title = {2 -' ' >7}, Journal = {Engineering}, Number = {2001}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Rosen - 2008 - 2 -' ' 7.pdf:pdf}, year = 2008 } @PhdThesis{Tuch2002ThesisMIT, Author = {Tuch, D.S.}, Title = {Diffusion \{{M}{RI}\} of complex tissue structure}, School = {Massachusetts Institute of Technology, Division of Health Sciences and Technology}, abstract = {Magnetic resonance diffusion imaging provides an exquisitely sensitive probe of tissue microstructure. Owing to the microscopic length scale of diffusion in biological tissues, diffusion imaging can reveal histological architecture irresolvable by conventional magnetic resonance imaging methods. However, diffusion imaging methods to date have chiefly been based on analytical models of the underlying diffusion process. For example, diffusion tensor imaging assumes homogeneous Gaussian diffusion within each voxel, an assumption which is clearly invalid for the vast majority of the brain at presently achievable voxel resolutions. In this thesis I developed a diffusion imaging method capable of measuring the microscopic diffusion function within each voxel. In contrast to previous approaches to diffusion imaging, the method presented here does not require any assumptions on the underlying diffusion function. The model-independent approach can resolve complex intravoxel tissue structure including fiber crossing and fiber divergence within a single voxel. The method is capable of resolving not only deep white matter intersections, but also composite tissue structure at the cortical margin, and fiber-specific degeneration in neurodegenerative pathology. In sum, the approach can reveal complex intravoxel tissue structure previously thought to be beyond the scope of diffusion imaging methodology.}, publisher = {Massachusetts Institute of Technology}, year = 2002 } @Article{Durrleman2009a, Author = {Durrleman, Stanley and Fillard, Pierre and Pennec, Xavier and Trouv\'{e}, Alain and Ayache, Nicholas}, Title = {{A statistical model of white matter fiber bundles based on currents.}}, Journal = {Information processing in medical imaging : proceedings of the ... conference}, Volume = {21}, Pages = {114--25}, abstract = {The purpose of this paper is to measure the variability of a population of white matter fiber bundles without imposing unrealistic geometrical priors. In this respect, modeling fiber bundles as currents seems particularly relevant, as it gives a metric between bundles which relies neither on point nor on fiber correspondences and which is robust to fiber interruption. First, this metric is included in a diffeomorphic registration scheme which consistently aligns sets of fiber bundles. In particular, we show that aligning directly fiber bundles may solve the aperture problem which appears when fiber mappings are constrained by tensors only. Second, the measure of variability of a population of fiber bundles is based on a statistical model which considers every bundle as a random diffeomorphic deformation of a common template plus a random non-diffeomorphic perturbation. Thus, the variability is decomposed into a geometrical part and a "texture" part. Our results on real data show that both parts may contain interesting anatomical features.}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Durrleman et al. 
- 2009 - A statistical model of white matter fiber bundles based on currents..pdf:pdf}, issn = {1011-2499}, keywords = {Algorithms,Artificial Intelligence,Brain,Brain: anatomy \& histology,Cluster Analysis,Computer Simulation,Diffusion Magnetic Resonance Imaging,Diffusion Magnetic Resonance Imaging: methods,Humans,Image Enhancement,Image Enhancement: methods,Image Interpretation, Computer-Assisted,Image Interpretation, Computer-Assisted: methods,Imaging, Three-Dimensional,Imaging, Three-Dimensional: methods,Models, Neurological,Models, Statistical,Nerve Fibers, Myelinated,Nerve Fibers, Myelinated: ultrastructure,Pattern Recognition, Automated,Pattern Recognition, Automated: methods,Reproducibility of Results,Sensitivity and Specificity}, month = jan, pmid = {19694257}, url = {http://www.ncbi.nlm.nih.gov/pubmed/19694257}, year = 2009 } @Article{Hill2002, Author = {Hill, Murray}, Title = {{McLaren’s Improved Snub Cube and Other New Spherical Designs in Three Dimensions}}, Journal = {Sciences-New York}, Number = {1}, arxivid = {arXiv:math/0207211v1}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Hill - 2002 - McLaren’s Improved Snub Cube and Other New Spherical Designs in Three Dimensions.pdf:pdf}, year = 2002 } @Article{zhang2008identifying, Author = {Zhang, S. and Correia, S. and Laidlaw, D.H.}, Title = {{Identifying White-Matter Fiber Bundles in DTI Data Using an Automated Proximity-Based Fiber Clustering Method}}, Journal = {IEEE transactions on visualization and computer graphics}, Volume = {14}, Number = {5}, Pages = {1044}, publisher = {NIH Public Access}, year = 2008 } @Book{einstein1956itb, Author = {Einstein, A.}, Title = {Investigations on the {T}heory of the {B}rownian {M}ovement}, Publisher = {Dover Publications}, year = 1956 } @Article{Garyfallidis, Author = {Garyfallidis, Eleftherios}, Title = {{Diffusion MRI and Tractography Tracks vs Tracts}}, Journal = {Sciences-New York}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Garyfallidis - Unknown - Di usion MRI and Tractography Tracks vs Tracts.pdf:pdf} } @Article{Tegmark2008, Author = {Tegmark, Max}, Title = {{No Title}}, arxivid = {arXiv:astro-ph/9610094v1}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Tegmark - 2008 - No Title.pdf:pdf}, year = 2008 } @Article{wakana2007roq, Author = {Wakana, S. and Caprihan, A. and Panzenboeck, M. M. and Fallon, J.H. and Perry, M. and Gollub, R. L. and Hua, K. and Zhang, J. and Jiang, H. and Dubey, P. and Blitz, A. and van Zijl, P. and Mori, S.}, Title = {Reproducibility of quantitative tractography methods applied to cerebral white matter}, Journal = {Neuroimage}, Volume = {36}, Pages = {630-644}, file = {attachment\:wakana_reproducibility_2007.pdf:attachment\:wakana_reproducibility_2007.pdf:PDF}, publisher = {Elsevier}, year = 2007 } @Article{Dale2009, Author = {Dale, Darren and Droettboom, Michael and Firing, Eric and Hunter, John}, Title = {{Matplotlib}}, Journal = {Building}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Dale et al. 
- 2009 - Matplotlib.pdf:pdf}, year = 2009 } @Article{Hasan2007MRI, Author = {Hasan, Khader M.}, Title = {A framework for quality control and parameter optimization in diffusion tensor imaging: theoretical analysis and validation}, Journal = {Magnetic Resonance Imaging}, Volume = {25}, Pages = {1196--1202}, abstract = {In this communication, a theoretical framework for quality control and parameter optimization in diffusion tensor imaging (DTI) is presented and validated. The approach is based on the analytical error propagation of the mean diffusivity (Dav) obtained directly from the diffusion-weighted data acquired using rotationally invariant and uniformly distributed icosahedral encoding schemes. The error propagation of a recently described and validated cylindrical tensor model is further extrapolated to the spherical tensor case (diffusion anisotropy 0) to relate analytically the precision error in fractional tensor anisotropy (FA) with the mean diffusion-to-noise ratio (DNR). The approach provided simple analytical and empirical quality control measures for optimization of diffusion parameter space in an isotropic medium that can be tested using widely available water phantoms.}, file = {attachment\:Hasan2007MRI.pdf:attachment\:Hasan2007MRI.pdf:PDF}, year = 2007 } @Article{Jian2007bNeuroImage, Author = {Jian, Bing and Vemuri, Baba C. and Ozarslan, Evren and Carney, Paul R. and Mareci, Thomas H.}, Title = {Erratum to '\{{A}\} novel tensor distribution model for the diffusion-weighted \{{M}{R}\} signal'}, Journal = {NeuroImage}, Volume = {37}, Number = {2}, file = {attachment\:Jian2007bNeuroImage.pdf:attachment\:Jian2007bNeuroImage.pdf:PDF}, url = {http://www.sciencedirect.com/science/article/B6WNP-4S62RMR-5/2/160bb8aa9bf75adcf495557cec86868f}, year = 2007 } @InProceedings{Haro2008ISBI, Author = {Haro, Gloria and Lenglet, Christophe and Sapiro, Guillermo and Thompson, Paul M.}, Title = {On the Non-Uniform Complexity of Brain Connectivity}, BookTitle = {5th IEEE International Symposium on Biomedical Imaging: From Nano to Macro}, Pages = {FR-P2a (poster)}, abstract = {A stratification and manifold learning approach for analyzing High Angular Resolution Diffusion Imaging (HARDI) data is introduced in this paper. HARDI data provides high-dimensional signals measuring the complex microstructure of biological tissues, such as the cerebral white matter. We show that these high-dimensional spaces may be understood as unions of manifolds of varying dimensions/complexity and densities. With such analysis, we use clustering to characterize the structural complexity of the white matter. We briefly present the underlying framework and numerical experiments illustrating this original and promising approach.}, file = {attachment\:Haro2008ISBI.pdf:attachment\:Haro2008ISBI.pdf:PDF}, url = {http://www.ieeexplore.ieee.org/search/freesrchabstract.jsp?arnumber=4541139&isnumber=4540908&punumber=4534844&k2dockey=4541139@ieeecnfs&query=&pos=0}, year = 2008 } @Article{Buchel2004CerebralCortex, Author = {B{\"u}chel, C. and Raedler, T. and Sommer, M. and Sach, M. and Weiller, C. and Koch, M. A.}, Title = {White matter asymmetry in the human brain: a diffusion tensor \{{M}{RI}\} study}, Journal = {Cerebral Cortex}, Volume = {14}, Pages = {945-951}, abstract = {Language ability and handedness are likely to be associated with asymmetry of the cerebral cortex (grey matter) and connectivity (white matter).
Grey matter asymmetry, most likely linked to language has been identified with voxel-based morphometry (VBM) using T1-weighted images. Differences in white matter obtained with this technique are less consistent, probably due to the relative insensitivity of the T1 contrast to the ultrastructure of white matter. Furthermore, previous VBM studies failed to find differences related to handedness in either grey or white matter. We revisited these issues and investigated two independent groups of subjects with diffusion-tensor imaging (DTI) for asymmetries in white matter composition. Using voxel-based statistical analyses an asymmetry of the arcuate fascicle was observed, with higher fractional anisotropy in the left hemisphere. In addition, we show differences related to handedness in the white matter underneath the precentral gyrus contralateral to the dominant hand. Remarkably, these findings were very robust, even when investigating small groups of subjects. This highlights the sensitivity of DTI for white matter tissue differences, making it an ideal tool to study small patient populations.}, doi = {10.1093/cercor/bhh055}, file = {attachment\:Buchel2004CerebralCortex.pdf:attachment\:Buchel2004CerebralCortex.pdf:PDF}, year = 2004 } @Article{Ding2003, Author = {Ding, Z and Gore, J and Anderson, A}, Title = {{Classification and quantification of neuronal fiber pathways using diffusion tensor MRI}}, Journal = {Magn. Reson. Med.}, Volume = {49}, Pages = {716--721}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Ding, Gore, Anderson - 2003 - Classification and quantification of neuronal fiber pathways using diffusion tensor MRI.pdf:pdf}, year = 2003 } @Article{Canales-Rodriguez2009, Author = {Canales-Rodr\'{\i}guez, Erick Jorge and Melie-Garc\'{\i}a, Lester and Iturria-Medina, Yasser}, Title = {{Mathematical description of q-space in spherical coordinates: exact q-ball imaging.}}, Journal = {Magnetic resonance in medicine : official journal of the Society of Magnetic Resonance in Medicine / Society of Magnetic Resonance in Medicine}, Volume = {61}, Number = {6}, Pages = {1350--67}, abstract = {Novel methodologies have been recently developed to characterize the microgeometry of neural tissues and porous structures via diffusion MRI data. In line with these previous works, this article provides a detailed mathematical description of q-space in spherical coordinates that helps to highlight the differences and similarities between various related q-space methodologies proposed to date such as q-ball imaging (QBI), diffusion spectrum imaging (DSI), and diffusion orientation transform imaging (DOT). This formulation provides a direct relationship between the orientation distribution function (ODF) and the diffusion data without using any approximation. Under this relationship, the exact ODF can be computed by means of the Radon transform of the radial projection (in q-space) of the diffusion MRI signal. This new methodology, termed exact q-ball imaging (EQBI), was put into practice using an analytical ODF estimation in terms of spherical harmonics that allows obtaining model-free and model-based reconstructions. This work provides a new framework for combining information coming from diffusion data recorded on multiple spherical shells in q-space (hybrid diffusion imaging encoding scheme), which is capable of mapping ODF to a high accuracy. 
This represents a step toward a more efficient development of diffusion MRI experiments for obtaining better ODF estimates.}, doi = {10.1002/mrm.21917}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Canales-Rodr\'{\i}guez, Melie-Garc\'{\i}a, Iturria-Medina - 2009 - Mathematical description of q-space in spherical coordinates exact q-ball imaging..pdf:pdf}, issn = {1522-2594}, keywords = {Algorithms,Computer Simulation,Diffusion Magnetic Resonance Imaging,Diffusion Magnetic Resonance Imaging: methods,Image Enhancement,Image Enhancement: methods,Image Interpretation, Computer-Assisted,Image Interpretation, Computer-Assisted: methods,Imaging, Three-Dimensional,Imaging, Three-Dimensional: methods,Models, Biological,Reproducibility of Results,Sensitivity and Specificity}, pmid = {19319889}, url = {http://www.ncbi.nlm.nih.gov/pubmed/19319889}, year = 2009 } @Article{Wakana2007NeuroImage, Author = {Wakana, Setsu and Caprihan, Arvind and Panzenboeck, Martina M. and Fallon, James H. and Perry, Michele and Gollub, Randy L. and Hua, Kegang and Zhang, Jiangyang and Jiang, Hangyi and Dubey, Prachi and Blitz, Ari and {van Zijl}, Peter and Mori, Susumu}, Title = {Reproducibility of quantitative tractography methods applied to cerebral white matter}, Journal = {NeuroImage}, Volume = {36}, Number = {1}, Pages = {630-644}, abstract = {Tractography based on diffusion tensor imaging (DTI) allows visualization of white matter tracts. In this study, protocols to reconstruct eleven major white matter tracts are described. The protocols were refined by several iterations of intra- and inter-rater measurements and identification of sources of variability. Reproducibility of the established protocols was then tested by raters who did not have previous experience in tractography. The protocols were applied to a DTI database of adult normal subjects to study size, fractional anisotropy (FA), and T2 of individual white matter tracts. Distinctive features in FA and T2 were found for the corticospinal tract and callosal fibers. Hemispheric asymmetry was observed for the size of white matter tracts projecting to the temporal lobe. This protocol provides guidelines for reproducible DTI-based tract-specific quantification.}, file = {attachment\:Wakana2007NeuroImage.pdf:attachment\:Wakana2007NeuroImage.pdf:PDF}, publisher = {Elevier}, url = {http://www.sciencedirect.com/science/article/B6WNP-4N9DK04-1/2/6f4d33fa634a866aa907f16091a9bb67}, year = 2007 } @Article{Grady2006, Author = {Grady, Leo}, Title = {{Random walks for image segmentation.}}, Journal = {IEEE transactions on pattern analysis and machine intelligence}, Volume = {28}, Number = {11}, Pages = {1768--83}, abstract = {A novel method is proposed for performing multilabel, interactive image segmentation. Given a small number of pixels with user-defined (or predefined) labels, one can analytically and quickly determine the probability that a random walker starting at each unlabeled pixel will first reach one of the prelabeled pixels. By assigning each pixel to the label for which the greatest probability is calculated, a high-quality image segmentation may be obtained. Theoretical properties of this algorithm are developed along with the corresponding connections to discrete potential theory and electrical circuits. 
This algorithm is formulated in discrete space (i.e., on a graph) using combinatorial analogues of standard operators and principles from continuous potential theory, allowing it to be applied in arbitrary dimension on arbitrary graphs.}, doi = {10.1109/TPAMI.2006.233}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Grady - 2006 - Random walks for image segmentation..pdf:pdf}, issn = {0162-8828}, keywords = {Algorithms,Artificial Intelligence,Image Enhancement,Image Enhancement: methods,Image Interpretation, Computer-Assisted,Image Interpretation, Computer-Assisted: methods,Information Storage and Retrieval,Information Storage and Retrieval: methods,Models, Statistical,Pattern Recognition, Automated,Pattern Recognition, Automated: methods,Reproducibility of Results,Sensitivity and Specificity}, month = nov, pmid = {17063682}, url = {http://www.ncbi.nlm.nih.gov/pubmed/17063682}, year = 2006 } @Article{Good2001NeuroImage, Author = {Good, Catriona D. and Johnsrude, Ingrid S. and Ashburner, John and Henson, Richard N. A. and Friston, Karl J. and Frackowiak, Richard S. J.}, Title = {A Voxel-Based Morphometric Study of Ageing in 465 Normal Adult Human Brains}, Journal = {NeuroImage}, Volume = {14}, Pages = {21-36}, doi = {10.1006/nimg.2001.0786}, file = {attachment\:Good2001NeuroImage.pdf:attachment\:Good2001NeuroImage.pdf:PDF}, publisher = {Elsevier}, year = 2001 } @Article{Savadjiev2008, Author = {Savadjiev, Peter and Campbell, Jennifer S W and Descoteaux, Maxime and Deriche, Rachid and Pike, G Bruce and Siddiqi, Kaleem}, Title = {{Labeling of ambiguous subvoxel fibre bundle configurations in high angular resolution diffusion MRI.}}, Journal = {NeuroImage}, Volume = {41}, Number = {1}, Pages = {58--68}, abstract = {Whereas high angular resolution reconstruction methods for diffusion MRI can estimate multiple dominant fibre orientations within a single imaging voxel, they are fundamentally limited in certain cases of complex subvoxel fibre structures, resulting in ambiguous local orientation distribution functions. In this article we address the important problem of disambiguating such complex subvoxel fibre tract configurations, with the purpose of improving the performance of fibre tractography. We do so by extending a curve inference method to distinguish between the cases of curving and fanning fibre bundles using differential geometric estimates in a local neighbourhood. The key benefit of this method is the inference of curves, instead of only fibre orientations, to model the underlying fibre bundles. This in turn allows distinct fibre geometries that contain nearly identical sets of fibre orientations at a voxel, to be distinguished from one another. Experimental results demonstrate the ability of the method to successfully label voxels into one of the above categories and improve the performance of a fibre-tracking algorithm.}, doi = {10.1016/j.neuroimage.2008.01.028}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Savadjiev et al. 
- 2008 - Labeling of ambiguous subvoxel fibre bundle configurations in high angular resolution diffusion MRI..pdf:pdf}, issn = {1053-8119}, keywords = {Adult,Algorithms,Brain,Brain: anatomy \& histology,Brain: cytology,Diffusion Magnetic Resonance Imaging,Diffusion Magnetic Resonance Imaging: methods,Diffusion Magnetic Resonance Imaging: statistics \&,Humans,Image Processing, Computer-Assisted,Image Processing, Computer-Assisted: methods,Image Processing, Computer-Assisted: statistics \& ,Motor Cortex,Motor Cortex: cytology,Motor Cortex: physiology,Nerve Fibers,Nerve Fibers: physiology,Neural Pathways,Neural Pathways: anatomy \& histology,Neural Pathways: cytology,Neural Pathways: physiology}, pmid = {18367409}, url = {http://www.ncbi.nlm.nih.gov/pubmed/18367409}, year = 2008 } @Article{olver2010nist, Author = {Olver, F.W. and Lozier, D.W. and Boisvert, R.F. and Clark, C.W.}, Title = {{NIST handbook of mathematical functions}}, publisher = {Cambridge University Press New York, NY, USA}, year = 2010 } @Article{Cohen-adad, Author = {Cohen-adad, Julien and Mcnab, Jennifer and Gagoski, Borjan and Wedeen, Van and Wald, Lawrence and Hospital, Massachusetts General and States, United}, Title = {{OHBM https://www.aievolution.com/hbm1001/index.cfm?...}}, Pages = {1--6}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Cohen-adad et al. - Unknown - OHBM httpswww.aievolution.comhbm1001index.cfm....pdf:pdf} } @Article{Nannen2003a, Author = {Nannen, Volker}, Title = {{A Short Introduction to Model Selection , Kolmogorov Complexity and Minimum Description Length ( MDL )}}, Journal = {Complexity}, Number = {Mdl}, Pages = {1--23}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Nannen - 2003 - A Short Introduction to Model Selection , Kolmogorov Complexity and Minimum Description Length ( MDL ).pdf:pdf}, year = 2003 } @Article{Masutani2003EorJRadiography, Author = {Masutani, Yoshitaka and Aoki, Shigeki and Abe, Osamu and Hayashi, Naoto and Otomo, Kuni}, Title = {\{{MR}\} diffusion tensor imaging: recent advance and new techniques for diffusion tensor visualization}, Journal = {European Journal of Radiology}, Volume = {46}, Number = {1}, Pages = {53-66}, abstract = {Recently, diffusion tensor imaging is attracting the biomedical researchers for its application in depiction of fiber tracts based on diffusion anisotropy. In this paper, we briefly describe the basic theory of diffusion tensor MR imaging, the determination process of diffusion tensor, and the basic concepts of diffusion tensor visualization techniques. Several results of clinical application in our institute are also introduced. Finally, the limitations, advantages and disadvantages of the techniques are discussed for further application of diffusion tensor visualization.}, file = {Masutani2003EorJRadiography.pdf:Masutani2003EorJRadiography.pdf:PDF}, url = {http://www.sciencedirect.com/science/article/B6T6F-481N1XP-1/2/c1ca22568a2d933c2d6c23d493b98d1b}, year = 2003 } @Article{anwander2007cbp, Author = {Anwander, A. and Tittgemeyer, M. 
and von Cramon, D Y and Friederici, A D and Knosche, T R}, Title = {{Connectivity-Based Parcellation of {B}roca's {A}rea}}, Journal = {Cerebral Cortex}, Volume = {17}, Number = {4}, Pages = {816}, file = {attachment\:anwander_dti_broca_parcellation_2007.pdf:attachment\:anwander_dti_broca_parcellation_2007.pdf:PDF}, publisher = {Oxford Univ Press}, year = 2007 } @Article{Hermoye2006NeuroImage, Author = {Hermoye, Laurent and Saint-Martin, Christine and Cosnard, Guy and Lee, Seung-Koo and Kim, Jinna and Nassogne, Marie-Cecile and Menten, Renaud and Clapuyt, Philippe and Donohue, Pamela K. and Hua, Kegang and Wakana, Setsu and Jiang, Hangyi and {van Zijl}, Peter C.M. and Mori, Susumu}, Title = {Pediatric diffusion tensor imaging: Normal database and observation of the white matter maturation in early childhood}, Journal = {NeuroImage}, Volume = {29}, Number = {2}, Pages = {493-504}, abstract = {Recent advances in diffusion tensor imaging (DTI) have made it possible to reveal white matter anatomy and to detect neurological abnormalities in children. However, the clinical use of this technique is hampered by the lack of a normal standard of reference. The goal of this study was to initiate the establishment of a database of DTI images in children, which can be used as a normal standard of reference for diagnosis of pediatric neurological abnormalities. Seven pediatric volunteers and 23 pediatric patients (age range: 0-54 months) referred for clinical MR examinations, but whose brains were shown to be normal, underwent anatomical and DTI acquisitions on a 1.5 T MR scanner. The white matter maturation, as observed on DTI color maps, was described and illustrated. Changes in diffusion fractional anisotropy (FA), average apparent diffusion constant (ADCave), and T2-weighted (T2W) signal intensity were quantified in 12 locations to characterize the anatomical variability of the maturation process. Almost all prominent white matter tracts could be identified from birth, although their anisotropy was often low. The evolution of FA, shape, and size of the white matter tracts comprised generally three phases: rapid changes during the first 12 months; slow modifications during the second year; and relative stability after 24 months. The time courses of FA, ADCave, and T2W signal intensity confirmed our visual observations that maturation of the white matter and the normality of its architecture can be assessed with DTI in young children. The database is available online and is expected to foster the use of this promising technique in the diagnosis of pediatric pathologies.}, file = {attachment\:Hermoye2006NeuroImage.pdf:attachment\:Hermoye2006NeuroImage.pdf:PDF}, publisher = {Elsevier}, url = {http://www.sciencedirect.com/science/article/B6WNP-4H6GPNP-1/2/36429532df681a3d26bc67f5f3f8e9d9}, year = 2006 } @Article{Lee2007, Author = {Lee, Jae-gil and Han, Jiawei}, Title = {{Trajectory Clustering : A Partition-and-Group Framework ∗}}, Journal = {Group}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Lee, Han - 2007 - Trajectory Clustering A Partition-and-Group Framework ∗.pdf:pdf}, keywords = {a number of clustering,age processing,algorithms have been,and im-,data analysis,density-based clustering,market research,mdl principle,partition-and-group framework,pattern recognition,tering,trajectory clus-}, year = 2007 } @Article{BPP+00, Author = {Basser, P. J. and Pajevic, S. and Pierpaoli, C. and Duda, J. 
and Aldroubi, A.}, Title = {In vivo fiber tractography using {DT}-{MRI} data.}, Journal = {Magn Reson Med}, Volume = {44}, Number = {4}, Pages = {625-32}, abstract = {Fiber tract trajectories in coherently organized brain white matter pathways were computed from in vivo diffusion tensor magnetic resonance imaging (DT-MRI) data. First, a continuous diffusion tensor field is constructed from this discrete, noisy, measured DT-MRI data. Then a Frenet equation, describing the evolution of a fiber tract, was solved. This approach was validated using synthesized, noisy DT-MRI data. Corpus callosum and pyramidal tract trajectories were constructed and found to be consistent with known anatomy. The method's reliability, however, degrades where the distribution of fiber tract directions is nonuniform. Moreover, background noise in diffusion-weighted MRIs can cause a computed trajectory to hop from tract to tract. Still, this method can provide quantitative information with which to visualize and study connectivity and continuity of neural pathways in the central and peripheral nervous systems in vivo, and holds promise for elucidating architectural features in other fibrous tissues and ordered media.}, authoraddress = {Section on Tissue Biophysics and Biomimetics, NICHD, Bethesda, Maryland 20892-5772, USA. pjbasser@helix.nih.gov}, keywords = {Artifacts ; Brain/*anatomy \& histology ; Humans ; Image Processing, Computer-Assisted ; *Magnetic Resonance Imaging/methods ; Nerve Fibers}, language = {eng}, medline-aid = {10.1002/1522-2594(200010)44:4<625::AID-MRM17>3.0.CO;2-O [pii]}, medline-crdt = {2000/10/12 11:00}, medline-da = {20001103}, medline-dcom = {20001103}, medline-edat = {2000/10/12 11:00}, medline-fau = {Basser, P J ; Pajevic, S ; Pierpaoli, C ; Duda, J ; Aldroubi, A}, medline-is = {0740-3194 (Print)}, medline-jid = {8505245}, medline-jt = {Magnetic resonance in medicine : official journal of the Society of Magnetic Resonance in Medicine / Society of Magnetic Resonance in Medicine}, medline-lr = {20061115}, medline-mhda = {2001/02/28 10:01}, medline-own = {NLM}, medline-pl = {UNITED STATES}, medline-pmid = {11025519}, medline-pst = {ppublish}, medline-pt = {Journal Article ; Research Support, U.S. Gov't, Non-P.H.S.}, medline-sb = {IM}, medline-so = {Magn Reson Med. 2000 Oct;44(4):625-32.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=11025519}, year = 2000 } @Article{O'Donnell2007, Author = {O'Donnell, Lauren J and Westin, Carl-Fredrik and Golby, Alexandra J}, Title = {{Tract-based morphometry.}}, Journal = {Medical image computing and computer-assisted intervention : MICCAI ... International Conference on Medical Image Computing and Computer-Assisted Intervention}, Volume = {10}, Number = {Pt 2}, Pages = {161--8}, abstract = {Multisubject statistical analyses of diffusion tensor images in regions of specific white matter tracts have commonly measured only the mean value of a scalar invariant such as the fractional anisotropy (FA), ignoring the spatial variation of FA along the length of fiber tracts. We propose to instead perform tract-based morphometry (TBM), or the statistical analysis of diffusion MRI data in an anatomical tract-based coordinate system. We present a method for automatic generation of white matter tract arc length parameterizations, based on learning a fiber bundle model from tractography from multiple subjects. 
Our tract-based coordinate system enables TBM for the detection of white matter differences in groups of subjects. We present example TBM results from a study of interhemispheric differences in FA.}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/O'Donnell, Westin, Golby - 2007 - Tract-based morphometry..pdf:pdf}, keywords = {Algorithms,Artificial Intelligence,Brain,Brain: cytology,Cluster Analysis,Diffusion Magnetic Resonance Imaging,Diffusion Magnetic Resonance Imaging: methods,Humans,Image Enhancement,Image Enhancement: methods,Image Interpretation, Computer-Assisted,Image Interpretation, Computer-Assisted: methods,Imaging, Three-Dimensional,Imaging, Three-Dimensional: methods,Nerve Fibers, Myelinated,Nerve Fibers, Myelinated: ultrastructure,Neural Pathways,Neural Pathways: cytology,Pattern Recognition, Automated,Pattern Recognition, Automated: methods,Reproducibility of Results,Sensitivity and Specificity}, month = jan, pmid = {18044565}, url = {http://www.ncbi.nlm.nih.gov/pubmed/18044565}, year = 2007 } @Article{Correia2009a, Author = {Correia, Stephen and Lee, Stephanie Y and Voorn, Thom and Tate, David F and Paul, Robert H and Salloway, Stephen P and Malloy, Paul F and Laidlaw, David H}, Title = {{Quantitative tractography metrics of white matter integrity in diffusion-tensor MRI}}, Journal = {NeuroImage}, Volume = {42}, Number = {2}, Pages = {568--581}, doi = {10.1016/j.neuroimage.2008.05.022}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Correia et al. - 2009 - NIH Public Access.pdf:pdf}, year = 2009 } @Article{toosy2004cfs, Author = {Toosy, A. T. and Ciccarelli, O. and Parker, G.J.M. and Wheeler-Kingshott, C. A. M. and Miller, D. H. and Thompson, A. J.}, Title = {Characterizing function--structure relationships in the human visual system with functional {MRI} and diffusion tensor imaging}, Journal = {NeuroImage}, Volume = {21}, Number = {4}, Pages = {1452--1463}, file = {attachment\:toosy_visual_fmri_dti_2003.pdf:attachment\:toosy_visual_fmri_dti_2003.pdf:PDF}, publisher = {Elsevier}, year = 2004 } @Article{Descoteaux2007, Author = {Descoteaux, M and Angelino, E and Fitzgibbons, S and Deriche, R}, Title = {{Regularized, fast, and robust analytical q-ball imaging}}, Journal = {Magnetic Resonance in Medicine}, Volume = {58}, Number = {3}, Pages = {497--510}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Descoteaux et al. - 2007 - Regularized, fast, and robust analytical q-ball imaging.pdf:pdf}, year = 2007 } @Article{Zvitia2010a, Author = {Zvitia, Orly and Mayer, Arnaldo and Shadmi, Ran and Miron, Shmuel and Greenspan, Hayit K}, Title = {{Co-registration of white matter tractographies by adaptive-mean-shift and Gaussian mixture modeling.}}, Journal = {IEEE transactions on medical imaging}, Volume = {29}, Number = {1}, Pages = {132--45}, abstract = {In this paper, we present a robust approach to the registration of white matter tractographies extracted from diffusion tensor-magnetic resonance imaging scans. The fibers are projected into a high dimensional feature space based on the sequence of their 3-D coordinates. Adaptive mean-shift clustering is applied to extract a compact set of representative fiber-modes (FM). Each FM is assigned to a multivariate Gaussian distribution according to its population thereby leading to a Gaussian mixture model (GMM) representation for the entire set of fibers. The registration between two fiber sets is treated as the alignment of two GMMs and is performed by maximizing their correlation ratio.
A nine-parameters affine transform is recovered and eventually refined to a twelve-parameters affine transform using an innovative mean-shift based registration refinement scheme presented in this paper. The validation of the algorithm on synthetic intrasubject data demonstrates its robustness to interrupted and deviating fiber artifacts as well as outliers. Using real intrasubject data, a comparison is conducted to other intensity based and fiber-based registration algorithms, demonstrating competitive results. An option for tracking-in-time, on specific white matter fiber tracts, is also demonstrated on the real data.}, doi = {10.1109/TMI.2009.2029097}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Zvitia et al. - 2010 - Co-registration of white matter tractographies by adaptive-mean-shift and Gaussian mixture modeling.(2).pdf:pdf}, issn = {1558-0062}, keywords = {Algorithms,Brain,Brain: anatomy \& histology,Cluster Analysis,Diffusion Tensor Imaging,Diffusion Tensor Imaging: methods,Humans,Image Processing, Computer-Assisted,Image Processing, Computer-Assisted: methods,Models, Neurological,Normal Distribution,Reproducibility of Results}, month = jan, pmid = {19709970}, url = {http://www.ncbi.nlm.nih.gov/pubmed/19709970}, year = 2010 } @Article{Mobbs2009, Author = {Mobbs, Dean and Yu, Rongjun and Meyer, Marcel and Passamonti, Luca and Seymour, Ben and Calder, Andrew J and Schweizer, Susanne and Frith, Chris D and Dalgleish, Tim}, Title = {{A key role for similarity in vicarious reward.}}, Journal = {Science (New York, N.Y.)}, Volume = {324}, Number = {5929}, Pages = {900}, abstract = {Humans appear to have an inherent prosocial tendency toward one another in that we often take pleasure in seeing others succeed. This fact is almost certainly exploited by game shows, yet why watching others win elicits a pleasurable vicarious rewarding feeling in the absence of personal economic gain is unclear. One explanation is that game shows use contestants who have similarities to the viewing population, thereby kindling kin-motivated responses (for example, prosocial behavior). Using a game show-inspired paradigm, we show that the interactions between the ventral striatum and anterior cingulate cortex subserve the modulation of vicarious reward by similarity, respectively. Our results support studies showing that similarity acts as a proximate neurobiological mechanism where prosocial behavior extends to unrelated strangers.}, doi = {10.1126/science.1170539}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Mobbs et al. - 2009 - A key role for similarity in vicarious reward..pdf:pdf}, issn = {1095-9203}, keywords = {Adult,Basal Ganglia,Basal Ganglia: physiology,Brain Mapping,Empathy,Female,Games, Experimental,Gyrus Cinguli,Gyrus Cinguli: physiology,Humans,Magnetic Resonance Imaging,Male,Prefrontal Cortex,Prefrontal Cortex: physiology,Reward,Self Concept,Social Behavior,Social Desirability,Young Adult}, month = may, pmid = {19443777}, url = {http://www.pubmedcentral.nih.gov/articlerender.fcgi?artid=2839480\&tool=pmcentrez\&rendertype=abstract}, year = 2009 } @Article{Nannen2003b, Author = {Nannen, Volker}, Title = {{A Short Introduction to Kolmogorov Complexity}}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Nannen - 2003 - A Short Introduction to Kolmogorov Complexity.pdf:pdf}, year = 2003 } @conference{Auerbach2004ISMRM, author = {Auerbach, E. J. and Ugurbil, K.}, journal = {Proc. Intl. Soc. Mag. 
Reson. Med.}, owner = {ian}, timestamp = {2009.03.04}, title = {Improvement in Diffusion MRI at 3T and Beyond with the Twice-Refocused Adiabatic Spin Echo (TRASE) Sequence}, year = 2004 } @Article{sherbondy2006mma, Author = {Sherbondy, AJ and Akers, DL and Dougherty, RF and Ben-Shachar, M. and Napel, S. and Wandell, BA}, Title = {{MetroTrac: A metropolis algorithm for probabilistic tractography}}, Journal = {Human Brain Mapping, Florence}, year = 2006 } @InProceedings{bjornemoMICCAI02, Author = {M. Bj\"ornemo and A. Brun and R. Kikinis and C.-F. Westin}, Title = {Regularized Stochastic White Matter Tractography Using Diffusion Tensor {MRI}}, BookTitle = {Fifth International Conference on Medical Image Computing and Computer-Assisted Intervention (MICCAI'02)}, Pages = {435--442}, Address = {Tokyo, Japan}, year = 2002 } @PhdThesis{maddah_phdthesis2008, Author = {Maddah, M.}, Title = {{Quantitative Analysis of Cerebral White Matter Anatomy from Diffusion MRI}}, School = {Massachusetts Institute of Technology}, year = 2008 } @Article{iturriamedina2007cba, Author = {Iturria-Medina, Y. and Canales-Rodr{\'\i}guez, EJ and Melie-Garc{\'\i}a, L. and Vald{\'e}s-Hern{\'a}ndez, PA and Mart{\'\i}nez-Montes, E. and Alem{\'a}n-G{\'o}mez, Y. and S{\'a}nchez-Bornot, JM}, Title = {Characterizing brain anatomical connections using diffusion weighted \{{M}{RI}\} and graph theory}, Journal = {Neuroimage}, Volume = {36}, Number = {3}, Pages = {645--660}, file = {attachment\:iturria-medinaet_dti_graph_2007.pdf:attachment\:iturria-medinaet_dti_graph_2007.pdf:PDF}, publisher = {Elsevier}, year = 2007 } @Article{Harel2001, Author = {Harel, David and Koren, Yehuda}, Title = {{On Clustering Using Random Walks}}, Pages = {18--41}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Harel, Koren - 2001 - On Clustering Using Random Walks.pdf:pdf}, year = 2001 } @Article{Kim2009, Author = {Kim, M S and Han, J}, Title = {{Chronicle: A two-stage density-based clustering algorithm for dynamic networks}}, Journal = {In: Discovery Science.}, Volume = {pp}, Pages = {152--167}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Kim, Han - 2009 - Chronicle A two-stage density-based clustering algorithm for dynamic networks.pdf:pdf}, year = 2009 } @Article{Tsai2007, Author = {Tsai, Andy and Westin, Carl-fredrik and Hero, Alfred O and Willsky, Alan S}, Title = {{Fiber tract clustering on manifolds with dual rooted-graphs}}, Journal = {in CVPR}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Tsai et al. - 2007 - Fiber tract clustering on manifolds with dual rooted-graphs.pdf:pdf}, year = 2007 } @Article{Staempfli2008NeuroImage, Author = {Staempfli, P. and Reischauer, C. and Jaermann, T. and Valavanis, A. and Kollias, S. and Boesiger, P.}, Title = {Combining {fMRI} and {DTI}: A framework for exploring the limits of {fMRI}-guided {DTI} fiber tracking and for verifying {DTI}-based fiber tractography results}, Journal = {NeuroImage}, Volume = {39}, Number = {1}, Pages = {119-126}, abstract = {A powerful, non-invasive technique for estimating and visualizing white matter tracts in the human brain in vivo is white matter fiber tractography that uses magnetic resonance diffusion tensor imaging. The success of this method depends strongly on the capability of the applied tracking algorithm and the quality of the underlying data set. However, DTI-based fiber tractography still lacks standardized validation. 
In the present work, a combined fMRI/DTI study was performed, both to develop a setup for verifying fiber tracking results using fMRI-derived functional connections and to explore the limitations of fMRI based DTI fiber tracking. Therefore, a minor fiber bundle that features several fiber crossings and intersections was examined: The striatum and its connections to the primary motor cortex were examined by using two approaches to derive the somatotopic organization of the striatum. First, an fMRI-based somatotopic map of the striatum was reconstructed, based on fMRI activations that were provoked by unilateral motor tasks. Second, fMRI-guided DTI fiber tracking was performed to generate DTI-based somatotopic maps, using a standard line propagation and an advanced fast marching algorithm. The results show that the fiber connections reconstructed by the advanced fast marching algorithm are in good agreement with known anatomy, and that the DTI-revealed somatotopy is similar to the fMRI somatotopy. Furthermore, the study illustrates that the combination of fMRI with DTI can supply additional information in order to choose reasonable seed regions for generating functionally relevant networks and to validate reconstructed fibers.}, file = {attachment\:Staempfli2008NeuroImage.pdf:attachment\:Staempfli2008NeuroImage.pdf:PDF}, publisher = {Elsevier}, url = {http://www.sciencedirect.com/science/article/B6WNP-4PHSC6C-2/2/dbb7febf8dca292f483c25d800bdf700}, year = 2008 } @Article{Kubicki2006, Author = {Kubicki, M and Shenton, M E}, Title = {{A Method for Clustering White Matter}}, Journal = {Ajnr. American Journal Of Neuroradiology}, Number = {May}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Kubicki, Shenton - 2006 - A Method for Clustering White Matter.pdf:pdf}, year = 2006 } @Article{Reese2003MRM, Author = {Reese, T.G. and Heid, O. and Weisskoff, R.M. and Wedeen, V.J.}, Title = {Reduction of eddy-current-induced distortion in diffusion MRI using a twice-refocused spin echo}, Journal = {Magnetic Resonance in Medicine}, Volume = {49}, Number = {1}, Pages = {177-182}, abstract = {CP: Copyright 2003 Wiley-Liss, Inc. ON: 1522-2594 PN: 0740-3194 AD: Department of Radiology, Massachusetts General Hospital, Boston, Massachusetts; Medical Engineering Division, Siemens AG, Erlangen, Germany; Epix Medical Inc., Cambridge, Massachusetts DOI: 10.1002/mrm.10308 US: http://dx.doi.org/10.1002/mrm.10308 AB: Image distortion due to field gradient eddy currents can create image artifacts in diffusion-weighted MR images. These images, acquired by measuring the attenuation of NMR signal due to directionally dependent diffusion, have recently been shown to be useful in the diagnosis and assessment of acute stroke and in mapping of tissue structure. This work presents an improvement on the spin-echo (SE) diffusion sequence that displays less distortion and consequently improves image quality. Adding a second refocusing pulse provides better image quality with less distortion at no cost in scanning efficiency or effectiveness, and allows more flexible diffusion gradient timing. By adjusting the timing of the diffusion gradients, eddy currents with a single exponential decay constant can be nulled, and eddy currents with similar decay constants can be greatly reduced. This new sequence is demonstrated in phantom measurements and in diffusion anisotropy images of normal human brain. Magn Reson Med 49:177-182, 2003. 
2003 Wiley-Liss, Inc.}, owner = {ian}, timestamp = {2009.03.12}, year = 2003 } @Article{Hua2008NeuroImage, Author = {Hua, Kegang and Zhang, Jiangyang and Wakana, Setsu and Jiang, Hangyi and Li, Xin and Reich, Daniel S. and Calabresi, Peter A. and Pekar, James J. and {van Zijl}, Peter C.M. and Mori, Susumu}, Title = {Tract probability maps in stereotaxic spaces: Analyses of white matter anatomy and tract-specific quantification}, Journal = {NeuroImage}, Volume = {39}, Number = {1}, Pages = {336-347}, abstract = {Diffusion tensor imaging (DTI) is an exciting new MRI modality that can reveal detailed anatomy of the white matter. DTI also allows us to approximate the 3D trajectories of major white matter bundles. By combining the identified tract coordinates with various types of MR parameter maps, such as T2 and diffusion properties, we can perform tract-specific analysis of these parameters. Unfortunately, 3D tract reconstruction is marred by noise, partial volume effects, and complicated axonal structures. Furthermore, changes in diffusion anisotropy under pathological conditions could alter the results of 3D tract reconstruction. In this study, we created a white matter parcellation atlas based on probabilistic maps of 11 major white matter tracts derived from the DTI data from 28 normal subjects. Using these probabilistic maps, automated tract-specific quantification of fractional anisotropy and mean diffusivity were performed. Excellent correlation was found between the automated and the individual tractography-based results. This tool allows efficient initial screening of the status of multiple white matter tracts. }, file = {attachment\:Hua2008NeuroImage.pdf:attachment\:Hua2008NeuroImage.pdf:PDF}, publisher = {Elsevier}, url = {http://www.sciencedirect.com/science/article/B6WNP-4PF1WFR-5/2/c08a39189151d2b118cf7f8805fe8e2a}, year = 2008 } @Article{Tuch2002, Author = {Tuch, David S. and Reese, Timothy G. and Wiegell, Mette R. and Makris, Nikos and Belliveau, John W. and Wedeen, Van J.}, Title = {{High angular resolution diffusion imaging reveals intravoxel white matter fiber heterogeneity}}, Journal = {Magnetic Resonance in Medicine}, Volume = {48}, Number = {4}, Pages = {577--582}, abstract = {Magnetic resonance (MR) diffusion tensor imaging (DTI) can resolve the white matter fiber orientation within a voxel provided that the fibers are strongly aligned. However, a given voxel may contain a distribution of fiber orientations due to, for example, intravoxel fiber crossing. The present study sought to test whether a geodesic, high b-value diffusion gradient sampling scheme could resolve multiple fiber orientations within a single voxel. In regions of fiber crossing the diffusion signal exhibited multiple local maxima/minima as a function of diffusion gradient orientation, indicating the presence of multiple intravoxel fiber orientations. The multimodality of the observed diffusion signal precluded the standard tensor reconstruction, so instead the diffusion signal was modeled as arising from a discrete mixture of Gaussian diffusion processes in slow exchange, and the underlying mixture of tensors was solved for using a gradient descent scheme. The multitensor reconstruction resolved multiple intravoxel fiber populations corresponding to known fiber anatomy. Magn Reson Med 48:577-582, 2002. 
© 2002 Wiley-Liss, Inc.}, doi = {10.1002/mrm.10268}, url = {http://dx.doi.org/10.1002/mrm.10268}, year = 2002 } @Article{Kerkyacharian2007, Author = {Kerkyacharian, G and Petrushev, P and Picard, D and Willer, T}, Title = {{Needlet algorithms for estimation in inverse problems}}, Journal = {Electron. J. Stat}, Volume = {1}, Pages = {30--76}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Kerkyacharian et al. - 2007 - Needlet algorithms for estimation in inverse problems.pdf:pdf}, year = 2007 } @Article{Edition, Author = {Edition, Second}, Title = {{Statistical Pattern Recognition}}, Journal = {Pattern Recognition}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Edition - Unknown - Statistical Pattern Stas-tical Pattern Recognit ion.pdf:pdf} } @Article{Sverre2009, Author = {Seljebotn, Dag Sverre}, Title = {{Fast numerical computations with Cython}}, Journal = {Proceedings of the 8th Python in Science Conference (SciPy 2009)}, Pages = {15--22}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Sverre - 2009 - Fast numerical computations with Cython.pdf:pdf}, year = 2009 } @Article{Heim2007ComputationalStatisticsDataAnalysis, Author = {Heim, S. and Fahrmeir, L. and Eilers, P.H.C. and Marx, B.D.}, Title = {3D space-varying coefficient models with application to diffusion tensor imaging}, Journal = {Computational Statistics \& Data Analysis}, Volume = {51}, Number = {12}, Pages = {6212-6228}, abstract = {The present methodological development and the primary application field originate from diffusion tensor imaging (DTI), a powerful nuclear magnetic resonance technique which enables the quantification of microscopical tissue properties. The current analysis framework of separate voxelwise regressions is reformulated as a 3D space-varying coefficient model (SVCM) for the entire set of diffusion tensor images recorded on a 3D voxel grid. The SVCM unifies the three-step cascade of standard data processing (voxelwise regression, smoothing, interpolation) into one framework based on B-spline basis functions. Thereby strength is borrowed from spatially correlated voxels to gain a regularization effect right at the estimation stage. Two SVCM variants are conceptualized: a full tensor product approach and a sequential approximation, rendering the SVCM numerically and computationally feasible even for the huge dimension of the joint model in a realistic setup. A simulation study shows that both approaches outperform the standard method of voxelwise regression with subsequent regularization. Application of the fast sequential method to real DTI data demonstrates the inherent ability to increase the grid resolution by evaluating the incorporated basis functions at intermediate points. The resulting continuous regularized tensor field may serve as basis for multiple applications, yet, amelioration of local adaptivity is desirable.
}, file = {attachment\:Heim2007ComputationalStatisticsDataAnalysis.pdf:attachment\:Heim2007ComputationalStatisticsDataAnalysis.pdf:PDF}, publisher = {Elsevier}, url = {http://www.sciencedirect.com/science/article/B6V8V-4MV74WR-2/2/882882c104fa98632263c151db9fda23}, year = 2007 } @Article{Heller, Author = {Heller, Katherine A and Ghahramani, Zoubin}, Title = {{Bayesian Hierarchical Clustering}}, Journal = {Proceedings of the 22nd International Conference on Machine Learning (ICML)}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Heller - Unknown - Bayesian Hierarchical Clustering.pdf:pdf}, year = 2005 } @Book{behrens2009diffusion, Author = {Behrens, T.E.J.}, Title = {{Diffusion MRI: From Quantitative Measurement to In-vivo Neuroanatomy}}, Publisher = {Academic Press}, year = 2009 } @Article{Descoteaux2007a, Author = {Descoteaux, M and Angelino, E and Fitzgibbons, S and Deriche, R}, Title = {{Regularized, fast, and robust analytical q-ball imaging}}, Journal = {Magnetic Resonance in Medicine}, Volume = {58}, Number = {3}, Pages = {497--510}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Descoteaux et al. - 2007 - Regularized, fast, and robust analytical q-ball imaging.pdf:pdf}, year = 2007 } @Article{PHW03, Author = {Parker, G. J. and Haroon, H. A. and Wheeler-Kingshott, C. A.}, Title = {A framework for a streamline-based probabilistic index of connectivity ({PIC}o) using a structural interpretation of {MRI} diffusion measurements.}, Journal = {J Magn Reson Imaging}, Volume = {18}, Number = {2}, Pages = {242-54}, abstract = {PURPOSE: To establish a general methodology for quantifying streamline-based diffusion fiber tracking methods in terms of probability of connection between points and/or regions. MATERIALS AND METHODS: The commonly used streamline approach is adapted to exploit the uncertainty in the orientation of the principal direction of diffusion defined for each image voxel. Running the streamline process repeatedly using Monte Carlo methods to exploit this inherent uncertainty generates maps of connection probability. Uncertainty is defined by interpreting the shape of the diffusion orientation profile provided by the diffusion tensor in terms of the underlying microstructure. RESULTS: Two candidates for describing the uncertainty in the diffusion tensor are proposed and maps of probability of connection to chosen start points or regions are generated in a number of major tracts. CONCLUSION: The methods presented provide a generic framework for utilizing streamline methods to generate probabilistic maps of connectivity.}, authoraddress = {Imaging Science and Biomedical Engineering, University of Manchester, Manchester, UK. geoff.parker@man.ac.uk}, keywords = {Anisotropy ; Brain/*anatomy \& histology ; Diffusion ; Diffusion Magnetic Resonance Imaging/*methods ; Echo-Planar Imaging ; Humans ; Models, Statistical ; Monte Carlo Method ; *Probability ; Uncertainty}, language = {eng}, medline-aid = {10.1002/jmri.10350 [doi]}, medline-ci = {Copyright 2003 Wiley-Liss, Inc.}, medline-crdt = {2003/07/29 05:00}, medline-da = {20030728}, medline-dcom = {20040129}, medline-edat = {2003/07/29 05:00}, medline-fau = {Parker, Geoffrey J M ; Haroon, Hamied A ; Wheeler-Kingshott, Claudia A M}, medline-is = {1053-1807 (Print)}, medline-jid = {9105850}, medline-jt = {Journal of magnetic resonance imaging : JMRI}, medline-lr = {20061115}, medline-mhda = {2004/01/30 05:00}, medline-own = {NLM}, medline-pl = {United States}, medline-pmid = {12884338}, medline-pst = {ppublish}, medline-pt = {Journal Article ; Research Support, Non-U.S.
Gov't}, medline-sb = {IM}, medline-so = {J Magn Reson Imaging. 2003 Aug;18(2):242-54.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=12884338}, year = 2003 } @Article{Garyfallidis2009, Author = {Garyfallidis, Eleftherios}, Title = {{Towards an accurate brain tractography using diffusion weighted imaging}}, Journal = {Imaging}, Number = {June}, Pages = {1--25}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Garyfallidis - 2009 - Towards an accurate brain tractography using di usion weighted imaging 1 Introduction.pdf:pdf}, year = 2009 } @Article{Liu2007NeuroImage, Author = {Liu, Tianming and Li, Hai and Wong, Kelvin and Tarokh, Ashley and Guo, Lei and Wong, Stephen T.C.}, Title = {Brain tissue segmentation based on {DTI} data}, Journal = {NeuroImage}, Volume = {38}, Number = {1}, Pages = {114-123}, abstract = {We present a method for automated brain tissue segmentation based on the multi-channel fusion of diffusion tensor imaging (DTI) data. The method is motivated by the evidence that independent tissue segmentation based on DTI parametric images provides complementary information of tissue contrast to the tissue segmentation based on structural MRI data. This has important applications in defining accurate tissue maps when fusing structural data with diffusion data. In the absence of structural data, tissue segmentation based on DTI data provides an alternative means to obtain brain tissue segmentation. Our approach to the tissue segmentation based on DTI data is to classify the brain into two compartments by utilizing the tissue contrast existing in a single channel. Specifically, because the apparent diffusion coefficient (ADC) values in the cerebrospinal fluid (CSF) are more than twice that of gray matter (GM) and white matter (WM), we use ADC images to distinguish CSF and non-CSF tissues. Additionally, fractional anisotropy (FA) images are used to separate WM from non-WM tissues, as highly directional white matter structures have much larger fractional anisotropy values. Moreover, other channels to separate tissue are explored, such as eigenvalues of the tensor, relative anisotropy (RA), and volume ratio (VR). We developed an approach based on the Simultaneous Truth and Performance Level Estimation (STAPLE) algorithm that combines these two-class maps to obtain a complete tissue segmentation map of CSF, GM, and WM. Evaluations are provided to demonstrate the performance of our approach. Experimental results of applying this approach to brain tissue segmentation and deformable registration of DTI data and spoiled gradient-echo (SPGR) data are also provided.}, file = {attachment\:Liu2007NeuroImage.pdf:attachment\:Liu2007NeuroImage.pdf:PDF}, publisher = {Elsevier}, url = {http://www.sciencedirect.com/science/article/B6WNP-4P61N6N-3/2/a1e3c8c3d22d6c80fa4693813e380a76}, year = 2007 } @Article{Alexander2007Neurotherapeutics, Author = {Alexander, Andrew L. and Lee, Jee Eun and Lazar, Mariana and Field, Aaron S.}, Title = {Diffusion Tensor Imaging of the Brain}, Journal = {Neurotherapeutics}, Volume = {4}, Number = {3}, Pages = {316-329}, abstract = {Diffusion tensor imaging (DTI) is a promising method for characterizing microstructural changes or differences with neuropathology and treatment. The diffusion tensor may be used to characterize the magnitude, the degree of anisotropy, and the orientation of directional diffusion.
This review addresses the biological mechanisms, acquisition, and analysis of DTI measurements. The relationships between DTI measures and white matter pathologic features (e.g., ischemia, myelination, axonal damage, inflammation, and edema) are summarized. Applications of DTI to tissue characterization in neurotherapeutic applications are reviewed. The interpretations of common DTI measures (mean diffusivity, MD; fractional anisotropy, FA; radial diffusivity, $D_r$; and axial diffusivity, $D_a$) are discussed. In particular, FA is highly sensitive to microstructural changes, but not very specific to the type of changes (e.g., radial or axial). To maximize the specificity and better characterize the tissue microstructure, future studies should use multiple diffusion tensor measures (e.g., MD and FA, or $D_a$ and $D_r$).}, doi = {10.1016/j.nurt.2007.05.011}, file = {attachment\:Alexander2007Neurotherapeutics.pdf:attachment\:Alexander2007Neurotherapeutics.pdf:PDF}, year = 2007 } @Article{canalesrodriguez2009mdq, Author = {Canales-Rodr{\'\i}guez, E.J. and Melie-Garc{\'\i}a, L. and Iturria-Medina, Y. and Center, C.N.}, Title = {{Mathematical description of q-space in spherical coordinates: Exact q-ball imaging.}}, Journal = {Magnetic resonance in medicine: official journal of the Society of Magnetic Resonance in Medicine/Society of Magnetic Resonance in Medicine}, year = 2009 } @Article{Yen2009, Author = {Yen, Luh and Fouss, Francois and Decaestecker, Christine and Francq, Pascal and Saerens, Marco}, Title = {{Graph nodes clustering with the sigmoid commute-time kernel: A comparative study}}, Journal = {Data \& Knowledge Engineering}, Volume = {68}, Number = {3}, Pages = {338--361}, doi = {10.1016/j.datak.2008.10.006}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Yen et al. - 2009 - Graph nodes clustering with the sigmoid commute-time kernel A comparative study(2).pdf:pdf}, issn = {0169023X}, publisher = {Elsevier B.V.}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0169023X0800147X}, year = 2009 } @Article{Hartley, Author = {Hartley, Richard and Zisserman, Andrew}, Title = {{in computervision Multiple View Geometry in Computer Vision}} } @Article{Oishi2008NeuroImage, Author = {Oishi, Kenichi and Zilles, Karl and Amunts, Katrin and Faria, Andreia and Jiang, Hangyi and Li, Xin and Akhter, Kazi and Hua, Kegang and Woods, Roger and Toga, Arthur W. and Pike, G. Bruce and Rosa-Neto, Pedro and Evans, Alan and Zhang, Jiangyang and Huang, Hao and Miller, Michael I. and {van Zijl}, Peter C. M. and Mazziotta, John and Mori, Susumu}, Title = {Human brain white matter atlas: Identification and assignment of common anatomical structures in superficial white matter}, Journal = {NeuroImage}, Volume = {in press}, abstract = {Structural delineation and assignment are the fundamental steps in understanding the anatomy of the human brain. The white matter has been structurally defined in the past only at its core regions (deep white matter). However, the most peripheral white matter areas, which are interleaved between the cortex and the deep white matter, have lacked clear anatomical definitions and parcellations. We used axonal fiber alignment information from diffusion tensor imaging (DTI) to delineate the peripheral white matter, and investigated its relationship with the cortex and the deep white matter. 
Using DTI data from 81 healthy subjects, we identified nine common, blade-like anatomical regions, which were further parcellated into 21 subregions based on the cortical anatomy. Four short association fiber tracts connecting adjacent gyri (U-fibers) were also identified reproducibly among the healthy population. We anticipate that this atlas will be useful resource for atlas-based white matter anatomical studies.}, file = {attachment\:Oishi2008NeuroImage.pdf:attachment\:Oishi2008NeuroImage.pdf:PDF}, year = 2008 } @Article{Jianu2009, Author = {Jianu, Radu and Demiralp, Cağatay and Laidlaw, David H}, Title = {{Exploring 3D DTI fiber tracts with linked 2D representations.}}, Journal = {IEEE transactions on visualization and computer graphics}, Volume = {15}, Number = {6}, Pages = {1449--56}, abstract = {We present a visual exploration paradigm that facilitates navigation through complex fiber tracts by combining traditional 3D model viewing with lower dimensional representations. To this end, we create standard streamtube models along with two two-dimensional representations, an embedding in the plane and a hierarchical clustering tree, for a given set of fiber tracts. We then link these three representations using both interaction and color obtained by embedding fiber tracts into a perceptually uniform color space. We describe an anecdotal evaluation with neuroscientists to assess the usefulness of our method in exploring anatomical and functional structures in the brain. Expert feedback indicates that, while a standalone clinical use of the proposed method would require anatomical landmarks in the lower dimensional representations, the approach would be particularly useful in accelerating tract bundle selection. Results also suggest that combining traditional 3D model viewing with lower dimensional representations can ease navigation through the complex fiber tract models, improving exploration of the connectivity in the brain.}, doi = {10.1109/TVCG.2009.141}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Jianu, Demiralp, Laidlaw - 2009 - Exploring 3D DTI fiber tracts with linked 2D representations..pdf:pdf}, issn = {1077-2626}, keywords = {Algorithms,Brain,Brain: anatomy \& histology,Cluster Analysis,Computer Graphics,Diffusion Magnetic Resonance Imaging,Diffusion Magnetic Resonance Imaging: methods,Humans,Image Processing, Computer-Assisted,Image Processing, Computer-Assisted: methods,Imaging, Three-Dimensional,Imaging, Three-Dimensional: methods,Models, Biological,Nerve Fibers}, pmid = {19834220}, url = {http://www.ncbi.nlm.nih.gov/pubmed/19834220}, year = 2009 } @Article{Oliphant2003, Author = {Oliphant, Travis E}, Title = {{SciPy Tutorial}}, Number = {September}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Oliphant - 2003 - SciPy Tutorial.pdf:pdf}, year = 2003 } @InProceedings{Wedeen2000, Author = {Wedeen, VJ and Reese, TG and Tuch, DS and Weigel, MR and Dou, JG and Weiskoff, RM and Chessler, D}, Title = {{Mapping fiber orientation spectra in cerebral white matter with Fourier-transform diffusion MRI}}, BookTitle = {Proc. Intl. Sot. Mag. Reson. Med}, Volume = {8}, Pages = {82}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Wedeen et al. - 2000 - Mapping fiber orientation spectra in cerebral white matter with Fourier-transform diffusion MRI.pdf:pdf}, url = {http://cds.ismrm.org/ismrm-2000/PDF1/0082.pdf}, year = 2000 } @conference{o2006high, author = {O'Donnell, L. 
and Westin, CF}, booktitle = {International Society of Magnetic Resonance in Medicine (ISMRM)}, organization = {Citeseer}, title = {{A high-dimensional fiber tract atlas}}, year = 2006 } @Article{Sorensen1999, Author = {Sorensen, A. Gregory and Wu, Ona and Copen, William A. and Davis, Timothy L. and Gonzalez, R. Gilberto and Koroshetz, Walter J. and Reese, Timothy G. and Rosen, Bruce R. and Wedeen, Van J. and Weisskoff, Robert M.}, Title = {{Human Acute Cerebral Ischemia: Detection of Changes in Water Diffusion Anisotropy by Using MR Imaging}}, Journal = {Radiology}, Volume = {212}, Number = {3}, Pages = {785--792}, abstract = {PURPOSE: To (a) determine the optimal choice of a scalar metric of anisotropy and (b) determine by means of magnetic resonance imaging if changes in diffusion anisotropy occurred in acute human ischemic stroke. MATERIALS AND METHODS: The full diffusion tensor over the entire brain was measured. To optimize the choice of a scalar anisotropy metric, the performances of scalar indices in simulated models and in a healthy volunteer were analyzed. The anisotropy, trace apparent diffusion coefficient (ADC), and eigenvalues of the diffusion tensor in lesions and contralateral normal brain were compared in 50 patients with stroke. RESULTS: Changes in anisotropy in patients were quantified by using fractional anisotropy because it provided the best performance in terms of contrast-to-noise ratio as a function of signal-to-noise ratio in simulations. The anisotropy of ischemic white matter decreased (P = .01). Changes in anisotropy in ischemic gray matter were not significant (P = .63). The trace ADC decreased for ischemic gray matter and white matter (P < .001). The first and second eigenvalues decreased in both ischemic gray and ischemic white matter (P < .001). The third eigenvalue decreased in ischemic gray (P = .001) and white matter (P = .03). CONCLUSION: Gray matter is mildly anisotropic in normal and early ischemic states. However, early white matter ischemia is associated with not only changes in trace ADC values but also significant changes in the anisotropy, or shape, of the water self-diffusion tensor.}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Sorensen et al. - 1999 - Human Acute Cerebral Ischemia Detection of Changes in Water Diffusion Anisotropy by Using MR Imaging.html:html}, month = sep, shorttitle = {Human Acute Cerebral Ischemia}, url = {http://radiology.rsnajnls.org/cgi/content/abstract/212/3/785}, year = 1999 } @Article{Ang2003, Author = {Tang, Yong and Nyengaard, Jens R. and Pakkenberg, Bente and Gundersen, Hans J{\o}rgen G.}, Title = {{Stereology of neuronal connections (myelinated fibers of white matter and synapses of neocortex) in human brain}}, Journal = {Methods}, Pages = {171--182}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Ang et al.
- 2003 - STEREOLOGY OF NEURONAL CONNECTIONS ( MYELINATED FIBERS OF WHITE MATTER AND SYNAPSES OF NEOCORTEX ) IN.pdf:pdf}, keywords = {human brain,myelinated nerve fibers,neocortex,stereology,synapse,white matter}, year = 2003 } @Article{Tournier2008, Author = {Tournier, J-Donald and Yeh, Chun-Hung and Calamante, Fernando and Cho, Kuan-Hung and Connelly, Alan and Lin, Ching-Po}, Title = {{Resolving crossing fibres using constrained spherical deconvolution: validation using diffusion-weighted imaging phantom data.}}, Journal = {NeuroImage}, Volume = {42}, Number = {2}, Pages = {617--25}, abstract = {Diffusion-weighted imaging can potentially be used to infer the connectivity of the human brain in vivo using fibre-tracking techniques, and is therefore of great interest to neuroscientists and clinicians. A key requirement for fibre tracking is the accurate estimation of white matter fibre orientations within each imaging voxel. The diffusion tensor model, which is widely used for this purpose, has been shown to be inadequate in crossing fibre regions. A number of approaches have recently been proposed to address this issue, based on high angular resolution diffusion-weighted imaging (HARDI) data. In this study, an experimental model of crossing fibres, consisting of water-filled plastic capillaries, is used to thoroughly assess three such techniques: constrained spherical deconvolution (CSD), super-resolved CSD (super-CSD) and Q-ball imaging (QBI). HARDI data were acquired over a range of crossing angles and b-values, from which fibre orientations were computed using each technique. All techniques were capable of resolving the two fibre populations down to a crossing angle of 45 degrees , and down to 30 degrees for super-CSD. A bias was observed in the fibre orientations estimated by QBI for crossing angles other than 90 degrees, consistent with previous simulation results. Finally, for a 45 degrees crossing, the minimum b-value required to resolve the fibre orientations was 4000 s/mm(2) for QBI, 2000 s/mm(2) for CSD, and 1000 s/mm(2) for super-CSD. The quality of estimation of fibre orientations may profoundly affect fibre tracking attempts, and the results presented provide important additional information regarding performance characteristics of well-known methods.}, doi = {10.1016/j.neuroimage.2008.05.002}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Tournier et al. 
- 2008 - Resolving crossing fibres using constrained spherical deconvolution validation using diffusion-weighted imaging phantom data..pdf:pdf}, issn = {1095-9572}, keywords = {Algorithms,Artificial Intelligence,Brain,Brain: anatomy \& histology,Diffusion Magnetic Resonance Imaging,Diffusion Magnetic Resonance Imaging: instrumentat,Diffusion Magnetic Resonance Imaging: methods,Humans,Image Enhancement,Image Enhancement: methods,Image Interpretation, Computer-Assisted,Image Interpretation, Computer-Assisted: methods,Imaging, Three-Dimensional,Imaging, Three-Dimensional: methods,Nerve Fibers, Myelinated,Nerve Fibers, Myelinated: ultrastructure,Pattern Recognition, Automated,Pattern Recognition, Automated: methods,Phantoms, Imaging,Reproducibility of Results,Sensitivity and Specificity}, pmid = {18583153}, url = {http://www.ncbi.nlm.nih.gov/pubmed/18583153}, year = 2008 } @Article{Aganj2010, Author = {Aganj, Iman and Lenglet, Christophe and Jahanshad, Neda and Yacoub, Essa and Harel, Noam and Thompson, Paul M and Series, I M A Preprint and E, Church Street S}, Title = {{A HOUGH TRANSFORM GLOBAL PROBABILISTIC APPROACH A Hough Transform Global Probabilistic Approach to Multiple- Subject Diffusion MRI Tractography}}, Pages = {612--626}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Aganj et al. - 2010 - A HOUGH TRANSFORM GLOBAL PROBABILISTIC APPROACH A Hough Transform Global Probabilistic Approach to Multiple- Subject Diffusion MRI Tractography.pdf:pdf}, year = 2010 } @Article{Sherbondy2008JVision, Author = {Sherbondy, Anthony J. and Dougherty, Robert F. and Ben-Shachar, Michal and Napel, Sandy and Wandell, Brian A.}, Title = {{ConTrack: Finding the most likely pathways between brain regions using diffusion tractography}}, Journal = {J. Vis.}, Volume = {8}, Number = {9}, Pages = {1-16}, abstract = {Magnetic resonance diffusion-weighted imaging coupled with fiber tractography (DFT) is the only non-invasive method for measuring white matter pathways in the living human brain. DFT is often used to discover new pathways. But there are also many applications, particularly in visual neuroscience, in which we are confident that two brain regions are connected, and we wish to find the most likely pathway forming the connection. In several cases, current DFT algorithms fail to find these candidate pathways. To overcome this limitation, we have developed a probabilistic DFT algorithm (ConTrack) that identifies the most likely pathways between two regions. We introduce the algorithm in three parts: a sampler to generate a large set of potential pathways, a scoring algorithm that measures the likelihood of a pathway, and an inferential step to identify the most likely pathways connecting two regions. In a series of experiments using human data, we show that ConTrack estimates known pathways at positions that are consistent with those found using a high quality deterministic algorithm. 
Further we show that separating sampling and scoring enables ConTrack to identify valid pathways, known to exist, that are missed by other deterministic and probabilistic DFT algorithms.}, file = {attachment\:Sherbondy-2008-jov-8-9-15.pdf:attachment\:Sherbondy-2008-jov-8-9-15.pdf:PDF}, issn = {1534-7362}, keywords = {diffusion imaging, fiber tractography, MT+, corpus callosum, optic radiation}, month = {7}, url = {http://journalofvision.org/8/9/15/}, year = 2008 } @Article{jones1999osm, Author = {Jones, DK and Horsfield, MA and Simmons, A.}, Title = {{Optimal strategies for measuring diffusion in anisotropic systems by magnetic resonance imaging}}, Journal = {optimization}, Volume = {525}, year = 1999 } @Article{Dauguet2007, Author = {Dauguet, Julien and Peled, Sharon and Berezovskii, Vladimir and Delzescaux, Thierry and Warfield, Simon K and Born, Richard and Westin, Carl-Fredrik}, Title = {{Comparison of fiber tracts derived from in-vivo DTI tractography with 3D histological neural tract tracer reconstruction on a macaque brain.}}, Journal = {NeuroImage}, Volume = {37}, Number = {2}, Pages = {530--8}, abstract = {Since the introduction of diffusion weighted imaging (DWI) as a method for examining neural connectivity, its accuracy has not been formally evaluated. In this study, we directly compared connections that were visualized using injected neural tract tracers (WGA-HRP) with those obtained using in-vivo diffusion tensor imaging (DTI) tractography. First, we injected the tracer at multiple sites in the brain of a macaque monkey; second, we reconstructed the histological sections of the labeled fiber tracts in 3D; third, we segmented and registered the fibers (somatosensory and motor tracts) with the anatomical in-vivo MRI from the same animal; and last, we conducted fiber tracing along the same pathways on the DTI data using a classical diffusion tracing technique with the injection sites as seeds. To evaluate the performance of DTI fiber tracing, we compared the fibers derived from the DTI tractography with those segmented from the histology. We also studied the influence of the parameters controlling the tractography by comparing Dice superimposition coefficients between histology and DTI segmentations. While there was generally good visual agreement between the two methods, our quantitative comparisons reveal certain limitations of DTI tractography, particularly for regions at remote locations from seeds. We have thus demonstrated the importance of appropriate settings for realistic tractography results.}, doi = {10.1016/j.neuroimage.2007.04.067}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Dauguet et al. 
- 2007 - Comparison of fiber tracts derived from in-vivo DTI tractography with 3D histological neural tract tracer reconstruction on a macaque brain..pdf:pdf}, issn = {1053-8119}, keywords = {Animals,Anisotropy,Brain,Brain: anatomy \& histology,Diffusion Magnetic Resonance Imaging,Image Processing, Computer-Assisted,Imaging, Three-Dimensional,Immunohistochemistry,Macaca,Nerve Fibers,Nerve Fibers: ultrastructure,Neural Pathways,Neural Pathways: cytology}, pmid = {17604650}, url = {http://www.ncbi.nlm.nih.gov/pubmed/17604650}, year = 2007 } @InProceedings{Fillard2006ISBI, Author = {Fillard, Pierre and Arsigny, Vincent and Pennec, Xavier and Ayache, Nicholas}, Title = {The Tensor Distribution Function}, BookTitle = {Third IEEE International Symposium on Biomedical Imaging: From Nano to Macro}, Pages = {(abstract)}, Publisher = {IEEE}, abstract = {Diffusion tensor MRI is an imaging modality that is gaining importance in clinical applications. However, in a clinical environment, data have to be acquired rapidly, often at the detriment of the image quality. We propose a new variational framework that specifically targets low quality DT-MRI. The Rician nature of the noise on the images leads us to a maximum likelihood strategy to estimate the tensor field. To further reduce the noise, we optimally exploit the spatial correlation by adding to the estimation an anisotropic regularization term. This criterion is easily optimized thanks to the use of the recently introduced Log-Euclidean metrics. Results on real clinical data show promising improvements of fiber tracking in the brain and the spinal cord.}, year = 2006 } @Article{ZHK+06, Author = {Zhuang, J. and Hrabe, J. and Kangarlu, A. and Xu, D. and Bansal, R. and Branch, C. A. and Peterson, B. S.}, Title = {Correction of eddy-current distortions in diffusion tensor images using the known directions and strengths of diffusion gradients.}, Journal = {J Magn Reson Imaging}, Volume = {24}, Number = {5}, Pages = {1188-93}, abstract = {PURPOSE: To correct eddy-current artifacts in diffusion tensor (DT) images without the need to obtain auxiliary scans for the sole purpose of correction. MATERIALS AND METHODS: DT images are susceptible to distortions caused by eddy currents induced by large diffusion gradients. We propose a new postacquisition correction algorithm that does not require any auxiliary reference scans. It also avoids the problematic procedure of cross-correlating images with significantly different contrasts. A linear model is used to describe the dependence of distortion parameters (translation, scaling, and shear) on the diffusion gradients. The model is solved numerically to provide an individual correction for every diffusion-weighted (DW) image. RESULTS: The assumptions of the linear model were successfully verified in a series of experiments on a silicon oil phantom. The correction obtained for this phantom was compared with correction obtained by a previously published method. The algorithm was then shown to markedly reduce eddy-current distortions in DT images from human subjects. CONCLUSION: The proposed algorithm can accurately correct eddy-current artifacts in DT images. Its principal advantages are that only images with comparable signals and contrasts are cross-correlated, and no additional scans are required.}, authoraddress = {Magnetic Resonance Imaging Unit, Department of Psychiatry, Columbia College of Physicians and Surgeons, New York, New York, USA. 
jc.zhuang@gmail.com}, keywords = {*Algorithms ; Brain/*anatomy \& histology ; Diffusion Magnetic Resonance Imaging/*methods ; Echo-Planar Imaging/instrumentation/*methods ; Humans ; Image Enhancement/*methods ; Image Interpretation, Computer-Assisted/*methods ; Phantoms, Imaging ; Reproducibility of Results ; Sensitivity and Specificity}, language = {eng}, medline-aid = {10.1002/jmri.20727 [doi]}, medline-ci = {Copyright (c) 2006 Wiley-Liss, Inc.}, medline-crdt = {2006/10/07 09:00}, medline-da = {20061030}, medline-dcom = {20070130}, medline-edat = {2006/10/07 09:00}, medline-fau = {Zhuang, Jiancheng ; Hrabe, Jan ; Kangarlu, Alayar ; Xu, Dongrong ; Bansal, Ravi ; Branch, Craig A ; Peterson, Bradley S}, medline-gr = {DA017820/DA/NIDA NIH HHS/United States ; K02 MH074677-01/MH/NIMH NIH HHS/United States ; MH068318/MH/NIMH NIH HHS/United States ; MH59139/MH/NIMH NIH HHS/United States ; MH74677/MH/NIMH NIH HHS/United States ; R01 DA017820-03/DA/NIDA NIH HHS/United States ; R01 MH068318-03/MH/NIMH NIH HHS/United States}, medline-is = {1053-1807 (Print)}, medline-jid = {9105850}, medline-jt = {Journal of magnetic resonance imaging : JMRI}, medline-lr = {20081120}, medline-mhda = {2007/01/31 09:00}, medline-mid = {NIHMS44414}, medline-oid = {NLM: NIHMS44414 ; NLM: PMC2364728}, medline-own = {NLM}, medline-pl = {United States}, medline-pmc = {PMC2364728}, medline-pmid = {17024663}, medline-pst = {ppublish}, medline-pt = {Evaluation Studies ; Journal Article ; Research Support, N.I.H., Extramural ; Research Support, Non-U.S. Gov't}, medline-sb = {IM}, medline-so = {J Magn Reson Imaging. 2006 Nov;24(5):1188-93.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=17024663}, year = 2006 } @Article{Rothwell, Author = {Rothwell, John}, Title = {{HBM2010 Program at a Glance *}}, Journal = {Program}, Pages = {2010--2010}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Rothwell - Unknown - HBM2010 Program at a Glance.pdf:pdf} } @Article{Mori2008NeuroImage, Author = {Mori, Susumu and Oishi, Kenichi and Jiang, Hangyi and Jiang, Li and Li, Xin and Akhter, Kazi and Hua, Kegang and Faria, Andreia V. and Mahmood, Asif and Woods, Roger and Toga, Arthur W. and Pike, G. Bruce and Neto, Pedro Rosa and Evans, Alan and Zhang, Jiangyang and Huang, Hao and Miller, Michael I. and {van Zijl}, Peter and Mazziotta, John}, Title = {Stereotaxic white matter atlas based on diffusion tensor imaging in an ICBM template}, Journal = {NeuroImage}, Volume = {40}, Number = {2}, Pages = {570-582}, abstract = {Brain registration to a stereotaxic atlas is an effective way to report anatomic locations of interest and to perform anatomic quantification. However, existing stereotaxic atlases lack comprehensive coordinate information about white matter structures. In this paper, white matter-specific atlases in stereotaxic coordinates are introduced. As a reference template, the widely used ICBM-152 was used. The atlas contains fiber orientation maps and hand-segmented white matter parcellation maps based on diffusion tensor imaging (DTI). Registration accuracy by linear and non-linear transformation was measured, and automated template-based white matter parcellation was tested. The results showed a high correlation between the manual ROI-based and the automated approaches for normal adult populations. 
The atlases are freely available and believed to be a useful resource as a target template and for automated parcellation methods. }, file = {attachment\:Mori2008NeuroImage.pdf:attachment\:Mori2008NeuroImage.pdf:PDF}, publisher = {Elsevier}, url = {http://www.sciencedirect.com/science/article/B6WNP-4RH37X2-1/2/24add3aed52eb682f7064260c33384e4}, year = 2008 } @conference{weinstein1999tad, author = {Weinstein, D. and Kindlmann, G. and Lundberg, E.}, booktitle = {Proceedings of the conference on Visualization'99: celebrating ten years}, organization = {IEEE Computer Society Press Los Alamitos, CA, USA}, pages = {249--253}, title = {{Tensorlines: Advection-diffusion based propagation through diffusion tensor fields}}, year = 1999 } @Misc{tenenbaum2000ggf, Author = {Tenenbaum, J.B. and Silva, V. and Langford, J.C.}, Title = {{A global geometric framework for nonlinear dimensionality reduction}}, journal = {Science}, number = {5500}, pages = {2319--2323}, volume = {290}, year = 2000 } @Article{Loper1990, Author = {Loper, David and Annua, Benton E R Spin-up}, Title = {{Bingham statistics}}, Journal = {Statistics}, Volume = {2}, Number = {c}, Pages = {45--47}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Loper, Annua - 1990 - Bingham statistics.pdf:pdf}, year = 1990 } @Article{chamberlain2008gma, Author = {Chamberlain, S.R. and Menzies, L.A. and Fineberg, N.A. and del Campo, N. and Suckling, J. and Craig, K. and M{\"u}ller, U. and Robbins, T.W. and Bullmore, E.T. and Sahakian, B.J.}, Title = {{Grey matter abnormalities in trichotillomania: morphometric magnetic resonance imaging study}}, Journal = {The British Journal of Psychiatry}, Volume = {193}, Number = {3}, Pages = {216--221}, publisher = {RCP}, year = 2008 } @Article{Batchelor2006, Author = {Batchelor, P G and Calamante, F and Tournier, J D and Atkinson, D and Hill, D L and Connelly, A}, Title = {{Quantification of the shape of fiber tracts}}, Journal = {Magn. Reson. Med}, Volume = {55}, Pages = {894--903}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Batchelor et al. - 2006 - Quantification of the shape of fiber tracts.pdf:pdf}, year = 2006 } @Article{Qazi2009, Author = {Qazi, Arish a and Radmanesh, Alireza and O'Donnell, Lauren and Kindlmann, Gordon and Peled, Sharon and Whalen, Stephen and Westin, Carl-Fredrik and Golby, Alexandra J}, Title = {{Resolving crossings in the corticospinal tract by two-tensor streamline tractography: Method and clinical assessment using fMRI.}}, Journal = {NeuroImage}, Volume = {47 Suppl 2}, Pages = {T98--106}, abstract = {An inherent drawback of the traditional diffusion tensor model is its limited ability to provide detailed information about multidirectional fiber architecture within a voxel. This leads to erroneous fiber tractography results in locations where fiber bundles cross each other. This may lead to the inability to visualize clinically important tracts such as the lateral projections of the corticospinal tract. In this report, we present a deterministic two-tensor eXtended Streamline Tractography (XST) technique, which successfully traces through regions of crossing fibers. We evaluated the method on simulated and in vivo human brain data, comparing the results with the traditional single-tensor and with a probabilistic tractography technique. 
By tracing the corticospinal tract and correlating with fMRI-determined motor cortex in both healthy subjects and patients with brain tumors, we demonstrate that two-tensor deterministic streamline tractography can accurately identify fiber bundles consistent with anatomy and previously not detected by conventional single-tensor tractography. When compared to the dense connectivity maps generated by probabilistic tractography, the method is computationally efficient and generates discrete geometric pathways that are simple to visualize and clinically useful. Detection of crossing white matter pathways can improve neurosurgical visualization of functionally relevant white matter areas.}, doi = {10.1016/j.neuroimage.2008.06.034}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Qazi et al. - 2009 - Resolving crossings in the corticospinal tract by two-tensor streamline tractography Method and clinical assessment using fMRI..pdf:pdf}, issn = {1095-9572}, keywords = {Algorithms,Brain Neoplasms,Brain Neoplasms: physiopathology,Computer Simulation,Female,Humans,Magnetic Resonance Imaging,Magnetic Resonance Imaging: methods,Male,Middle Aged,Models, Theoretical,Motor Cortex,Motor Cortex: pathology,Motor Cortex: physiopathology,Probability,Pyramidal Tracts,Pyramidal Tracts: pathology}, pmid = {18657622}, url = {http://www.ncbi.nlm.nih.gov/pubmed/18657622}, year = 2009 } @Article{Neji2008, Author = {Neji, R and Fleury, G and Deux, J-f and Rahmouni, A and Bassez, G and Vignaud, A and Paragios, N}, Title = {{Support vector driven Markov random fields towards DTI segmentation of the human skeletal muscle}}, Pages = {923--926}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Neji et al. - 2008 - SUPPORT VECTOR DRIVEN MARKOV RANDOM FIELDS TOWARDS DTI SEGMENTATION OF THE HUMAN SKELETAL MUSCLE b b b b.pdf:pdf}, year = 2008 } @Article{Okada2006, Author = {Okada, Tsutomu}, Title = {{Diffusion-tensor fiber tractography: intraindividual comparison of 3.0-T and 1.5-T MR imaging}}, Journal = {Radiology}, Volume = {238}, Number = {2}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Okada - 2006 - Diffusion-Tensor Fiber Purpose Methods Results Conclusion.pdf:pdf}, year = 2006 } @Article{moriBook, Author = {Mori, S. and Wakana, S. and Nagae-Poetscher, LM and Van Zijl, PCM}, Title = {{MRI atlas of human white matter}}, Journal = {American Journal of Neuroradiology}, publisher = {Am Soc Neuroradiology} } @Article{Basser1994BiophysicalJ, Author = {Basser, Peter J. and Mattiello, James and LeBihan, Denis}, Title = {{MR} Diffusion Tensor Spectroscopy and Imaging}, Journal = {Biophysical Journal}, Volume = {66}, Pages = {259-267}, abstract = {This paper describes a new {NMR} imaging modality-{MR} diffusion tensor imaging. It consists of estimating an effective diffusion tensor, $D_{\textrm{eff}}$, within a voxel, and then displaying useful quantities derived from it. We show how the phenomenon of anisotropic diffusion of water (or metabolites) in anisotropic tissues, measured noninvasively by these {NMR} methods, is exploited to determine fiber tract orientation and mean particle displacements. Once $D_{\textrm{eff}}$ is estimated from a series of {NMR} pulsed-gradient, spin-echo experiments, a tissue's three orthotropic axes can be determined.
They coincide with the eigen- vectors of $D_{\textrm{eff}}$, while the effective diffusivities along these orthotropic directions are the eigenvalues of $D_{\textrm{eff}}$. Diffusion ellipsoids, constructed in each voxel from $D_{\textrm{eff}}$, depict both these orthotropic axes and the mean diffusion distances in these directions. Moreover, the three scalar invariants of $D_{\textrm{eff}}$, which are independent of the tissue's orientation in the laboratory frame of reference, reveal useful information about molecular mobility reflective of local microstructure and anatomy. Inherently, tensors (like $D_{\textrm{eff}}$) describing transport processes in anisotropic media contain new information within a macroscopic voxel that scalars (such as the apparent diffusivity, proton density, $T_1$, and $T_2$) do not.}, year = 1994 } @Article{Avants2010, Author = {Avants, Brian B and Tustison, Nick and Song, Gang}, Title = {{Advanced Normalization Tools ( ANTS )}}, Journal = {Computing}, Pages = {1--33}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Avants, Tustison, Song - 2010 - Advanced Normalization Tools ( ANTS ).pdf:pdf}, year = 2010 } @Article{Beaulieu2002NMRBiomed, Author = {Christian Beaulieu}, Title = {The basis of anisotropic water diffusion in the nervous system - a technical review}, Journal = {NMR in Biomedicine}, Volume = {15}, Number = {7-8}, Pages = {435-455}, doi = {10.1002/nbm.782}, owner = {ian}, timestamp = {2009.04.27}, url = {http://dx.doi.org/10.1002/nbm.782}, year = 2002 } @Book{Callaghan1991OUP, Author = {Callaghan, Paul T.}, Title = {Principles of Nuclear Magnetic Resonance Microscopy}, Publisher = {Oxford University Press}, owner = {ian}, timestamp = {2009.03.12}, url = {http://books.google.co.uk/books?id=yjrjT_W5hygC}, year = 1991 } @Article{ODonnell_MICCAI07, Author = {O'Donnell, L. J. and Westin, C. F. and Golby, A. J.}, Title = {Tract-based morphometry.}, Journal = {Med Image Comput Comput Assist Interv Int Conf Med Image Comput Comput Assist Interv}, Volume = {10}, Number = {Pt 2}, Pages = {161-8}, abstract = {Multisubject statistical analyses of diffusion tensor images in regions of specific white matter tracts have commonly measured only the mean value of a scalar invariant such as the fractional anisotropy (FA), ignoring the spatial variation of FA along the length of fiber tracts. We propose to instead perform tract-based morphometry (TBM), or the statistical analysis of diffusion MRI data in an anatomical tract-based coordinate system. We present a method for automatic generation of white matter tract arc length parameterizations, based on learning a fiber bundle model from tractography from multiple subjects. Our tract-based coordinate system enables TBM for the detection of white matter differences in groups of subjects. We present example TBM results from a study of interhemispheric differences in FA.}, authoraddress = {Golby Surgical Brain Mapping Laboratory, Department of Neurosurgery, Brigham and Women's Hospital, Harvard Medical School, Boston MA, USA. 
odonnell@bwh.harvard.edu}, keywords = {Algorithms ; *Artificial Intelligence ; Brain/*cytology ; Cluster Analysis ; Diffusion Magnetic Resonance Imaging/*methods ; Humans ; Image Enhancement/methods ; Image Interpretation, Computer-Assisted/*methods ; Imaging, Three-Dimensional/*methods ; Nerve Fibers, Myelinated/*ultrastructure ; Neural Pathways/cytology ; Pattern Recognition, Automated/*methods ; Reproducibility of Results ; Sensitivity and Specificity}, language = {eng}, medline-crdt = {2007/11/30 09:00}, medline-da = {20071129}, medline-dcom = {20080103}, medline-edat = {2007/11/30 09:00}, medline-fau = {O'Donnell, Lauren J ; Westin, Carl-Fredrik ; Golby, Alexandra J}, medline-gr = {P41 RR15241-01A1/RR/NCRR NIH HHS/United States ; P41RR13218/RR/NCRR NIH HHS/United States ; R01 AG20012-01/AG/NIA NIH HHS/United States ; R01MH074794/MH/NIMH NIH HHS/United States ; U41RR019703/RR/NCRR NIH HHS/United States ; U54EB005149/EB/NIBIB NIH HHS/United States}, medline-jid = {101249582}, medline-jt = {Medical image computing and computer-assisted intervention : MICCAI ... International Conference on Medical Image Computing and Computer-Assisted Intervention}, medline-mhda = {2008/01/04 09:00}, medline-own = {NLM}, medline-pl = {Germany}, medline-pmid = {18044565}, medline-pst = {ppublish}, medline-pt = {Journal Article ; Research Support, N.I.H., Extramural}, medline-sb = {IM}, medline-so = {Med Image Comput Comput Assist Interv Int Conf Med Image Comput Comput Assist Interv. 2007;10(Pt 2):161-8.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=18044565}, year = 2007 } @Article{Intelligence2009, Author = {Intelligence, Comp}, Title = {{Spatial Filtering and Single-Trial Classification of EEG during Vowel Speech Imagery}}, Journal = {Science And Technology}, Volume = {5}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Intelligence - 2009 - Spatial Filtering and Single-Trial Classification of EEG during Vowel Speech Imagery.pdf:pdf}, keywords = {4259 nagatsuta,bci,csp,eeg,hama,imagery,japan 226-8503,mailing address,midori-ku,r2-15,spatial filter,speech,vowel,yoko-}, year = 2009 } @Article{Becher1999, Author = {Becher, B and Giacomini, P S and Pelletier, D and McCrea, E and Prat, a and Antel, J P}, Title = {{Interferon-gamma secretion by peripheral blood T-cell subsets in multiple sclerosis: correlation with disease phase and interferon-beta therapy.}}, Journal = {Annals of neurology}, Volume = {45}, Number = {2}, Pages = {247--50}, abstract = {Interferon-gamma (IFN-gamma) is implicated as a participant in the immune effector and regulatory mechanisms considered to mediate the pathogenesis of multiple sclerosis (MS). We have used an intracellular cytokine staining technique to demonstrate that the proportion of ex vivo peripheral blood CD4 and CD8 T-cell subsets expressing IFN-gamma is increased in secondary progressing (SP) MS patients, whereas the values in untreated relapsing-remitting (RR) MS patients are reduced compared with those of controls. Patients treated with interferon-beta (IFN-beta) have an even more significant reduction in the percentage of IFN-gamma-secreting cells. 
The finding that the number of IFN-gamma-expressing CD8 cells is increased in SPMS patients, a group with reduced functional suppressor activity, and is most significantly reduced by IFN-beta therapy, which increases suppressor activity, indicates that IFN-gamma secretion by CD8 T cells and functional suppressor defects attributed to this cell subset in MS can be dissociated.}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Becher et al. - 1999 - Interferon-gamma secretion by peripheral blood T-cell subsets in multiple sclerosis correlation with disease phase and interferon-beta therapy..pdf:pdf}, issn = {0364-5134}, keywords = {Adult,Female,Humans,Interferon-beta,Interferon-beta: therapeutic use,Interferon-gamma,Interferon-gamma: secretion,Male,Middle Aged,Multiple Sclerosis,Multiple Sclerosis: immunology,Multiple Sclerosis: therapy,T-Lymphocyte Subsets,T-Lymphocyte Subsets: immunology,T-Lymphocytes,T-Lymphocytes: immunology}, month = feb, pmid = {9989628}, url = {http://www.ncbi.nlm.nih.gov/pubmed/9989628}, year = 1999 } @Article{Corouge2004, Author = {Corouge, Isabelle and Gouttard, Sylvain and Gerig, Guido}, Title = {{Accepted for oral presentation A Statistical Shape Model of Individual Fiber Tracts Extracted from Diffusion Tensor MRI}}, Journal = {Analysis}, Volume = {3217}, Number = {Part II}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Corouge, Gouttard, Gerig - 2004 - Accepted for oral presentation A Statistical Shape Model of Individual Fiber Tracts Extracted from Diffusion Tensor MRI.pdf:pdf}, keywords = {diffusion tensor imaging,statistical shape modelling}, year = 2004 } @Article{Mangin2002, Author = {Mangin, J-F and Poupon, C and Cointepas, Y and Rivi\`{e}re, D and Papadopoulos-Orfanos, D and Clark, C a and R\'{e}gis, J and {Le Bihan}, D}, Title = {{A framework based on spin glass models for the inference of anatomical connectivity from diffusion-weighted MR data - a technical review.}}, Journal = {NMR in biomedicine}, Volume = {15}, Number = {7-8}, Pages = {481--92}, abstract = {A family of methods aiming at the reconstruction of a putative fascicle map from any diffusion-weighted dataset is proposed. This fascicle map is defined as a trade-off between local information on voxel microstructure provided by diffusion data and a priori information on the low curvature of plausible fascicles. The optimal fascicle map is the minimum energy configuration of a simulated spin glass in which each spin represents a fascicle piece. This spin glass is embedded into a simulated magnetic external field that tends to align the spins along the more probable fiber orientations according to diffusion models. A model of spin interactions related to the curvature of the underlying fascicles introduces a low bending potential constraint. Hence, the optimal configuration is a trade-off between these two kind of forces acting on the spins. Experimental results are presented for the simplest spin glass model made up of compass needles located in the center of each voxel of a tensor based acquisition.}, doi = {10.1002/nbm.780}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Mangin et al. 
- 2002 - A framework based on spin glass models for the inference of anatomical connectivity from diffusion-weighted MR data - a technical review..pdf:pdf}, issn = {0952-3480}, keywords = {Algorithms,Astrocytes,Astrocytes: cytology,Brain,Brain Mapping,Brain Mapping: methods,Brain: cytology,Diffusion Magnetic Resonance Imaging,Diffusion Magnetic Resonance Imaging: methods,Humans,Image Enhancement,Image Enhancement: methods,Imaging, Three-Dimensional,Imaging, Three-Dimensional: methods,Methods,Models, Biological,Nerve Fibers, Myelinated,Nerve Fibers, Myelinated: pathology,Nerve Net,Nerve Net: cytology,Neural Pathways,Neural Pathways: cytology,Pattern Recognition, Automated,Quality Control,Spin Labels}, pmid = {12489097}, url = {http://www.ncbi.nlm.nih.gov/pubmed/12489097}, year = 2002 } @Article{Dryden2005, Author = {Dryden, Ian L.}, Title = {{Statistical analysis on high-dimensional spheres and shape spaces}}, Journal = {The Annals of Statistics}, Volume = {33}, Number = {4}, Pages = {1643--1665}, arxivid = {arXiv:math/0508279v1}, doi = {10.1214/009053605000000264}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Dryden - 2005 - Statistical analysis on high-dimensional spheres and shape spaces.pdf:pdf}, issn = {0090-5364}, keywords = {and phrases,bingham distribution,complex bingham,complex watson,di-}, month = aug, url = {http://projecteuclid.org/Dienst/getRecord?id=euclid.aos/1123250225/}, year = 2005 } @Article{Ziyan, Author = {Ziyan, U. and Sabuncu, M. R. and O'Donnell, L. J. and Westin, C.-F.}, Title = {{Nonlinear registration of diffusion MR images based on fiber bundles}}, Journal = {Medical Image Computing and Computer-Assisted Intervention (MICCAI'07)}, Volume = {4791}, Pages = {351--358}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Ziyan et al. - Unknown - -F. Westin. Nonlinear registration of diffusion mr images based on fiber bundles.pdf:pdf} } @Article{Orasis2007, Author = {Orasis, Projet}, Title = {{Optimization of Discrete Markov Random Fields via Dual Decomposition}}, Journal = {Computer}, Number = {April}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Orasis - 2007 - Optimization of Discrete Markov Random Fields via Dual Decomposition.pdf:pdf}, year = 2007 } @Article{Engel, Author = {Engel, Klaus and Hadwiger, Markus and Kniss, Joe M and Lefohn, Aaron E and Weiskopf, Daniel}, Title = {{Real-Time Volume Graphics}}, Journal = {Notes}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Engel et al.
- Unknown - Real-Time Volume Graphics Real-Time Volume Graphics.pdf:pdf} } @Article{Neji2009, Author = {Neji, Radhou\`{e}ne and Ahmed, Jean-fran\c{c}ois Deux and Nikos, Besbes and Georg, Komodakis and Mezri, Langs and Alain, Maatouk and Guillaume, Rahmouni and Gilles, Bassez and Paragios, Nikos}, Title = {{Manifold-driven Grouping of Skeletal Muscle Fibers}}, Journal = {Science}, Number = {February}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Neji et al. - 2009 - Manifold-driven Grouping of Skeletal Muscle Fibers.pdf:pdf}, year = 2009 } @Article{Ib2001, Author = {Ib, Luis}, Title = {{TUTORIAL on QUATERNIONS Part I}}, Journal = {Seminar}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Ib - 2001 - TUTORIAL on QUATERNIONS Part I.pdf:pdf}, year = 2001 } @Article{Wainwright2005, Author = {Wainwright, Martin J and Jordan, Michael I}, Title = {{A Variational Principle for Graphical Models}}, Journal = {Electrical Engineering}, Number = {March}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Wainwright, Jordan - 2005 - A Variational Principle for Graphical Models.pdf:pdf}, year = 2005 } @Article{Hosey2005MagResMed, Author = {Hosey, T. and Williams, G. and Ansorge, R.}, Title = {{Inference of multiple fiber orientations in high angular resolution diffusion imaging}}, Journal = {Magnetic Resonance in Medicine}, Volume = {54}, Number = {6}, Pages = {1480-1489}, abstract = {A method is presented that is capable of determining more than one fiber orientation within a single voxel in high angular resolution diffusion imaging (HARDI) data sets. This method is an extension of the Markov chain method recently introduced to diffusion tensor imaging (DTI) analysis, allowing the probability density function of up to 2 intra-voxel fiber orientations to be inferred. The multiple fiber architecture within a voxel is then assessed by calculating the relative probabilities of a 1 and 2 fiber model. It is demonstrated that for realistic signal to noise ratios, it is possible to accurately characterize the directions of 2 intersecting fibers using a 2 fiber model. The shortcomings of under-fitting a 2 fiber model, or over-fitting a 1 fiber model, are explored. This new algorithm enhances the tools available for fiber tracking.}, file = {attachment\:Hosey2005MagResMed.pdf:attachment\:Hosey2005MagResMed.pdf:PDF}, year = 2005 } @Article{Yen2009a, Author = {Yen, Luh and Fouss, Francois and Decaestecker, Christine and Francq, Pascal and Saerens, Marco}, Title = {{Graph nodes clustering with the sigmoid commute-time kernel: A comparative study}}, Journal = {Data \& Knowledge Engineering}, Volume = {68}, Number = {3}, Pages = {338--361}, doi = {10.1016/j.datak.2008.10.006}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Yen et al. - 2009 - Graph nodes clustering with the sigmoid commute-time kernel A comparative study(2).pdf:pdf}, issn = {0169023X}, publisher = {Elsevier B.V.}, url = {http://linkinghub.elsevier.com/retrieve/pii/S0169023X0800147X}, year = 2009 } @Article{Ziyan2007, Author = {Ziyan, Ulas and Sabuncu, Mert R. and Grimson, W. Eric. L. and Westin, Carl-Fredrik}, Title = {{A Robust Algorithm for Fiber-Bundle Atlas Construction}}, Journal = {2007 IEEE 11th International Conference on Computer Vision}, Pages = {1--8}, doi = {10.1109/ICCV.2007.4409143}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Ziyan et al. 
- 2007 - A Robust Algorithm for Fiber-Bundle Atlas Construction.pdf:pdf}, isbn = {978-1-4244-1630-1}, issn = {1550-5499}, month = oct, publisher = {Ieee}, url = {http://ieeexplore.ieee.org/lpdocs/epic03/wrapper.htm?arnumber=4409143}, year = 2007 } @Article{Baldi2009, Author = {Baldi, P. and Kerkyacharian, G. and Marinucci, D. and Picard, D.}, Title = {{Asymptotics for spherical needlets}}, Journal = {The Annals of Statistics}, Volume = {37}, Number = {3}, Pages = {1150--1171}, arxivid = {arXiv:math/0606599v2}, doi = {10.1214/08-AOS601}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Baldi et al. - 2009 - Asymptotics for spherical needlets.pdf:pdf}, issn = {0090-5364}, keywords = {High-frequency asymptotics, spherical needlets, ra}, url = {http://projecteuclid.org/euclid.aos/1239369018}, year = 2009 } @Article{MoriNeuron2006, Author = {Mori, Susumu and Zhang, Jiangyang}, Title = {Principles of Diffusion Tensor Imaging and Its Applications to Basic Neuroscience Research}, Journal = {Neuron}, Volume = {51}, Pages = {527-39}, abstract = {The brain contains more than 100 billion neurons that communicate with each other via axons for the formation of complex neural networks. The structural mapping of such networks during health and disease states is essential for understanding brain function. However, our understanding of brain structural connectivity is surprisingly limited, due in part to the lack of noninvasive methodologies to study axonal anatomy. Diffusion tensor imaging (DTI) is a recently developed MRI technique that can measure macroscopic axonal organization in nervous system tissues. In this article, the principles of DTI methodologies are explained, and several applications introduced, including visualization of axonal tracts in myelin and axonal injuries as well as human brain and mouse embryonic development. The strengths and limitations of DTI and key areas for future research and development are also discussed.}, file = {attachment\:Mori_Neuron_2006.pdf:attachment\:Mori_Neuron_2006.pdf:PDF}, year = 2006 } @Article{Heid2000ISMRM, Author = {Heid, O.}, Title = {Eddy current-nulled diffusion weighting.}, Journal = {In: Proceedings of the 8th Annual Meeting of ISMRM, Denver}, Pages = {799}, owner = {ian}, timestamp = {2009.03.12}, year = 2000 } @Article{merboldt1992diffusion, Author = {Merboldt, K.D. and H{\\"a}nicke, W. and Bruhn, H. and Gyngell, M.L. and Frahm, J.}, Title = {{Diffusion imaging of the human brain in vivo using high-speed STEAM MRI}}, Journal = {Magnetic Resonance in Medicine}, Volume = {23}, Number = {1}, Pages = {179--192}, issn = {1522-2594}, publisher = {John Wiley \& Sons}, year = 1992 } @Article{Descoteaux2009a, Author = {Descoteaux, Maxime and Deriche, Rachid and Kn\"{o}sche, Thomas R and Anwander, Alfred}, Title = {{Deterministic and probabilistic tractography based on complex fibre orientation distributions.}}, Journal = {IEEE transactions on medical imaging}, Volume = {28}, Number = {2}, Pages = {269--86}, abstract = {We propose an integral concept for tractography to describe crossing and splitting fibre bundles based on the fibre orientation distribution function (ODF) estimated from high angular resolution diffusion imaging (HARDI). We show that in order to perform accurate probabilistic tractography, one needs to use a fibre ODF estimation and not the diffusion ODF. We use a new fibre ODF estimation obtained from a sharpening deconvolution transform (SDT) of the diffusion ODF reconstructed from q-ball imaging (QBI). 
This SDT provides new insight into the relationship between the HARDI signal, the diffusion ODF, and the fibre ODF. We demonstrate that the SDT agrees with classical spherical deconvolution and improves the angular resolution of QBI. Another important contribution of this paper is the development of new deterministic and new probabilistic tractography algorithms using the full multidirectional information obtained through use of the fibre ODF. An extensive comparison study is performed on human brain datasets comparing our new deterministic and probabilistic tracking algorithms in complex fibre crossing regions. Finally, as an application of our new probabilistic tracking, we quantify the reconstruction of transcallosal fibres intersecting with the corona radiata and the superior longitudinal fasciculus in a group of eight subjects. Most current diffusion tensor imaging (DTI)-based methods neglect these fibres, which might lead to incorrect interpretations of brain functions.}, doi = {10.1109/TMI.2008.2004424}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Descoteaux et al. - 2009 - Deterministic and probabilistic tractography based on complex fibre orientation distributions..pdf:pdf}, issn = {1558-0062}, keywords = {Algorithms,Brain,Brain: anatomy \& histology,Diffusion Magnetic Resonance Imaging,Diffusion Magnetic Resonance Imaging: methods,Echo-Planar Imaging,Echo-Planar Imaging: methods,Humans,Image Enhancement,Image Enhancement: methods,Image Processing, Computer-Assisted,Image Processing, Computer-Assisted: methods,Models, Neurological,Models, Statistical,Nerve Fibers,Nerve Fibers: ultrastructure,Normal Distribution,Reproducibility of Results,Sensitivity and Specificity}, month = feb, pmid = {19188114}, url = {http://www.ncbi.nlm.nih.gov/pubmed/19188114}, year = 2009 } @InProceedings{Deriche2007ISBI, Author = {Deriche, R. and Descoteaux, M.}, Title = {Splitting Tracking Through Crossing Fibers: Multidirectional Q-Ball Tracking}, BookTitle = {4th IEEE International Symposium on Biomedical Imaging: From Nano to Macro (ISBI07)}, Pages = {756--759}, abstract = {We present a new tracking algorithm based on the full multidirectional information of the diffusion orientation distribution function (ODF) estimated from Q-Ball Imaging (QBI). From the ODF, we extract all available maxima and then extend streamline (STR) tracking to allow for splitting in multiple directions (SPLIT-STR). Our new algorithm SPLIT-STR overcomes important limitations of classical diffusion tensor streamline tracking in regions of low anisotropy and regions of fiber crossings. Not only can the tracking propagate through fiber crossings but it can also deal with fibers fanning and branching.
SPLIT-STR algorithm is efficient and validated on synthetic data, on a biological phantom and compared against probabilistic tensor tracking on a human brain dataset with known crossing fibers}, owner = {ian}, timestamp = {2009.03.10}, year = 2007 } @Article{Garyfallidis2009a, Author = {Garyfallidis, Eleftherios and Brett, Matthew and Nimmo-smith, Ian}, Title = {{Fast Dimensionality Reduction for Brain Tractography Clustering}}, Journal = {Sciences-New York}, Pages = {7--10}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Garyfallidis, Brett, Nimmo-smith - 2009 - Fast Dimensionality Reduction for Brain Tractography Clustering.pdf:pdf}, year = 2009 } @Article{JohansenBerg2004ProcNatAcadSci, Author = {Johansen-Berg, H and Behrens, T E and Robson, M D and Drobnjak, I and Rushworth, M F and Brady, JM and Smith, S M and Higham, D J and Matthews, P M}, Title = {Changes in connectivity profiles define functionally distinct regions in human medial frontal cortex.}, Journal = {Proc. Natl. Acad. Sci. USA}, Volume = {101}, Number = {36}, Pages = {13335-13340}, abstract = {A fundamental issue in neuroscience is the relation between structure and function. However, gross landmarks do not correspond well to microstructural borders and cytoarchitecture cannot be visualized in a living brain used for functional studies. Here, we used diffusion-weighted and functional MRI to test structure-function relations directly. Distinct neocortical regions were defined as volumes having similar connectivity profiles and borders identified where connectivity changed. Without using prior information, we found an abrupt profile change where the border between supplementary motor area (SMA) and pre-SMA is expected. Consistent with this anatomical assignment, putative SMA and pre-SMA connected to motor and prefrontal regions, respectively. Excellent spatial correlations were found between volumes defined by using connectivity alone and volumes activated during tasks designed to involve SMA or pre-SMA selectively. This finding demonstrates a strong relationship between structure and function in medial frontal cortex and offers a strategy for testing such correspondences elsewhere in the brain.}, file = {attachment\:JohansenBerg2004ProcNatAcadSci.pdf:attachment\:JohansenBerg2004ProcNatAcadSci.pdf:PDF}, year = 2004 } @Article{Maddah_MICCA2005, Author = {Maddah, M. and Mewes, A. U. and Haker, S. and Grimson, W. E. and Warfield, S. K.}, Title = {Automated atlas-based clustering of white matter fiber tracts from {DTMRI}.}, Journal = {Med Image Comput Comput Assist Interv Int Conf Med Image Comput Comput Assist Interv}, Volume = {8}, Number = {Pt 1}, Pages = {188-95}, abstract = {A new framework is presented for clustering fiber tracts into anatomically known bundles. This work is motivated by medical applications in which variation analysis of known bundles of fiber tracts in the human brain is desired. To include the anatomical knowledge in the clustering, we invoke an atlas of fiber tracts, labeled by the number of bundles of interest. In this work, we construct such an atlas and use it to cluster all fiber tracts in the white matter. To build the atlas, we start with a set of labeled ROIs specified by an expert and extract the fiber tracts initiating from each ROI. Affine registration is used to project the extracted fiber tracts of each subject to the atlas, whereas their B-spline representation is used to efficiently compare them to the fiber tracts in the atlas and assign cluster labels. 
Expert visual inspection of the result confirms that the proposed method is very promising and efficient in clustering of the known bundles of fiber tracts.}, authoraddress = {Computer Science and Artificial Intelligence Laboratory, Massachusetts Institute of Technology, Cambridge, MA 02139, USA. mmaddah@bwh.harvard.edu}, keywords = {Algorithms ; Anatomy, Artistic ; *Artificial Intelligence ; Brain/*cytology ; Computer Simulation ; Diffusion Magnetic Resonance Imaging/*methods ; Humans ; Image Enhancement/*methods ; Image Interpretation, Computer-Assisted/*methods ; Imaging, Three-Dimensional/methods ; Medical Illustration ; Models, Anatomic ; Nerve Fibers, Myelinated/*ultrastructure ; Pattern Recognition, Automated/*methods ; Reproducibility of Results ; Sensitivity and Specificity}, language = {eng}, medline-crdt = {2006/05/12 09:00}, medline-da = {20060511}, medline-dcom = {20060609}, medline-edat = {2006/05/12 09:00}, medline-fau = {Maddah, Mahnaz ; Mewes, Andrea U J ; Haker, Steven ; Grimson, W Eric L ; Warfield, Simon K}, medline-gr = {1U54 EB005149/EB/NIBIB NIH HHS/United States ; P01 CA67165/CA/NCI NIH HHS/United States ; P41 RR13218/RR/NCRR NIH HHS/United States ; R01 CA109246/CA/NCI NIH HHS/United States ; R01 LM007861/LM/NLM NIH HHS/United States ; R21 MH67054/MH/NIMH NIH HHS/United States}, medline-jid = {101249582}, medline-jt = {Medical image computing and computer-assisted intervention : MICCAI ... International Conference on Medical Image Computing and Computer-Assisted Intervention}, medline-lr = {20071114}, medline-mhda = {2006/06/10 09:00}, medline-own = {NLM}, medline-pl = {Germany}, medline-pmid = {16685845}, medline-pst = {ppublish}, medline-pt = {Evaluation Studies ; Journal Article ; Research Support, N.I.H., Extramural ; Research Support, Non-U.S. Gov't ; Research Support, U.S. Gov't, Non-P.H.S.}, medline-sb = {IM}, medline-so = {Med Image Comput Comput Assist Interv Int Conf Med Image Comput Comput Assist Interv. 2005;8(Pt 1):188-95.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=16685845}, year = 2005 } @Article{Lenglet2010a, Author = {Aganj, Iman and Lenglet, Christophe and Sapiro, Guillermo}, Title = {{ODF Maxima Extraction in Spherical Harmonic Representation via Analytical Search Space Reduction}}, Journal = {IMA Preprint Series}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Lenglet et al.
- 2010 - ODF MAXIMA EXTRACTION IN INSTITUTE FOR MATHEMATICS AND ITS APPLICATIONS ODF Maxima Extraction in Spherical Harmonic Representation via Analytical Search Space Reduction.pdf:pdf}, year = 2010 } @Article{Com, Author = {Com, Bookboon}, Title = {{RANDOM VARIABLES I}}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Com - Unknown - RANDOM VARIABLES I.pdf:pdf} } @Article{Mai, Author = {Mai, Thanh and Ngoc, Pham and Picard, Dominique}, Title = {{Localized deconvolution on the sphere}}, Pages = {1--33}, arxivid = {arXiv:0908.1952v1}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Mai, Ngoc, Picard - Unknown - Localized deconvolution on the sphere.pdf:pdf}, keywords = {62g05 62g08 62g20 62c10,and phrases,minimax estima-,msc 2000 subject classification,second- generation wavelets,statistical inverse problems,tion} } @Article{Alexander2005NeuroImage, Author = {Alexander, Daniel C. and Barker, Gareth J.}, Title = {Optimal imaging parameters for fiber-orientation estimation in diffusion MRI}, Journal = {NeuroImage}, Volume = {27}, Pages = {357 367}, abstract = {This study uses Monte Carlo simulations to investigate the optimal value of the diffusion weighting factor b for estimating white-matter fiber orientations using diffusion MRI with a standard spherical sampling scheme. We devise an algorithm for determining the optimal echo time, pulse width, and pulse separation in the pulsed-gradient spinecho sequence for a specific value of b. The Monte Carlo simulations provide an estimate of the optimal value of b for recovering one and two fiber orientations. We show that the optimum is largely independent of the noise level in the measurements and the number of gradient directions and that the optimum depends only weakly on the diffusion anisotropy, the maximum gradient strength, and the spin spin relaxation time. The optimum depends strongly on the mean diffusivity. In brain tissue, the optima we estimate are in the ranges [0.7, 1.0] \times 10^9 s m^{-2} and [2.2, 2.8] \times 10^9 s m^{-2} for the one- and two-fiber cases, respectively. The best b for estimating the fractional anisotropy is slightly higher than for estimating fiber directions in the one-fiber case and slightly lower in the two-fiber case. To estimate Tr(D) in the onefiber case, the optimal setting is higher still. Simulations suggest that a ratio of high to low b measurements of 5 to 1 is a good compromise for measuring fiber directions and size and shape indices.}, owner = {ian}, timestamp = {2009.03.04}, year = 2005 } @Article{Yeh2010, Author = {Yeh, F and Wedeen, V and Tseng, W}, Title = {{Generalized Q-Sampling Imaging.}}, Journal = {IEEE transactions on medical imaging}, Number = {c}, abstract = {Based on the Fourier transform relation between diffusion MR signals and the underlying diffusion displacement, a new relation is derived to estimate the spin distribution function (SDF) directly from diffusion MR signals. This relation leads to an imaging method called generalized q-sampling imaging (GQI), which can obtain the SDF from the shell sampling scheme used in q-ball imaging (QBI) or the grid sampling scheme used in diffusion spectrum imaging (DSI). The accuracy of GQI was evaluated by a simulation study and an in vivo experiment in comparison with QBI and DSI. The simulation results showed that the accuracy of GQI was comparable to that of QBI and DSI. 
The simulation study of GQI also showed that an anisotropy index, named quantitative anisotropy, was correlated with the volume fraction of the resolved fiber component. The in vivo images of GQI demonstrated that SDF patterns were similar to the ODFs reconstructed by QBI or DSI. The tractography generated from GQI was also similar to those generated from QBI and DSI. In conclusion, the proposed GQI method can be applied to grid or shell sampling schemes and can provide directional and quantitative information about the crossing fibers.}, doi = {10.1109/TMI.2010.2045126}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Yeh, Wedeen, Tseng - 2010 - Generalized Q-Sampling Imaging..pdf:pdf}, issn = {1558-0062}, month = mar, pmid = {20304721}, url = {http://www.ncbi.nlm.nih.gov/pubmed/20304721}, year = 2010 } @Article{Cook2007, Author = {Cook, P. A. and Symms, M. and Boulby, P. A. and Alexander, D. C.}, Title = {Optimal acquisition orders of diffusion-weighted {MRI} measurements.}, Journal = {J Magn Reson Imaging}, Volume = {25}, Number = {5}, Pages = {1051-8}, abstract = {PURPOSE: To propose a new method to optimize the ordering of gradient directions in diffusion-weighted MRI so that partial scans have the best spherical coverage. MATERIALS AND METHODS: Diffusion-weighted MRI often uses a spherical sampling scheme, which acquires images sequentially with diffusion-weighting gradients in unique directions distributed isotropically on the hemisphere. If not all of the measurements can be completed, the quality of diffusion tensors fitted to the partial scan is sensitive to the order of the gradient directions in the scanner protocol. If the directions are in a random order, then a partial scan may cover some parts of the hemisphere densely but other parts sparsely and thus provide poor spherical coverage. We compare the results of ordering with previously published methods for optimizing the acquisition in simulation. RESULTS: Results show that all methods produce similar results and all improve the accuracy of the estimated diffusion tensors significantly over unordered acquisitions. CONCLUSION: The new ordering method improves the spherical coverage of partial scans and has the advantage of maintaining the optimal coverage of the complete scan.}, authoraddress = {Centre for Medical Image Computing, Department of Computer Science University College London, London, UK. p.cook@cs.ucl.ac.uk}, keywords = {Algorithms ; Anisotropy ; Brain Mapping/*methods ; Diffusion Magnetic Resonance Imaging/*methods ; Humans ; Image Enhancement/*methods ; Image Processing, Computer-Assisted}, language = {eng}, medline-aid = {10.1002/jmri.20905 [doi]}, medline-ci = {(c) 2007 Wiley-Liss, Inc.}, medline-crdt = {2007/04/26 09:00}, medline-da = {20070430}, medline-dcom = {20070628}, medline-edat = {2007/04/26 09:00}, medline-fau = {Cook, Philip A ; Symms, Mark ; Boulby, Philip A ; Alexander, Daniel C}, medline-is = {1053-1807 (Print)}, medline-jid = {9105850}, medline-jt = {Journal of magnetic resonance imaging : JMRI}, medline-mhda = {2007/06/29 09:00}, medline-own = {NLM}, medline-pl = {United States}, medline-pmid = {17457801}, medline-pst = {ppublish}, medline-pt = {Journal Article ; Research Support, Non-U.S. Gov't}, medline-sb = {IM}, medline-so = {J Magn Reson Imaging. 
2007 May;25(5):1051-8.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=17457801}, year = 2007 } @Article{O'Donnell2009, Author = {O'Donnell, Lauren J and Westin, Carl-Fredrik and Golby, Alexandra J}, Title = {{Tract-based morphometry for white matter group analysis.}}, Journal = {NeuroImage}, Volume = {45}, Number = {3}, Pages = {832--44}, abstract = {We introduce an automatic method that we call tract-based morphometry, or TBM, for measurement and analysis of diffusion MRI data along white matter fiber tracts. Using subject-specific tractography bundle segmentations, we generate an arc length parameterization of the bundle with point correspondences across all fibers and all subjects, allowing tract-based measurement and analysis. In this paper we present a quantitative comparison of fiber coordinate systems from the literature and we introduce an improved optimal match method that reduces spatial distortion and improves intra- and inter-subject variability of FA measurements. We propose a method for generating arc length correspondences across hemispheres, enabling a TBM study of interhemispheric diffusion asymmetries in the arcuate fasciculus (AF) and cingulum bundle (CB). The results of this study demonstrate that TBM can detect differences that may not be found by measuring means of scalar invariants in entire tracts, such as the mean diffusivity (MD) differences found in AF. We report TBM results of higher fractional anisotropy (FA) in the left hemisphere in AF (caused primarily by lower lambda(3), the smallest eigenvalue of the diffusion tensor, in the left AF), and higher left hemisphere FA in CB (related to higher lambda(1), the largest eigenvalue of the diffusion tensor, in the left CB). By mapping the significance levels onto the tractography trajectories for each structure, we demonstrate the anatomical locations of the interhemispheric differences. The TBM approach brings analysis of DTI data into the clinically and neuroanatomically relevant framework of the tract anatomy.}, doi = {10.1016/j.neuroimage.2008.12.023}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/O'Donnell, Westin, Golby - 2009 - Tract-based morphometry for white matter group analysis..pdf:pdf}, issn = {1095-9572}, keywords = {Brain,Brain Mapping,Brain Mapping: methods,Brain: anatomy \& histology,Diffusion Magnetic Resonance Imaging,Humans,Image Processing, Computer-Assisted,Image Processing, Computer-Assisted: methods}, pmid = {19154790}, publisher = {Elsevier Inc.}, url = {http://www.ncbi.nlm.nih.gov/pubmed/19154790}, year = 2009 } @Article{Descoteaux2007MagResMed, Author = {Descoteaux, Maxime and Angelino, Elaine and Fitzgibbons, Shaun and Deriche, Rachid}, Title = {Regularized, fast, and robust analytical Q-ball imaging}, Journal = {Magnetic Resonance in Medicine}, Volume = {58}, Number = {3}, Pages = {497-510}, abstract = {We propose a regularized, fast, and robust analytical solution for the Q-ball imaging (QBI) reconstruction of the orientation distribution function (ODF) together with its detailed validation and a discussion on its benefits over the state-of-the-art. Our analytical solution is achieved by modeling the raw high angular resolution diffusion imaging signal with a spherical harmonic basis that incorporates a regularization term based on the Laplace Beltrami operator defined on the unit sphere. 
This leads to an elegant mathematical simplification of the Funk-Radon transform which approximates the ODF. We prove a new corollary of the Funk-Hecke theorem to obtain this simplification. Then, we show that the Laplace-Beltrami regularization is theoretically and practically better than Tikhonov regularization. At the cost of slightly reducing angular resolution, the Laplace-Beltrami regularization reduces ODF estimation errors and improves fiber detection while reducing angular error in the ODF maxima detected. Finally, a careful quantitative validation is performed against ground truth from synthetic data and against real data from a biological phantom and a human brain dataset. We show that our technique is also able to recover known fiber crossings in the human brain and provides the practical advantage of being up to 15 times faster than the original numerical QBI method.}, doi = {10.1002/mrm.21277}, file = {attachment\:Descoteaux2007MagResMed.pdf:attachment\:Descoteaux2007MagResMed.pdf:PDF}, publisher = {Wiley-Liss, Inc.}, url = {http://dx.doi.org/10.1002/mrm.21277}, year = 2007 } @TechReport{Zhuang2008Kentucky, Author = {Zhuang, Qi and Gold, Brian T. and Huang, Ruiwang and Liang, Xuwei and Cao, Ning and Zhang, Jun}, Title = {Generalized Diffusion Simulation-Based Tractography}, Institution = {Technical Report CMIDA-HiPSCCS 009-08, Department of Computer Science, University of Kentucky, KY}, abstract = {Diffusion weighted imaging ({DWI}) techniques have been used to study human brain white matter fiber structures in vivo. Commonly used standard diffusion tensor magnetic resonance imaging ({DTI}) tractography derived from the second order diffusion tensor model has limitations in its ability to resolve complex fiber tracts. We propose a new fiber tracking method based on the generalized diffusion tensor ({GDT}) model. This new method better models the anisotropic diffusion process in human brain by using the generalized diffusion simulation-based fiber tractography ({GDST}). Due to the additional information provided by {GDT}, the {GDST} method simulates the underlying physical diffusion process of the human brain more accurately than does the standard {DTI} method. The effectiveness of the new fiber tracking algorithm was demonstrated via analyses on real and synthetic {DWI} datasets. In addition, the general analytic expression of high order b matrix is derived in the case of twice refocused spin-echo ({TRSE}) pulse sequence which is used in the {DWI} data acquisition. Based on our results, we discuss the benefits of {GDT} and the second order diffusion tensor on fiber tracking.}, owner = {ian}, timestamp = {2008.10.01}, year = 2008 } @Article{Bar-Shir2008JMR, Author = {Bar-Shir, Amnon and Avram, Liat and {\"O}zarslan, Evren and Basser, Peter J. and Cohen, Yoram}, Title = {The effect of the diffusion time and pulse gradient duration ratio on the diffraction pattern and the structural information estimated from q-space diffusion MR: Experiments and simulations}, Journal = {Journal of Magnetic Resonance}, Volume = {194}, Pages = {230--236}, owner = {ian}, timestamp = {2009.03.05}, year = 2008 } @Article{king1994q, Author = {King, M.D. and Houseman, J. and Roussel, S.A. and Van Bruggen, N. and Williams, S.R. and Gadian, D.G.}, Title = {{q-Space imaging of the brain}}, Journal = {Magnetic Resonance in Medicine}, Volume = {32}, Number = {6}, Pages = {707--713}, issn = {1522-2594}, publisher = {John Wiley \& Sons}, year = 1994 } @Book{MAB04, Author = {{Matt A. Bernstein} and {Kevin F.
King} and {Xiaohong Joe Zhou}}, Title = {Handbook of {MRI} {P}ulse {S}equences}, Publisher = {Elsevier Academic Press}, year = 2004 } @Article{Reese2003, Author = {Reese, T G and Heid, O and Weisskoff, R M and Wedeen, V J}, Title = {{Reduction of eddy-current-induced distortion in diffusion MRI using a twice-refocused spin echo.}}, Journal = {Magnetic resonance in medicine : official journal of the Society of Magnetic Resonance in Medicine / Society of Magnetic Resonance in Medicine}, Volume = {49}, Number = {1}, Pages = {177--82}, abstract = {Image distortion due to field gradient eddy currents can create image artifacts in diffusion-weighted MR images. These images, acquired by measuring the attenuation of NMR signal due to directionally dependent diffusion, have recently been shown to be useful in the diagnosis and assessment of acute stroke and in mapping of tissue structure. This work presents an improvement on the spin-echo (SE) diffusion sequence that displays less distortion and consequently improves image quality. Adding a second refocusing pulse provides better image quality with less distortion at no cost in scanning efficiency or effectiveness, and allows more flexible diffusion gradient timing. By adjusting the timing of the diffusion gradients, eddy currents with a single exponential decay constant can be nulled, and eddy currents with similar decay constants can be greatly reduced. This new sequence is demonstrated in phantom measurements and in diffusion anisotropy images of normal human brain.}, doi = {10.1002/mrm.10308}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Reese et al. - 2003 - Reduction of eddy-current-induced distortion in diffusion MRI using a twice-refocused spin echo..pdf:pdf}, issn = {0740-3194}, keywords = {Artifacts,Brain,Brain: anatomy \& histology,Brain: pathology,Echo-Planar Imaging,Echo-Planar Imaging: methods,Humans,Magnetic Resonance Imaging,Magnetic Resonance Imaging: methods,Phantoms, Imaging,Stroke,Stroke: diagnosis}, pmid = {12509835}, url = {http://www.ncbi.nlm.nih.gov/pubmed/12509835}, year = 2003 } @Article{Yu, Author = {Yu, Hwanjo and Yang, Jiong}, Title = {{Classifying Large Data Sets Using SVMs with Hierarchical Clusters}}, Journal = {Science}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Yu, Yang - Unknown - Classifying Large Data Sets Using SVMs with Hierarchical Clusters.pdf:pdf}, keywords = {hierarchical cluster,support vector machines} } @Article{Zanche2008, Author = {Zanche, N De and Pruessmann, K P and Boesiger, P}, Title = {{Preliminary Experience with Visualization of Intracortical Fibers by Focused High-Resolution}}, Journal = {Ajnr. American Journal Of Neuroradiology}, doi = {10.3174/ajnr.A0742}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Zanche, Pruessmann, Boesiger - 2008 - Preliminary Experience with Visualization of Intracortical Fibers by Focused High-Resolution.pdf:pdf}, year = 2008 } @Article{DavisTMI02, Author = {Davies, R. H. and Twining, C. J. and Cootes, T. F. and Waterton, J. C. and Taylor, C. J.}, Title = {A minimum description length approach to statistical shape modeling.}, Journal = {IEEE Trans Med Imaging}, Volume = {21}, Number = {5}, Pages = {525-37}, abstract = {We describe a method for automatically building statistical shape models from a training set of example boundaries/surfaces. These models show considerable promise as a basis for segmenting and interpreting images. 
One of the drawbacks of the approach is, however, the need to establish a set of dense correspondences between all members of a set of training shapes. Often this is achieved by locating a set of "landmarks" manually on each training image, which is time consuming and subjective in two dimensions and almost impossible in three dimensions. We describe how shape models can be built automatically by posing the correspondence problem as one of finding the parameterization for each shape in the training set. We select the set of parameterizations that build the "best" model. We define "best" as that which minimizes the description length of the training set, arguing that this leads to models with good compactness, specificity and generalization ability. We show how a set of shape parameterizations can be represented and manipulated in order to build a minimum description length model. Results are given for several different training sets of two-dimensional boundaries, showing that the proposed method constructs better models than other approaches including manual landmarking-the current gold standard. We also show that the method can be extended straightforwardly to three dimensions.}, authoraddress = {Division of Imaging Science and Biomedical Engineering, University of Manchester, UK. rhodri.h.davies@stud.man.ac.uk}, keywords = {*Algorithms ; Animals ; *Artificial Intelligence ; Brain/anatomy \& histology ; Brain Ischemia/diagnosis ; Cartilage, Articular/anatomy \& histology ; Hand/anatomy \& histology ; Heart Ventricles ; Hip/radiography/ultrasonography ; Hip Prosthesis ; Humans ; Image Enhancement/*methods ; Image Interpretation, Computer-Assisted/*methods ; Information Theory ; Kidney/anatomy \& histology ; Knee ; Magnetic Resonance Imaging ; *Models, Statistical ; Multivariate Analysis ; Normal Distribution ; Pattern Recognition, Automated ; Quality Control ; Rats ; Rats, Inbred F344 ; Rats, Sprague-Dawley ; Sensitivity and Specificity ; Stochastic Processes}, language = {eng}, medline-aid = {10.1109/TMI.2002.1009388 [doi]}, medline-crdt = {2002/06/20 10:00}, medline-da = {20020619}, medline-dcom = {20021227}, medline-edat = {2002/06/20 10:00}, medline-fau = {Davies, Rhodri H ; Twining, Carole J ; Cootes, Tim F ; Waterton, John C ; Taylor, Chris J}, medline-is = {0278-0062 (Print)}, medline-jid = {8310780}, medline-jt = {IEEE transactions on medical imaging}, medline-lr = {20061115}, medline-mhda = {2002/12/28 04:00}, medline-own = {NLM}, medline-pl = {United States}, medline-pmid = {12071623}, medline-pst = {ppublish}, medline-pt = {Comparative Study ; Journal Article ; Research Support, Non-U.S. Gov't}, medline-sb = {IM}, medline-so = {IEEE Trans Med Imaging. 2002 May;21(5):525-37.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=12071623}, year = 2002 } @Article{Papadakis2000, Author = {Papadakis, N G and Murrills, C D and Hall, L D and Huang, C L and {Adrian Carpenter}, T}, Title = {{Minimal gradient encoding for robust estimation of diffusion anisotropy.}}, Journal = {Magnetic resonance imaging}, Volume = {18}, Number = {6}, Pages = {671--9}, abstract = {This study has investigated the relationship between the noise sensitivity of measurement by magnetic resonance imaging (MRI) of the diffusion tensor (D) of water and the number N of diffusion-weighting (DW) gradient directions, using computer simulations of strongly anisotropic fibers with variable orientation. 
The DW directions uniformly sampled the diffusion ellipsoid surface. It is shown that the variation of the signal-to-noise ratio (SNR) of three ideally rotationally invariant scalars of D due to variable fiber orientation provides an objective quantitative measure for the diffusion ellipsoid sampling efficiency, which is independent of the SNR value of the baseline signal obtained without DW; the SNR variation decreased asymptotically with increasing N. The minimum number N(0) of DW directions, which minimized the SNR variation of the three scalars of D was determined, thereby achieving the most efficient ellipsoid sampling. The resulting time efficient diffusion tensor imaging (DTI) protocols provide robust estimation of diffusion anisotropy in the presence of noise and can improve the repeatability/reliability of DTI experiments when there is high variability in the orientation of similar anisotropic structures, as for example, in studies which require repeated measurement of one individual, intersubject comparisons or multicenter studies.}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Papadakis et al. - 2000 - Minimal gradient encoding for robust estimation of diffusion anisotropy..pdf:pdf}, issn = {0730-725X}, keywords = {Anisotropy,Computer Simulation,Humans,Magnetic Resonance Imaging,Magnetic Resonance Imaging: methods,Models, Theoretical,Statistics as Topic}, month = jul, pmid = {10930776}, url = {http://www.ncbi.nlm.nih.gov/pubmed/10930776}, year = 2000 } @Article{roberts2005fdi, Author = {Roberts, T. P. L. and Liu, F. and Kassner, A. and Mori, S. and Guha, A.}, Title = {{Fiber Density Index Correlates with Reduced Fractional Anisotropy in White Matter of Patients with Glioblastoma}}, Journal = {American Journal of Neuroradiology}, Volume = {26}, Number = {9}, Pages = {2183--2186}, file = {attachment\:roberts_FA_glioblastoma_2005.pdf:attachment\:roberts_FA_glioblastoma_2005.pdf:PDF}, publisher = {Am Soc Neuroradiology}, year = 2005 } @Article{Baas2008, Author = {Baas, Matthias}, Title = {{Python Computer Graphics Kit}}, Journal = {Interface}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Baas - 2008 - Python Computer Graphics Kit.pdf:pdf}, year = 2008 } @Article{Odonnell_MICCAI05, Author = {O'Donnell, L. and Westin, C. F.}, Title = {White matter tract clustering and correspondence in populations.}, Journal = {Med Image Comput Comput Assist Interv Int Conf Med Image Comput Comput Assist Interv}, Volume = {8}, Number = {Pt 1}, Pages = {140-7}, abstract = {We present a novel method for finding white matter fiber correspondences and clusters across a population of brains. Our input is a collection of paths from tractography in every brain. Using spectral methods we embed each path as a vector in a high dimensional space. We create the embedding space so that it is common across all brains, consequently similar paths in all brains will map to points near each other in the space. By performing clustering in this space we are able to find matching fiber tract clusters in all brains. In addition, we automatically obtain correspondence of tractographic paths across brains: by selecting one or several paths of interest in one brain, the most similar paths in all brains are obtained as the nearest points in the high-dimensional space.}, authoraddress = {MIT Computer Science and Artificial Intelligence Lab, Cambridge MA, USA. 
lauren@csail.mit.edu}, keywords = {Algorithms ; *Artificial Intelligence ; Brain/*anatomy \& histology ; Cluster Analysis ; Diffusion Magnetic Resonance Imaging/*methods ; Humans ; Image Enhancement/methods ; Image Interpretation, Computer-Assisted/*methods ; Imaging, Three-Dimensional/*methods ; Nerve Fibers, Myelinated/*ultrastructure ; Pattern Recognition, Automated/*methods ; Reproducibility of Results ; Sensitivity and Specificity}, language = {eng}, medline-crdt = {2006/05/12 09:00}, medline-da = {20060511}, medline-dcom = {20060609}, medline-edat = {2006/05/12 09:00}, medline-fau = {O'Donnell, Lauren ; Westin, Carl-Fredrik}, medline-gr = {1-R01-NS051826-01/NS/NINDS NIH HHS/United States ; P41-RR13218/RR/NCRR NIH HHS/United States ; U24 RR021382/RR/NCRR NIH HHS/United States ; U54 EB005149/EB/NIBIB NIH HHS/United States}, medline-jid = {101249582}, medline-jt = {Medical image computing and computer-assisted intervention : MICCAI ... International Conference on Medical Image Computing and Computer-Assisted Intervention}, medline-lr = {20071114}, medline-mhda = {2006/06/10 09:00}, medline-own = {NLM}, medline-pl = {Germany}, medline-pmid = {16685839}, medline-pst = {ppublish}, medline-pt = {Comparative Study ; Evaluation Studies ; Journal Article ; Research Support, N.I.H., Extramural}, medline-sb = {IM}, medline-so = {Med Image Comput Comput Assist Interv Int Conf Med Image Comput Comput Assist Interv. 2005;8(Pt 1):140-7.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=16685839}, year = 2005 } @Article{Sorland2002MagResChem, Author = {Srland, Geir Humborstad and Aksnes, Dagfinn}, Title = {Artefacts and pitfalls in diffusion measurements by NMR}, Journal = {Magnetic Resonance in Chemistry}, Volume = {40}, Number = {13}, Pages = {S139-S146}, abstract = {When applying pulsed field gradient (PFG) NMR experiments to determine the molecular mobility characterized by the diffusion coefficient, it is crucial to have control over all experimental parameters that may affect the performance of the diffusion experiment. This could be diffusion measurement in the presence of magnetic field transients, internal magnetic field gradients, either constant or spatially varying, convection, mechanical vibrations, or in the presence of physical restrictions affecting the diffusion propagator. The effect of these parameters on the diffusion experiment is discussed and visualized. It is also outlined how to minimize their influence on the measured diffusivity that is extracted from the PFG-NMR experiment. For an expanded and more general treatment we refer to the excellent reviews by Dr William S. Price (Concepts Magn. Reson. 1997; 9: 299; 1998; 10: 197) and the references therein.}, doi = {10.1002/mrc.1112}, owner = {ian}, timestamp = {2009.03.12}, url = {http://dx.doi.org/10.1002/mrc.1112}, year = 2002 } @Article{ZiyanMICCAI07, Author = {Ziyan, U. and Sabuncu, M. R. and O'Donnell, L. J. and Westin, C. F.}, Title = {Nonlinear registration of diffusion {MR} images based on fiber bundles.}, Journal = {Med Image Comput Comput Assist Interv Int Conf Med Image Comput Comput Assist Interv}, Volume = {10}, Number = {Pt 1}, Pages = {351-8}, abstract = {In this paper, we explore the use of fiber bundles extracted from diffusion MR images for a nonlinear registration algorithm. We employ a white matter atlas to automatically label major fiber bundles and to establish correspondence between subjects. 
We propose a polyaffine framework to calculate a smooth and invertible nonlinear warp field based on these correspondences, and derive an analytical solution for the reorientation of the tensor fields under the polyaffine transformation. We demonstrate our algorithm on a group of subjects and show that it performs comparable to a higher dimensional nonrigid registration algorithm.}, authoraddress = {MIT Computer Science and Artificial Intelligence Lab, Cambridge MA, USA. ulas@mit.edu}, keywords = {*Algorithms ; *Artificial Intelligence ; Brain/*anatomy \& histology ; Diffusion Magnetic Resonance Imaging/*methods ; Image Enhancement/*methods ; Image Interpretation, Computer-Assisted/*methods ; Imaging, Three-Dimensional/*methods ; Nerve Fibers, Myelinated/*ultrastructure ; Nonlinear Dynamics ; Pattern Recognition, Automated/*methods ; Reproducibility of Results ; Sensitivity and Specificity}, language = {eng}, medline-crdt = {2007/12/07 09:00}, medline-da = {20071204}, medline-dcom = {20080103}, medline-edat = {2007/12/07 09:00}, medline-fau = {Ziyan, Ulas ; Sabuncu, Mert R ; O'Donnell, Lauren J ; Westin, Carl-Fredrik}, medline-gr = {P41-RR13218/RR/NCRR NIH HHS/United States ; P41-RR15241/RR/NCRR NIH HHS/United States ; R01-AG20012/AG/NIA NIH HHS/United States ; R01-MH074794/MH/NIMH NIH HHS/United States ; U54-EB005149/EB/NIBIB NIH HHS/United States}, medline-jid = {101249582}, medline-jt = {Medical image computing and computer-assisted intervention : MICCAI ... International Conference on Medical Image Computing and Computer-Assisted Intervention}, medline-mhda = {2008/01/04 09:00}, medline-own = {NLM}, medline-pl = {Germany}, medline-pmid = {18051078}, medline-pst = {ppublish}, medline-pt = {Journal Article ; Research Support, N.I.H., Extramural ; Research Support, Non-U.S. Gov't}, medline-sb = {IM}, medline-so = {Med Image Comput Comput Assist Interv Int Conf Med Image Comput Comput Assist Interv. 2007;10(Pt 1):351-8.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=18051078}, year = 2007 } @Article{Aganj, Author = {Aganj, I and Lenglet, C and Keriven, R and Sapiro, G and Harel, N and Thompson, P}, Title = {{A Hough Transform Global Approach to Diffusion MRI Tractography}}, Journal = {Methods}, Pages = {4--4}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Aganj et al. - Unknown - A Hough Transform Global Approach to Diffusion MRI Tractography.pdf:pdf} } @Article{Pedersen2008, Author = {Petersen, Kaare Brandt and Pedersen, Michael Syskind}, Title = {{The Matrix Cookbook}}, Journal = {Matrix}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Pedersen et al. - 2008 - The Matrix Cookbook.pdf:pdf}, keywords = {matrix algebra,matrix identities,matrix relations,derivative of inverse matrix,determinant}, year = 2008 } @Article{denislebihan2006aap, Author = {Le Bihan, Denis and Poupon, C. and Amadon, A.
and Lethimonnier, F.}, Title = {{Artifacts and pitfalls in diffusion MRI}}, Journal = {Journal of Magnetic Resonance Imaging}, Volume = {24}, Pages = {478--488}, year = 2006 } @Article{bernstein2005handbook, Author = {Bernstein, M.A. and King, K.E. and Zhou, X.J. and Fong, W.}, Title = {{Handbook of MRI pulse sequences}}, Journal = {Medical Physics}, Volume = {32}, Pages = {1452}, year = 2005 } @Article{BJ02, Author = {Basser, P. J. and Jones, D. K.}, Title = {Diffusion-tensor {MRI}: theory, experimental design and data analysis - a technical review.}, Journal = {NMR Biomed}, Volume = {15}, Number = {7-8}, Pages = {456-67}, abstract = {This article treats the theoretical underpinnings of diffusion-tensor magnetic resonance imaging (DT-MRI), as well as experimental design and data analysis issues. We review the mathematical model underlying DT-MRI, discuss the quantitative parameters that are derived from the measured effective diffusion tensor, and describe artifacts that arise in typical DT-MRI acquisitions. We also discuss difficulties in identifying appropriate models to describe water diffusion in heterogeneous tissues, as well as in interpreting experimental data obtained in such issues. Finally, we describe new statistical methods that have been developed to analyse DT-MRI data, and their potential uses in clinical and multi-site studies.}, authoraddress = {Section on Tissue Biophysics and Biomimetics, NICHD, National Institutes of Health, Bethesda, MD 20892, USA.}, keywords = {Anisotropy ; Artifacts ; Brain/cytology/metabolism ; Diffusion ; Diffusion Magnetic Resonance Imaging/instrumentation/*methods ; Image Enhancement/*methods ; *Models, Biological ; Models, Chemical ; Nerve Fibers/chemistry/*metabolism/*pathology ; Neural Pathways/chemistry/cytology/metabolism ; Research Design ; Water/chemistry}, language = {eng}, medline-aid = {10.1002/nbm.783 [doi]}, medline-ci = {Copyright 2002 John Wiley & Sons, Ltd.}, medline-da = {20021218}, medline-dcom = {20030701}, medline-edat = {2002/12/19 04:00}, medline-fau = {Basser, Peter J ; Jones, Derek K}, medline-is = {0952-3480 (Print)}, medline-jid = {8915233}, medline-jt = {NMR in biomedicine}, medline-lr = {20061115}, medline-mhda = {2003/07/02 05:00}, medline-own = {NLM}, medline-pl = {England}, medline-pmid = {12489095}, medline-pst = {ppublish}, medline-pt = {Journal Article ; Research Support, Non-U.S. Gov't ; Review}, medline-pubm = {Print}, medline-rf = {107}, medline-rn = {7732-18-5 (Water)}, medline-sb = {IM}, medline-so = {NMR Biomed. 
2002 Nov-Dec;15(7-8):456-67.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks\&dbfrom=pubmed\&retmode=ref\&id=12489095}, year = 2002 } @Article{Maaten2008a, Author = {Maaten, Laurens Van Der and Hinton, Geoffrey}, Title = {{Visualizing Data using t-SNE}}, Journal = {Journal of Machine Learning Research}, Volume = {9}, Pages = {2579--2605}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Maaten, Hinton - 2008 - Visualizing Data using t-SNE.pdf:pdf}, keywords = {dimensionality reduction,embedding algorithms,manifold learning,multidimensional scaling,visualization}, year = 2008 } @Article{Parker2005PhilTransRoySoc, Author = {Parker, G J and Alexander, D C}, Title = {Probabilistic anatomical connectivity derived from the microscopic persistent angular structure of cerebral tissue}, Journal = {Philos Trans R Soc Lond B Biol Sci.}, Volume = {360}, Number = {1457}, Pages = {893-902}, abstract = {Recently developed methods to extract the persistent angular structure (PAS) of axonal fibre bundles from diffusion-weighted magnetic resonance imaging (MRI) data are applied to drive probabilistic fibre tracking, designed to provide estimates of anatomical cerebral connectivity. The behaviour of the PAS function in the presence of realistic data noise is modelled for a range of single and multiple fibre configurations. This allows probability density functions (PDFs) to be generated that are parametrized according to the anisotropy of individual fibre populations. The PDFs are incorporated in a probabilistic fibre-tracking method to allow the estimation of whole-brain maps of anatomical connection probability. These methods are applied in two exemplar experiments in the corticospinal tract to show that it is possible to connect the entire primary motor cortex (M1) when tracing from the cerebral peduncles, and that the reverse experiment of tracking from M1 successfully identifies high probability connection via the pyramidal tracts. Using the extracted PAS in probabilistic fibre tracking allows higher specificity and sensitivity than previously reported fibre tracking using diffusion-weighted MRI in the corticospinal tract.}, file = {attachment\:Parker2005PhilTransRoySoc.pdf:attachment\:Parker2005PhilTransRoySoc.pdf:PDF}, year = 2005 } @Article{Kreher2008ISMRM, Author = {Kreher, B. W. and Mader, I. and Kiselev, V. G.}, Title = {Gibbs Tracking: A Novel Approach for the Reconstruction of Neuronal Pathways}, Journal = {Proc. Intl. Soc. Mag. Reson. Med.}, Volume = {16}, Pages = {425}, abstract = {Fibre tractography based on diffusion weighted MRI is a powerful method to extract the anatomical connectivity in white matter in vivo. The main idea of the currently available methods of fibre tracking is the reconstruction of long neuronal pathways in small successive steps by following the local, voxel-defined fibre direction. Starting from local information on the diffusivity, long-distance connections are determined. This method is inherently prone to instability, since a mistake at a single crossing affects radically the final result. In this paper we present a method based on a new principle. Instead of walking successively through the volume all neuronal pathways and the totality of the signal is taken into account at the same time. 
This novel approach is capable to reconstruct crossing and spreading fibre configuration.}, file = {attachment\:Kreher2008ISMRM.pdf:attachment\:Kreher2008ISMRM.pdf:PDF}, year = 2008 } @Article{Bea02, Author = {Beaulieu, C.}, Title = {The basis of anisotropic water diffusion in the nervous system - a technical review.}, Journal = {NMR Biomed}, Volume = {15}, Number = {7-8}, Pages = {435-55}, abstract = {Anisotropic water diffusion in neural fibres such as nerve, white matter in spinal cord, or white matter in brain forms the basis for the utilization of diffusion tensor imaging (DTI) to track fibre pathways. The fact that water diffusion is sensitive to the underlying tissue microstructure provides a unique method of assessing the orientation and integrity of these neural fibres, which may be useful in assessing a number of neurological disorders. The purpose of this review is to characterize the relationship of nuclear magnetic resonance measurements of water diffusion and its anisotropy (i.e. directional dependence) with the underlying microstructure of neural fibres. The emphasis of the review will be on model neurological systems both in vitro and in vivo. A systematic discussion of the possible sources of anisotropy and their evaluation will be presented followed by an overview of various studies of restricted diffusion and compartmentation as they relate to anisotropy. Pertinent pathological models, developmental studies and theoretical analyses provide further insight into the basis of anisotropic diffusion and its potential utility in the nervous system.}, authoraddress = {Department of Biomedical Engineering, Faculty of Medicine, University of Alberta, Edmonton, Canada. christian.beaulieu@ualberta.ca}, keywords = {*Anisotropy ; Brain/metabolism/pathology ; Brain Chemistry ; Diffusion ; Diffusion Magnetic Resonance Imaging/*methods ; Models, Biological ; Nerve Fibers/chemistry/metabolism/pathology ; Nervous System/chemistry/*metabolism/*pathology ; Nervous System Diseases/metabolism/pathology ; Spinal Cord/chemistry/cytology/metabolism ; Water/*chemistry}, language = {eng}, medline-aid = {10.1002/nbm.782 [doi]}, medline-ci = {Copyright 2002 John Wiley & Sons, Ltd.}, medline-da = {20021218}, medline-dcom = {20030701}, medline-edat = {2002/12/19 04:00}, medline-fau = {Beaulieu, Christian}, medline-is = {0952-3480 (Print)}, medline-jid = {8915233}, medline-jt = {NMR in biomedicine}, medline-lr = {20061115}, medline-mhda = {2003/07/02 05:00}, medline-own = {NLM}, medline-pl = {England}, medline-pmid = {12489094}, medline-pst = {ppublish}, medline-pt = {Journal Article ; Research Support, Non-U.S. Gov't ; Review}, medline-pubm = {Print}, medline-rf = {131}, medline-rn = {7732-18-5 (Water)}, medline-sb = {IM}, medline-so = {NMR Biomed. 
2002 Nov-Dec;15(7-8):435-55.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks\&dbfrom=pubmed\&retmode=ref\&id=12489094}, year = 2002 } @Article{Sciences2009, Author = {Sciences, Cognition Brain}, Title = {{Michaelmas Term 2008}}, Journal = {Sciences-New York}, Pages = {9469--9469}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Sciences - 2009 - Michaelmas Term 2008.pdf:pdf}, year = 2009 } @Article{Zvitia2010, Author = {Zvitia, Orly and Mayer, Arnaldo and Shadmi, Ran and Miron, Shmuel and Greenspan, Hayit K}, Title = {{Co-registration of white matter tractographies by adaptive-mean-shift and Gaussian mixture modeling.}}, Journal = {IEEE transactions on medical imaging}, Volume = {29}, Number = {1}, Pages = {132--45}, abstract = {In this paper, we present a robust approach to the registration of white matter tractographies extracted from diffusion tensor-magnetic resonance imaging scans. The fibers are projected into a high dimensional feature space based on the sequence of their 3-D coordinates. Adaptive mean-shift clustering is applied to extract a compact set of representative fiber-modes (FM). Each FM is assigned to a multivariate Gaussian distribution according to its population thereby leading to a Gaussian mixture model (GMM) representation for the entire set of fibers. The registration between two fiber sets is treated as the alignment of two GMMs and is performed by maximizing their correlation ratio. A nine-parameters affine transform is recovered and eventually refined to a twelve-parameters affine transform using an innovative mean-shift based registration refinement scheme presented in this paper. The validation of the algorithm on synthetic intrasubject data demonstrates its robustness to interrupted and deviating fiber artifacts as well as outliers. Using real intrasubject data, a comparison is conducted to other intensity based and fiber-based registration algorithms, demonstrating competitive results. An option for tracking-in-time, on specific white matter fiber tracts, is also demonstrated on the real data.}, doi = {10.1109/TMI.2009.2029097}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Zvitia et al. - 2010 - Co-registration of white matter tractographies by adaptive-mean-shift and Gaussian mixture modeling..pdf:pdf}, issn = {1558-0062}, keywords = {Algorithms,Brain,Brain: anatomy \& histology,Cluster Analysis,Diffusion Tensor Imaging,Diffusion Tensor Imaging: methods,Humans,Image Processing, Computer-Assisted,Image Processing, Computer-Assisted: methods,Models, Neurological,Normal Distribution,Reproducibility of Results}, month = jan, pmid = {19709970}, url = {http://www.ncbi.nlm.nih.gov/pubmed/19709970}, year = 2010 } @Article{Martinez2007, Author = {Martinez, Aleix M}, Title = {{Spherical-Homoscedastic Distributions : The Equivalency of Spherical and Normal Distributions in Classification}}, Journal = {Journal of Machine Learning Research}, Volume = {8}, Pages = {1583--1623}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Martinez - 2007 - Spherical-Homoscedastic Distributions The Equivalency of Spherical and Normal Distributions in Classification.pdf:pdf}, keywords = {computer vision,directional data,linear and non-linear classifiers,norm normalization,normal distributions,spherical distributions}, year = 2007 } @Article{Tu2007TransMedIm, Author = {Tu, Zhuowen and Narr, Katherine L. 
and Dollar, Piotr and Dinov, Ivo and Thompson, Paul M. and Toga, Arthur W.}, Title = {Brain Anatomical Structure Segmentation by Hybrid Discriminative/Generative Models}, Journal = {Transactions on Medical Imaging}, Volume = {in press}, abstract = {In this paper, a hybrid discriminative/generative model for brain anatomical structure segmentation is proposed. The learning aspect of the approach is emphasized. In the discriminative appearance models, various cues such as intensity and curvatures are combined to locally capture the complex appearances of different anatomical structures. A probabilistic boosting tree (PBT) framework is adopted to learn multi-class discriminative models that combine hundreds of features across different scales. On the generative side, Principal Component Analysis (PCA) shape models are used to capture the global shape information about each anatomical structure. The parameters to combine the discriminative appearance and generative shape models are also automatically learned. Thus low-level and highlevel information is learned and integrated in a hybrid model. Segmentations are obtained by minimizing an energy function associated with the proposed hybrid model. Finally, a gridface structure is designed to explicitly represent the 3D region topology. This representation handles an arbitrary number of regions and facilitates fast surface evolution. Our system was trained and tested on a set of 3D MRI volumes and the results obtained are encouraging.}, file = {attachment\:Tu2007TransMedIm.pdf:attachment\:Tu2007TransMedIm.pdf:PDF}, year = 2007 } @Article{Duru2010, Author = {Duru, Dilek G\"{o}ksel and Ozkan, Mehmed}, Title = {{Determination of neural fiber connections based on data structure algorithm.}}, Journal = {Computational intelligence and neuroscience}, Volume = {2010}, Pages = {251928}, abstract = {The brain activity during perception or cognition is mostly examined by functional magnetic resonance imaging (fMRI). However, the cause of the detected activity relies on the anatomy. Diffusion tensor magnetic resonance imaging (DTMRI) as a noninvasive modality providing in vivo anatomical information allows determining neural fiber connections which leads to brain mapping. Still a complete map of fiber paths representing the human brain is missing in literature. One of the main drawbacks of reliable fiber mapping is the correct detection of the orientation of multiple fibers within a single imaging voxel. In this study a method based on linear data structures is proposed to define the fiber paths regarding their diffusivity. Another advantage of the proposed method is that the analysis is applied on entire brain diffusion tensor data. 
The implementation results are promising, so that the method will be developed as a rapid fiber tractography algorithm for the clinical use as future study.}, doi = {10.1155/2010/251928}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Duru, Ozkan - 2010 - Determination of neural fiber connections based on data structure algorithm..pdf:pdf}, issn = {1687-5273}, keywords = {Algorithms,Brain,Brain: anatomy \& histology,Diffusion Tensor Imaging,Diffusion Tensor Imaging: methods,Humans,Image Processing, Computer-Assisted,Image Processing, Computer-Assisted: methods,Linear Models,Neural Pathways,Neural Pathways: anatomy \& histology,Uncertainty}, month = jan, pmid = {20069047}, url = {http://www.pubmedcentral.nih.gov/articlerender.fcgi?artid=2801001\&tool=pmcentrez\&rendertype=abstract}, year = 2010 } @Article{Tang1997, Author = {Tang, Y and Nyengaard, J R}, Title = {{A stereological method for estimating the total length and size of myelin fibers in human brain white matter.}}, Journal = {Journal of neuroscience methods}, Volume = {73}, Number = {2}, Pages = {193--200}, abstract = {A practically unbiased stereological method to obtain estimates of the volume and total length of nerve fibers in brain white matter is described. The sampling scheme is designed so that the majority of brain white matter is left intact, thus providing the possibility for resampling and further analysis. Uniform sampling of one complete hemispherical white matter is performed. The volume fraction of nerve fibers in white matter is estimated by point counting. The total length of nerve fibers was estimated from the product of the volume of white matter, obtained with the Cavalieri principle, and the fiber length density, obtained from the isotropic, uniform random sections which were ensured by the isector. The size of nerve fibers was derived by measuring the profile diameter perpendicular to its longest axis. The influence of the postmortem fixation delay on nerve fiber parameters was investigated in one dog and one pig. The criteria for identification of nerve fiber profiles at light microscopy were evaluated using electron microscopy.}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Tang, Nyengaard - 1997 - A stereological method for estimating the total length and size of myelin fibers in human brain white matter..pdf:pdf}, issn = {0165-0270}, keywords = {Adolescent,Adult,Animals,Brain,Brain: ultrastructure,Dogs,Female,Humans,Middle Aged,Models, Neurological,Nerve Fibers, Myelinated,Nerve Fibers, Myelinated: ultrastructure,Neurosciences,Neurosciences: methods,Swine}, month = may, pmid = {9196291}, url = {http://www.ncbi.nlm.nih.gov/pubmed/9196291}, year = 1997 } @Article{margolis5nal, Author = {Margolis, G. and Pickett, JP}, Title = {{New applications of the Luxol fast blue myelin stain.}}, Journal = {Laboratory investigation; a journal of technical methods and pathology}, Volume = {5}, Number = {6}, Pages = {459} } @Article{Ghosh2008, Author = {Ghosh, Aurobrata and Tsigaridas, Elias and Descoteaux, Maxime and Comon, Pierre and Mourrain, Bernard and Deriche, Rachid}, Title = {{A polynomial based approach to extract the maxima of an antipodally symmetric spherical function and its application to extract fiber directions from the Orientation Distribution Function in Diffusion MRI}}, Journal = {Tensor}, Pages = {237--248}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Ghosh et al. 
- 2008 - A polynomial based approach to extract the maxima of an antipodally symmetric spherical function and its application to extract fiber directions from the Orientation Distribution Function in Diffusion MRI.pdf:pdf}, year = 2008 } @Article{HTJ+03, Author = {Hagmann, P. and Thiran, J. P. and Jonasson, L. and Vandergheynst, P. and Clarke, S. and Maeder, P. and Meuli, R.}, Title = {D{TI} mapping of human brain connectivity: statistical fibre tracking and virtual dissection.}, Journal = {Neuroimage}, Volume = {19}, Number = {3}, Pages = {545-54}, abstract = {Several approaches have been used to trace axonal trajectories from diffusion MRI data. If such techniques were first developed in a deterministic framework reducing the diffusion information to one single main direction, more recent approaches emerged that were statistical in nature and that took into account the whole diffusion information. Based on diffusion tensor MRI data coming from normal brains, this paper presents how brain connectivity could be modelled globally by means of a random walk algorithm. The mass of connections thus generated was then virtually dissected to uncover different tracts. Corticospinal, corticobulbar, and corticothalamic tracts, the corpus callosum, the limbic system, several cortical association bundles, the cerebellar peduncles, and the medial lemniscus were all investigated. The results were then displayed in the form of an in vivo brain connectivity atlas. The connectivity pattern and the individual fibre tracts were then compared to known anatomical data; a good matching was found.}, authoraddress = {Signal Processing Institute, Swiss Federal Institute of Technology, 1015 Lausanne, Switzerland. patric.hagmann@epfl.ch}, keywords = {Algorithms ; Axons/physiology ; Brain/*anatomy \& histology ; *Brain Mapping ; Cerebellum/anatomy \& histology/physiology ; Cerebral Cortex/anatomy \& histology/physiology ; Computer Graphics ; Humans ; Image Processing, Computer-Assisted ; Magnetic Resonance Imaging ; Models, Neurological ; Nerve Fibers/*physiology ; Neural Pathways/*anatomy \& histology ; Pyramidal Tracts/anatomy \& histology/physiology ; Thalamus/anatomy \& histology/physiology}, language = {eng}, medline-aid = {S1053811903001423 [pii]}, medline-crdt = {2003/07/26 05:00}, medline-da = {20030725}, medline-dcom = {20030909}, medline-edat = {2003/07/26 05:00}, medline-fau = {Hagmann, P ; Thiran, J-P ; Jonasson, L ; Vandergheynst, P ; Clarke, S ; Maeder, P ; Meuli, R}, medline-is = {1053-8119 (Print)}, medline-jid = {9215515}, medline-jt = {NeuroImage}, medline-lr = {20041117}, medline-mhda = {2003/09/10 05:00}, medline-own = {NLM}, medline-pl = {United States}, medline-pmid = {12880786}, medline-pst = {ppublish}, medline-pt = {Clinical Trial ; Journal Article}, medline-sb = {IM}, medline-so = {Neuroimage. 
2003 Jul;19(3):545-54.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=12880786}, year = 2003 } @Article{Wassermann2004, Author = {Wassermann, Demian and Deriche, Rachid}, Title = {{Simultaneous Manifold Learning and Clustering : Grouping White Matter Fiber Tracts Using a Volumetric White Matter Atlas}}, Journal = {International Journal of Computer Vision}, Pages = {1--8}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Wassermann, Deriche - 2004 - Simultaneous Manifold Learning and Clustering Grouping White Matter Fiber Tracts Using a Volumetric White Matter Atlas.pdf:pdf}, year = 2004 } @Article{BKK+04, Author = {Bodammer, N. and Kaufmann, J. and Kanowski, M. and Tempelmann, C.}, Title = {Eddy current correction in diffusion-weighted imaging using pairs of images acquired with opposite diffusion gradient polarity.}, Journal = {Magn Reson Med}, Volume = {51}, Number = {1}, Pages = {188-93}, abstract = {In echo-planar-based diffusion-weighted imaging (DWI) and diffusion tensor imaging (DTI), the evaluation of diffusion parameters such as apparent diffusion coefficients and anisotropy indices is affected by image distortions that arise from residual eddy currents produced by the diffusion-sensitizing gradients. Correction methods that coregister diffusion-weighted and non-diffusion-weighted images suffer from the different contrast properties inherent in these image types. Here, a postprocessing correction scheme is introduced that makes use of the inverse characteristics of distortions generated by gradients with reversed polarity. In this approach, only diffusion-weighted images with identical contrast are included for correction. That is, non-diffusion-weighted images are not needed as a reference for registration. Furthermore, the acquisition of an additional dataset with moderate diffusion-weighting as suggested by Haselgrove and Moore (Magn Reson Med 1996;36:960-964) is not required. With phantom data it is shown that the theoretically expected symmetry of distortions is preserved in the images to a very high degree, demonstrating the practicality of the new method. Results from human brain images are also presented.}, authoraddress = {Department of Neurology II, Otto von Guericke University Magdeburg, Germany. bodammer@neuro2.med.uni-magdeburg.de}, keywords = {Algorithms ; Brain/*anatomy \& histology ; Diffusion Magnetic Resonance Imaging/*methods ; Humans ; *Image Processing, Computer-Assisted ; Phantoms, Imaging}, language = {eng}, medline-aid = {10.1002/mrm.10690 [doi]}, medline-ci = {Copyright 2003 Wiley-Liss, Inc.}, medline-crdt = {2004/01/06 05:00}, medline-da = {20040105}, medline-dcom = {20040507}, medline-edat = {2004/01/06 05:00}, medline-fau = {Bodammer, Nils ; Kaufmann, Jorn ; Kanowski, Martin ; Tempelmann, Claus}, medline-is = {0740-3194 (Print)}, medline-jid = {8505245}, medline-jt = {Magnetic resonance in medicine : official journal of the Society of Magnetic Resonance in Medicine / Society of Magnetic Resonance in Medicine}, medline-lr = {20061115}, medline-mhda = {2004/05/08 05:00}, medline-own = {NLM}, medline-pl = {United States}, medline-pmid = {14705060}, medline-pst = {ppublish}, medline-pt = {Journal Article ; Research Support, Non-U.S. Gov't}, medline-sb = {IM}, medline-so = {Magn Reson Med. 
2004 Jan;51(1):188-93.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=14705060}, year = 2004 } @Article{Correia2009b, Author = {Correia, Stephen and Lee, Stephanie Y and Voorn, Thom and Tate, David F and Paul, Robert H and Salloway, Stephen P and Malloy, Paul F and Laidlaw, David H}, Title = {{NIH Public Access}}, Journal = {Water}, Volume = {42}, Number = {2}, Pages = {568--581}, doi = {10.1016/j.neuroimage.2008.05.022.Quantitative}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Correia et al. - 2009 - NIH Public Access.pdf:pdf}, year = 2009 } @Article{Wang1999, Author = {Wang, Y and Berg, P and Scherg, M}, Title = {{Common spatial subspace decomposition applied to analysis of brain responses under multiple task conditions: a simulation study.}}, Journal = {Clinical neurophysiology : official journal of the International Federation of Clinical Neurophysiology}, Volume = {110}, Number = {4}, Pages = {604--14}, abstract = {A method, called common spatial subspace decomposition, is presented which can extract signal components specific to one condition from multiple magnetoencephalography/electroencephalography data sets of multiple task conditions. Signal matrices or covariance matrices are decomposed using spatial factors common to multiple conditions. The spatial factors and corresponding spatial filters are then dissociated into specific and common parts, according to the common spatial subspace which exists among the data sets. Finally, the specific signal components are extracted using the corresponding spatial filters and spatial factors. The relationship between this decomposition and spatio-temporal source models is described in this paper. Computer simulations suggest that this method can facilitate the analysis of brain responses under multiple task conditions and merits further application.}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Wang, Berg, Scherg - 1999 - Common spatial subspace decomposition applied to analysis of brain responses under multiple task conditions a simulation study..pdf:pdf}, issn = {1388-2457}, keywords = {Brain,Brain Mapping,Brain: physiology,Computer Simulation,Humans,Models, Neurological,Task Performance and Analysis}, month = apr, pmid = {10378728}, url = {http://www.ncbi.nlm.nih.gov/pubmed/10378728}, year = 1999 } @Article{Zhai2003, Author = {Zhai, Guihua and Lin, Weili and Wilber, Kathy P and Gerig, Guido and Gilmore, John H}, Title = {{Comparisons of regional white matter diffusion in healthy neonates and adults performed with a 3.0-T head-only MR imaging unit.}}, Journal = {Radiology}, Volume = {229}, Number = {3}, Pages = {673--81}, abstract = {PURPOSE: To evaluate the normal brains of adults and neonates for regional and age-related differences in apparent diffusion coefficient (ADC) and fractional anisotropy (FA). MATERIALS AND METHODS: Eight healthy adults and 20 healthy neonates were examined with a 3.0-T head-only magnetic resonance (MR) imaging unit by using a single-shot diffusion-tensor sequence. Trace ADC maps, FA maps, directional maps of the putative directions of white matter (WM) tracts, and fiber-tracking maps were obtained. Regions of interest-eight in WM and one in gray matter (GM)-were predefined for the ADC and FA measurements. 
The Student t test was used to compare FA and ADC between adults and neonates, whereas the Tukey multiple-comparison test was used to compare FA and ADC in different brain regions in the adult and neonate groups. RESULTS: A global elevation in ADC (P <.001) in both GM and WM and a reduction in FA (P <.001) in WM were observed in neonates as compared with these values in adults. In addition, significant regional variations in FA and ADC were observed in both groups. Regional variations in FA and ADC were less remarkable in adults, whereas neonates had consistently higher FA values and lower ADC values in the central WM as compared with these values in the peripheral WM. Fiber tracking revealed only major WM tracts in the neonates but fibers extending to the peripheral WM in the adults. CONCLUSION: There were regional differences in FA and ADC values in the neonates; such variations were less remarkable in the adults.}, doi = {10.1148/radiol.2293021462}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Zhai et al. - 2003 - Comparisons of regional white matter diffusion in healthy neonates and adults performed with a 3.0-T head-only MR imaging unit..pdf:pdf}, issn = {0033-8419}, keywords = {Adult,Age Factors,Brain,Brain: anatomy \& histology,Diffusion Magnetic Resonance Imaging,Diffusion Magnetic Resonance Imaging: instrumentat,Humans,Infant, Newborn,ROC Curve}, month = dec, pmid = {14657305}, url = {http://www.ncbi.nlm.nih.gov/pubmed/14657305}, year = 2003 } @Article{Fillard2009, Author = {Fillard, P. and Poupon, C. and Mangin, J.F.}, Title = {{Spin Tracking: A Novel Global Tractography Algorithm}}, Journal = {NeuroImage}, Volume = {47}, Pages = {S127--S127}, doi = {10.1016/S1053-8119(09)71230-3}, issn = {10538119}, url = {http://dx.doi.org/10.1016/S1053-8119(09)71230-3}, year = 2009 } @Article{Behrens2003NatureNeuroscience, Author = {Behrens, T E J and Johansen-Berg, H and Woolrich, M W and Wheeler-Kingshott, C A M and Boulby, P A and Barker, G J and Sillery, E L and Sheehan, K and Ciccarellu, O and Thompson, A J and Brady, J M and Matthews, P M}, Title = {Non-invasive mapping of connections between human thalamus and cortex using diffusion imaging}, Journal = {Nature Neuroscience}, Volume = {6}, Number = {7}, Pages = {750-757}, abstract = {Evidence concerning anatomical connectivities in the human brain is sparse and based largely on limited post-mortem observations. Diffusion tensor imaging has previously been used to define large white-matter tracts in the living human brain, but this technique has had limited success in tracing pathways into gray matter. Here we identified specific connections between human thalamus and cortex using a novel probabilistic tractography algorithm with diffusion imaging data. Classification of thalamic gray matter based on cortical connectivity patterns revealed distinct subregions whose locations correspond to nuclei described previously in histological studies. The connections that we found between thalamus and cortex were similar to those reported for non-human primates and were reproducible between individuals. 
Our results provide the first quantitative demonstration of reliable inference of anatomical connectivity between human gray matter structures using diffusion data and the first connectivity-based segmentation of gray matter.}, file = {attachment\:Behrens2003NatureNeuroscience.pdf:attachment\:Behrens2003NatureNeuroscience.pdf:PDF}, publisher = {Nature Publishing Group}, year = 2003 } @Article{Joya, Author = {Joy, Kenneth I}, Title = {{Numerical Methods for Particle Tracing in Vector Fields}}, Journal = {Science}, Pages = {1--7}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Joy - Unknown - Numerical Methods for Particle Tracing in Vector Fields.pdf:pdf} } @Article{Blankertz2008, Author = {Blankertz, Benjamin and Tomioka, Ryota and Lemm, Steven and Kawanabe, Motoaki and M\"{u}ller, Klaus-robert}, Title = {{Optimizing Spatial Filters for Robust EEG Single-Trial Analysis}}, Journal = {Signal Processing}, Volume = {XX}, Pages = {1--12}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Blankertz et al. - 2008 - Optimizing Spatial Filters for Robust EEG Single-Trial Analysis.pdf:pdf}, year = 2008 } @Article{WirestamMRM2006, Author = {Wirestam, R. and Bibic, A. and Latt, J. and Brockstedt, S. and Stahlberg, F.}, Title = {{Denoising of complex MRI data by wavelet-domain filtering: Application to high-b-value diffusion-weighted imaging}}, Journal = {Magnetic Resonance in Medicine}, Volume = {56}, Number = {5}, publisher = {Wiley Subscription Services, Inc., A Wiley Company Hoboken}, year = 2006 } @Article{Lenglet2010, Author = {Lenglet, Christophe and Series, I M A Preprint and Hall, Lind and E, Church Street S and Aganj, Iman and Sapiro, Guillermo}, Title = {{ODF MAXIMA EXTRACTION IN INSTITUTE FOR MATHEMATICS AND ITS APPLICATIONS ODF Maxima Extraction in Spherical Harmonic Representation via Analytical Search Space Reduction}}, Journal = {Methods}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Lenglet et al. - 2010 - ODF MAXIMA EXTRACTION IN INSTITUTE FOR MATHEMATICS AND ITS APPLICATIONS ODF Maxima Extraction in Spherical Harmonic Representation via Analytical Search Space Reduction.pdf:pdf}, year = 2010 } @Article{Bai2009, Author = {Bai, Y}, Title = {{Correcting for Motion between Acquisitions in Diffusion MR Imaging}}, Journal = {Chart}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Bai - 2009 - Correcting for Motion between Acquisitions in Diffusion MR Imaging.pdf:pdf}, year = 2009 } @Book{mcrobbie2006mpp, Author = {McRobbie, D.W. and Moore, E.A. and Graves, M.J.}, Title = {{MRI from Picture to Proton}}, Publisher = {Cambridge University Press}, year = 2006 } @Article{Tang1997a, Author = {Tang, Y and Nyengaard, J R}, Title = {{A stereological method for estimating the total length and size of myelin fibers in human brain white matter.}}, Journal = {Journal of neuroscience methods}, Volume = {73}, Number = {2}, Pages = {193--200}, abstract = {A practically unbiased stereological method to obtain estimates of the volume and total length of nerve fibers in brain white matter is described. The sampling scheme is designed so that the majority of brain white matter is left intact, thus providing the possibility for resampling and further analysis. Uniform sampling of one complete hemispherical white matter is performed. The volume fraction of nerve fibers in white matter is estimated by point counting. 
The total length of nerve fibers was estimated from the product of the volume of white matter, obtained with the Cavalieri principle, and the fiber length density, obtained from the isotropic, uniform random sections which were ensured by the isector. The size of nerve fibers was derived by measuring the profile diameter perpendicular to its longest axis. The influence of the postmortem fixation delay on nerve fiber parameters was investigated in one dog and one pig. The criteria for identification of nerve fiber profiles at light microscopy were evaluated using electron microscopy.}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Tang, Nyengaard - 1997 - A stereological method for estimating the total length and size of myelin fibers in human brain white matter..pdf:pdf}, issn = {0165-0270}, keywords = {Adolescent,Adult,Animals,Brain,Brain: ultrastructure,Dogs,Female,Humans,Middle Aged,Models, Neurological,Nerve Fibers, Myelinated,Nerve Fibers, Myelinated: ultrastructure,Neurosciences,Neurosciences: methods,Swine}, month = may, pmid = {9196291}, url = {http://www.ncbi.nlm.nih.gov/pubmed/9196291}, year = 1997 } @Article{Bullmore2009, Author = {Bullmore, E and Sporns, O}, Title = {{Complex brain networks: graph theoretical analysis of structural and functional systems}}, Journal = {Nature Reviews Neuroscience}, Volume = {10}, Number = {3}, Pages = {186--198}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Bullmore, Sporns - 2009 - Complex brain networks graph theoretical analysis of structural and functional systems.pdf:pdf}, year = 2009 } @Article{Pajevic1999, Author = {Pajevic, Sinisa and Pierpaoli, Carlo}, Title = {{Color schemes to represent the orientation of anisotropic tissues from diffusion tensor data: Application to white matter fiber tract mapping in the human brain}}, Journal = {Magnetic Resonance in Medicine}, Volume = {42}, Number = {3}, abstract = {This paper investigates the use of color to represent the directional information contained in the diffusion tensor. Ideally, one wants to take into account both the properties of human color vision and of the given display hardware to produce a representation in which differences in the orientation of anisotropic structures are proportional to the perceived differences in color. It is argued here that such a goal cannot be achieved in general and therefore, empirical or heuristic schemes, which avoid some of the common artifacts of previously proposed approaches, are implemented. Directionally encoded color (DEC) maps of the human brain obtained using these schemes clearly show the main association, projection, and commissural white matter pathways. In the brainstem, motor and sensory pathways are easily identified and can be differentiated from the transverse pontine fibers and the cerebellar peduncles. DEC maps obtained from diffusion tensor imaging data provide a simple and effective way to visualize fiber direction, useful for investigating the structural anatomy of different organs. Magn Reson Med 42:526-540, 1999. 
© 1999 Wiley-Liss, Inc.}, doi = {10.1002/(SICI)1522-2594(199909)42:3<526::AID-MRM15>3.0.CO;2-J}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Pajevic, Pierpaoli - 1999 - Color schemes to represent the orientation of anisotropic tissues from diffusion tensor data Application to white matter fiber tract mapping in the human brain.pdf:pdf}, url = {http://www3.interscience.wiley.com/journal/63500786/abstract}, year = 1999 } @Article{DauguetNeuroImage2007, Author = {Dauguet, J. and Peled, S. and Berezovskii, V. and Delzescaux, T. and Warfield, S. K. and Born, R. and Westin, C. F.}, Title = {Comparison of fiber tracts derived from in-vivo {DTI} tractography with 3{D} histological neural tract tracer reconstruction on a macaque brain.}, Journal = {Neuroimage}, Volume = {37}, Number = {2}, Pages = {530-8}, abstract = {Since the introduction of diffusion weighted imaging (DWI) as a method for examining neural connectivity, its accuracy has not been formally evaluated. In this study, we directly compared connections that were visualized using injected neural tract tracers (WGA-HRP) with those obtained using in-vivo diffusion tensor imaging (DTI) tractography. First, we injected the tracer at multiple sites in the brain of a macaque monkey; second, we reconstructed the histological sections of the labeled fiber tracts in 3D; third, we segmented and registered the fibers (somatosensory and motor tracts) with the anatomical in-vivo MRI from the same animal; and last, we conducted fiber tracing along the same pathways on the DTI data using a classical diffusion tracing technique with the injection sites as seeds. To evaluate the performance of DTI fiber tracing, we compared the fibers derived from the DTI tractography with those segmented from the histology. We also studied the influence of the parameters controlling the tractography by comparing Dice superimposition coefficients between histology and DTI segmentations. While there was generally good visual agreement between the two methods, our quantitative comparisons reveal certain limitations of DTI tractography, particularly for regions at remote locations from seeds. We have thus demonstrated the importance of appropriate settings for realistic tractography results.}, authoraddress = {Computational Radiology Laboratory, Children's Hospital, Harvard Medical School, Boston, USA. 
dauguet@bwh.harvard.edu}, keywords = {Animals ; Anisotropy ; Brain/*anatomy \& histology ; *Diffusion Magnetic Resonance Imaging ; Image Processing, Computer-Assisted ; *Imaging, Three-Dimensional ; Immunohistochemistry ; Macaca ; Nerve Fibers/ultrastructure ; Neural Pathways/*cytology}, language = {eng}, medline-aid = {S1053-8119(07)00328-X [pii] ; 10.1016/j.neuroimage.2007.04.067 [doi]}, medline-crdt = {2007/07/03 09:00}, medline-da = {20070730}, medline-dcom = {20071012}, medline-dep = {20070524}, medline-edat = {2007/07/03 09:00}, medline-fau = {Dauguet, Julien ; Peled, Sharon ; Berezovskii, Vladimir ; Delzescaux, Thierry ; Warfield, Simon K ; Born, Richard ; Westin, Carl-Fredrik}, medline-gr = {P01 HD18655/HD/NICHD NIH HHS/United States ; P30-EY12196/EY/NEI NIH HHS/United States ; P41 RR013218/RR/NCRR NIH HHS/United States ; R01 HL074942/HL/NHLBI NIH HHS/United States ; R01 RR021885/RR/NCRR NIH HHS/United States ; R01-MH50747/MH/NIMH NIH HHS/United States ; R21 MH067054/MH/NIMH NIH HHS/United States ; U41 RR019703/RR/NCRR NIH HHS/United States ; U54 EB005149/EB/NIBIB NIH HHS/United States}, medline-is = {1053-8119 (Print)}, medline-jid = {9215515}, medline-jt = {NeuroImage}, medline-lr = {20071203}, medline-mhda = {2007/10/13 09:00}, medline-own = {NLM}, medline-phst = {2007/01/25 [received] ; 2007/04/05 [revised] ; 2007/04/10 [accepted] ; 2007/05/24 [aheadofprint]}, medline-pl = {United States}, medline-pmid = {17604650}, medline-pst = {ppublish}, medline-pt = {Comparative Study ; Journal Article ; Research Support, N.I.H., Extramural ; Research Support, Non-U.S. Gov't ; Research Support, U.S. Gov't, Non-P.H.S.}, medline-sb = {IM}, medline-so = {Neuroimage. 2007 Aug 15;37(2):530-8. Epub 2007 May 24.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=17604650}, year = 2007 } @Article{Jonasson2007, Author = {Jonasson, Lisa and Bresson, Xavier and Thiran, Jean-Philippe and Wedeen, Van J and Hagmann, Patric}, Title = {{Representing diffusion MRI in 5-D simplifies regularization and segmentation of white matter tracts.}}, Journal = {IEEE transactions on medical imaging}, Volume = {26}, Number = {11}, Pages = {1547--54}, abstract = {We present a new five-dimensional (5-D) space representation of diffusion magnetic resonance imaging (dMRI) of high angular resolution. This 5-D space is basically a non-Euclidean space of position and orientation in which crossing fiber tracts can be clearly disentangled, that cannot be separated in three-dimensional position space. This new representation provides many possibilities for processing and analysis since classical methods for scalar images can be extended to higher dimensions even if the spaces are not Euclidean. In this paper, we show examples of how regularization and segmentation of dMRI is simplified with this new representation. The regularization is used with the purpose of denoising and but also to facilitate the segmentation task by using several scales, each scale representing a different level of resolution. We implement in five dimensions the Chan-Vese method combined with active contours without edges for the segmentation and the total variation functional for the regularization. The purpose of this paper is to explore the possibility of segmenting white matter structures directly as entirely separated bundles in this 5-D space. 
We will present results from a synthetic model and results on real data of a human brain acquired with diffusion spectrum magnetic resonance imaging (MRI), one of the dMRI of high angular resolution available. These results will lead us to the conclusion that this new high-dimensional representation indeed simplifies the problem of segmentation and regularization.}, doi = {10.1109/TMI.2007.899168}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Jonasson et al. - 2007 - Representing diffusion MRI in 5-D simplifies regularization and segmentation of white matter tracts..pdf:pdf}, issn = {0278-0062}, keywords = {Algorithms,Artificial Intelligence,Brain,Brain: anatomy \& histology,Diffusion Magnetic Resonance Imaging,Diffusion Magnetic Resonance Imaging: methods,Humans,Image Enhancement,Image Enhancement: methods,Image Interpretation, Computer-Assisted,Image Interpretation, Computer-Assisted: methods,Imaging, Three-Dimensional,Imaging, Three-Dimensional: methods,Nerve Fibers, Myelinated,Nerve Fibers, Myelinated: ultrastructure,Pattern Recognition, Automated,Pattern Recognition, Automated: methods,Reproducibility of Results,Sensitivity and Specificity}, month = nov, pmid = {18041269}, url = {http://www.ncbi.nlm.nih.gov/pubmed/18041269}, year = 2007 } @Article{Frenkel2003, Author = {Frenkel, Max and Basri, Ronen}, Title = {{Using the Fast Marching Method}}, Pages = {35--51}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Frenkel, Basri - 2003 - Using the Fast Marching Method.pdf:pdf}, year = 2003 } @Article{Laidlaw, Author = {Laidlaw, David H}, Title = {{Similarity Coloring of DTI Fiber Tracts}}, Journal = {Science}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Laidlaw - Unknown - Similarity Coloring of DTI Fiber Tracts.pdf:pdf} } @Article{Parker2004BJR, Author = {Parker, G J M}, Title = {{Analysis of MR diffusion weighted images}}, Journal = {Br J Radiol}, Volume = {77}, Number = {suppl_2}, Pages = {S176-185}, abstract = {Diffusion-weighted MR images provide information that is present in no other imaging modality. Whilst some of this information may be appreciated visually in diffusion weighted images, much of it may be extracted only with the aid of data post-processing. This review summarizes the methods available for interpreting diffusion weighted imaging (DWI) information using the diffusion tensor and other models of the DWI signal. This is followed by an overview of methods that allow the estimation of fibre tract orientation and that provide estimates of the routes and degree of anatomical cerebral white matter connectivity. 
}, doi = {10.1259/bjr/81090732}, eprint = {http://bjr.birjournals.org/cgi/reprint/77/suppl_2/S176.pdf}, file = {attachment\:Parker2004BJR.pdf:attachment\:Parker2004BJR.pdf:PDF}, url = {http://bjr.birjournals.org/cgi/content/abstract/77/suppl_2/S176}, year = 2004 } @Article{Prentice1984, Author = {Prentice, Michael J.}, Title = {{A distribution-free method of interval estimation for unsigned directional data}}, Journal = {Biometrika}, Volume = {71}, Number = {1}, Pages = {147--154}, doi = {10.1093/biomet/71.1.147}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Prentice - 1984 - A distribution-free method of interval estimation for unsigned directional data.pdf:pdf}, issn = {0006-3444}, url = {http://biomet.oxfordjournals.org/cgi/doi/10.1093/biomet/71.1.147}, year = 1984 } @Article{MelieGarcia2008NeuroImage, Author = {Melie-Garcia, Lester and Canales-Rodriguez, Erick J. and Aleman-Gomez, Yasser and Lin, Ching-Po and Iturria-Medina, Yasser and Valdes-Hernandez, Pedro A. }, Title = {A bayesian framework to identify principal intravoxel diffusion profiles based on diffusion-weighted \{{M}{R}\} imaging}, Journal = {NeuroImage}, Volume = {42}, Number = {2}, Pages = {750-770}, abstract = {In this paper we introduce a new method to characterize the intravoxel anisotropy based on diffusion-weighted imaging (DWI). The proposed solution, under a fully Bayesian formalism, deals with the problem of joint Bayesian Model selection and parameter estimation to reconstruct the principal diffusion profiles or primary fiber orientations in a voxel. We develop an efficient stochastic algorithm based on the reversible jump Markov chain Monte Carlo (RJMCMC) method in order to perform the Bayesian computation. RJMCMC is a good choice for this problem because of its ability to jump between models of different dimensionality. This methodology provides posterior estimates of the parameters of interest (fiber orientation, diffusivities etc) unconditional of the model assumed. It also gives an empirical posterior distribution of the number of primary nerve fiber orientations given the DWI data. Different probability maps can be assessed using this methodology: 1) the intravoxel fiber orientation map (or orientational distribution function) that gives the probability of finding a fiber in a particular spatial orientation; 2) a three-dimensional map of the probability of finding a particular number of fibers in each voxel; 3) a three-dimensional MaxPro (maximum probability) map that provides the most probable number of fibers for each voxel. 
In order to study the performance and reliability of the presented approach, we tested it on synthetic data; an ex-vivo phantom of intersecting capillaries; and DWI data from a human subject.}, file = {attachment\:MelieGarcia2008NeuroImage.pdf:attachment\:MelieGarcia2008NeuroImage.pdf:PDF}, publisher = {Elsevier}, url = {http://www.sciencedirect.com/science/article/B6WNP-4SD6SK8-3/2/8c1ea05184c975fa63eb37b877737d9f}, year = 2008 } @Article{Dougherty2005, Author = {Dougherty, Robert F and Ben-Shachar, Michal and Bammer, Roland and Brewer, Alyssa a and Wandell, Brian a}, Title = {{Functional organization of human occipital-callosal fiber tracts.}}, Journal = {Proceedings of the National Academy of Sciences of the United States of America}, Volume = {102}, Number = {20}, Pages = {7350--5}, abstract = {Diffusion tensor imaging (DTI) and fiber tracking (FT) were used to measure the occipital lobe fiber tracts connecting the two hemispheres in individual human subjects. These tracts are important for normal vision. Also, damage to portions of these tracts is associated with alexia. To assess the reliability of the DTI-FT measurements, occipital-callosal projections were estimated from each subject's left and right hemispheres independently. The left and right estimates converged onto the same positions within the splenium. We further characterized the properties of the estimated occipital-callosal fiber tracts by combining them with functional MRI. We used functional MRI to identify visual field maps in cortex and labeled fibers by the cortical functional response at the fiber endpoint. This labeling reveals a regular organization of the fibers within the splenium. The dorsal visual maps (dorsal V3, V3A, V3B, V7) send projections through a large band in the middle of the splenium, whereas ventral visual maps (ventral V3, V4) send projections through the inferior-anterior corner of the splenium. The agreement between the independent left/right estimates, further supported by previous descriptions of homologous tracts in macaque, validates the DTI-FT methods. However, a principal limitation of these methods is low sensitivity: a large number of fiber tracts that connect homotopic regions of ventral and lateral visual cortex were undetected. We conclude that most of the estimated tracts are real and can be localized with a precision of 1-2 mm, but many tracts are missed because of data and algorithm limitations.}, doi = {10.1073/pnas.0500003102}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Dougherty et al. - 2005 - Functional organization of human occipital-callosal fiber tracts..pdf:pdf}, issn = {0027-8424}, keywords = {Adult,Algorithms,Brain Mapping,Corpus Callosum,Corpus Callosum: cytology,Echo-Planar Imaging,Echo-Planar Imaging: methods,Female,Humans,Magnetic Resonance Imaging,Male,Middle Aged,Occipital Lobe,Occipital Lobe: cytology,Visual Fields,Visual Fields: physiology}, month = may, pmid = {15883384}, url = {http://www.ncbi.nlm.nih.gov/pubmed/15883384}, year = 2005 } @Article{Behrens2007NeuroImage, Author = {Behrens, T.E.J. and Johansen-Berg, H. and Jbabdi, S. and Rushworth, M.F.S. and Woolrich, M.W.}, Title = {Probabilistic diffusion tractography with multiple fibre orientations: What can we gain?}, Journal = {NeuroImage}, Volume = {34}, Number = {1}, Pages = {144-155}, abstract = {We present a direct extension of probabilistic diffusion tractography to the case of multiple fibre orientations. 
Using automatic relevance determination, we are able to perform online selection of the number of fibre orientations supported by the data at each voxel, simplifying the problem of tracking in a multi-orientation field. We then apply the identical probabilistic algorithm to tractography in the multi- and single-fibre cases in a number of example systems which have previously been tracked successfully or unsuccessfully with single-fibre tractography. We show that multi-fibre tractography offers significant advantages in sensitivity when tracking non-dominant fibre populations, but does not dramatically change tractography results for the dominant pathways.}, file = {attachment\:Behrens2007NeuroImage.pdf:attachment\:Behrens2007NeuroImage.pdf:PDF}, publisher = {Elsevier}, url = {http://www.sciencedirect.com/science/article/B6WNP-4M6SBH3-4/2/043728426dfb426bd39df3b8d3751bed}, year = 2007 } @Article{Catani2002NeuroImage, Author = {Catani, Marco and Howard, Robert J. and Pajevic, Sinisa and Jones, Derek K.}, Title = {Virtual {in vivo} interactive dissection of white matter fasciculi in the human brain }, Journal = {NeuroImage}, Volume = {17}, Pages = {77-94}, abstract = {This work reports the use of diffusion tensor magnetic resonance tractography to visualize the three-dimensional (3D) structure of the major white matter fasciculi within living human brain. Specifically, we applied this technique to visualize in vivo (i) the superior longitudinal (arcuate) fasciculus, (ii) the inferior longitudinal fasciculus, (iii) the superior fronto-occipital (subcallosal) fasciculus, (iv) the inferior frontooccipital fasciculus, (v) the uncinate fasciculus, (vi) the cingulum, (vii) the anterior commissure, (viii) the corpus callosum, (ix) the internal capsule, and (x) the fornix. These fasciculi were first isolated and were then interactively displayed as a 3D-rendered object. The virtual tract maps obtained in vivo using this approach were faithful to the classical descriptions of white matter anatomy that have previously been documented in postmortem studies. Since we have been able to interactively delineate and visualize white matter fasciculi over their entire length in vivo, in a manner that has only previously been possible by histological means, virtual in vivo interactive dissection (VIVID) adds a new dimension to anatomical descriptions of the living human brain.}, doi = {10.1006/nimg.2002.1136}, file = {attachment\:Catani2002NeuroImage.pdf:attachment\:Catani2002NeuroImage.pdf:PDF}, publisher = {Elsevier}, year = 2002 } @Article{Marinucci2008a, Author = {Marinucci, D and Pietrobon, D and Balbi, A and Baldi, P and Cabella, P and Kerkyacharian, G and Natoli, P and Picard, D and Vittorio, N}, Title = {{Spherical Needlets for CMB Data Analysis}}, Volume = {000}, Number = {February}, arxivid = {arXiv:0707.0844v1}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Marinucci et al. - 2008 - Spherical Needlets for CMB Data Analysis.pdf:pdf}, year = 2008 } @Article{DoughertyPNAS2005, Author = {Dougherty, R. F. and Ben-Shachar, M. and Bammer, R. and Brewer, A. A. and Wandell, B. A.}, Title = {Functional organization of human occipital-callosal fiber tracts.}, Journal = {Proc Natl Acad Sci U S A}, Volume = {102}, Number = {20}, Pages = {7350-5}, abstract = {Diffusion tensor imaging (DTI) and fiber tracking (FT) were used to measure the occipital lobe fiber tracts connecting the two hemispheres in individual human subjects. These tracts are important for normal vision. 
Also, damage to portions of these tracts is associated with alexia. To assess the reliability of the DTI-FT measurements, occipital-callosal projections were estimated from each subject's left and right hemispheres independently. The left and right estimates converged onto the same positions within the splenium. We further characterized the properties of the estimated occipital-callosal fiber tracts by combining them with functional MRI. We used functional MRI to identify visual field maps in cortex and labeled fibers by the cortical functional response at the fiber endpoint. This labeling reveals a regular organization of the fibers within the splenium. The dorsal visual maps (dorsal V3, V3A, V3B, V7) send projections through a large band in the middle of the splenium, whereas ventral visual maps (ventral V3, V4) send projections through the inferior-anterior corner of the splenium. The agreement between the independent left/right estimates, further supported by previous descriptions of homologous tracts in macaque, validates the DTI-FT methods. However, a principal limitation of these methods is low sensitivity: a large number of fiber tracts that connect homotopic regions of ventral and lateral visual cortex were undetected. We conclude that most of the estimated tracts are real and can be localized with a precision of 1-2 mm, but many tracts are missed because of data and algorithm limitations.}, authoraddress = {Stanford Institute for Reading and Learning, Department of Psychology, Stanford University, Stanford, CA 94305, USA. bobd@stanford.edu}, keywords = {Adult ; Algorithms ; *Brain Mapping ; Corpus Callosum/*cytology ; Echo-Planar Imaging/methods ; Female ; Humans ; Magnetic Resonance Imaging ; Male ; Middle Aged ; Occipital Lobe/*cytology ; Visual Fields/physiology}, language = {eng}, medline-aid = {0500003102 [pii] ; 10.1073/pnas.0500003102 [doi]}, medline-crdt = {2005/05/11 09:00}, medline-da = {20050518}, medline-dcom = {20050713}, medline-dep = {20050509}, medline-edat = {2005/05/11 09:00}, medline-fau = {Dougherty, Robert F ; Ben-Shachar, Michal ; Bammer, Roland ; Brewer, Alyssa A ; Wandell, Brian A}, medline-gr = {EY-015000/EY/NEI NIH HHS/United States ; EY-03164/EY/NEI NIH HHS/United States}, medline-is = {0027-8424 (Print)}, medline-jid = {7505876}, medline-jt = {Proceedings of the National Academy of Sciences of the United States of America}, medline-lr = {20081120}, medline-mhda = {2005/07/14 09:00}, medline-oid = {NLM: PMC1129102}, medline-own = {NLM}, medline-phst = {2005/05/09 [aheadofprint]}, medline-pl = {United States}, medline-pmc = {PMC1129102}, medline-pmid = {15883384}, medline-pst = {ppublish}, medline-pt = {Comparative Study ; Journal Article ; Research Support, N.I.H., Extramural ; Research Support, Non-U.S. Gov't ; Research Support, U.S. Gov't, P.H.S.}, medline-sb = {IM}, medline-so = {Proc Natl Acad Sci U S A. 2005 May 17;102(20):7350-5. Epub 2005 May 9.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=15883384}, year = 2005 } @Article{ValentinaTomassini09192007, Author = {Tomassini, Valentina and Jbabdi, Saad and Klein, Johannes C. and Behrens, Timothy E. J. and Pozzilli, Carlo and Matthews, Paul M. and Rushworth, Matthew F. S. and Johansen-Berg, Heidi}, Title = {Diffusion-Weighted Imaging Tractography-Based Parcellation of the Human Lateral Premotor Cortex Identifies Dorsal and Ventral Subregions with Anatomical and Functional Specializations}, Journal = {J. 
Neurosci.}, Volume = {27}, Number = {38}, Pages = {10259-10269}, abstract = {Lateral premotor cortex (PM) in the macaque monkey can be segregated into structurally and functionally distinct subregions, including a major division between dorsal (PMd) and ventral (PMv) parts, which have distinct cytoarchitecture, function, and patterns of connectivity with both frontal and parietal cortical areas. The borders of their subregions are less well defined in the human brain. Here we use diffusion tractography to identify a reproducible border between dorsal and ventral subregions of human precentral gyrus. We derive connectivity fingerprints for the two subregions and demonstrate that each has a distinctive pattern of connectivity with frontal cortex and lateral parietal cortex, suggesting that these areas correspond to human PMd and PMv. Although putative human PMd has a high probability of connection with the superior parietal lobule, dorsal prefrontal cortex, and cingulate cortex, human PMv has a higher probability of connection with the anterior inferior parietal lobule and ventral prefrontal cortex. Finally, we assess the correspondence between our PMd/PMv border and local sulcal and functional anatomy. The location of the border falls at the level of the gyral branch that divides the inferior precentral sulcus from the superior precentral sulcus and corresponded closely to the location of a functional border defined using previous functional magnetic resonance imaging studies.}, doi = {10.1523/JNEUROSCI.2144-07.2007}, eprint = {http://www.jneurosci.org/cgi/reprint/27/38/10259.pdf}, file = {attachment\:tomassini_parcellation_2007.pdf:attachment\:tomassini_parcellation_2007.pdf:PDF}, url = {http://www.jneurosci.org/cgi/content/abstract/27/38/10259}, year = 2007 } @Article{Behrens2003MRM, Author = {Behrens, T. E. J. and Woolrich, M. W. and Jenkinson, M. and Johansen-Berg, H. and Nunes, R. G. and Clare, S. and Matthews, P. M. and Brady, J. M. and Smith, S. M.}, Title = {Characterization and propagation of uncertainty in diffusion-weighted \{{M}{R}\} imaging}, Journal = {Magnetic Resonance in Medicine}, Volume = {50}, Pages = {1077-1088}, abstract = {A fully probabilistic framework is presented for estimating local probability density functions on parameters of interest in a model of diffusion. This technique is applied to the estimation of parameters in the diffusion tensor model, and also to a simple partial volume model of diffusion. In both cases the parameters of interest include parameters defining local fiber direction. A technique is then presented for using these density functions to estimate global connectivity (i.e., the probability of the existence of a connection through the data field, between any two distant points), allowing for the quantification of belief in tractography results. This technique is then applied to the estimation of the cortical connectivity of the human thalamus. The resulting connectivity distributions correspond well with predictions from invasive tracer methods in nonhuman primate.}, file = {attachment\:Behrens2003MRM.pdf:attachment\:Behrens2003MRM.pdf:PDF}, publisher = {Wiley-Liss}, year = 2003 } @Article{ODonnell_MICCAI06, Author = {O'Donnell, L. and Westin, C. 
F.}, Title = {High-dimensional white matter atlas generation and group analysis.}, Journal = {Med Image Comput Comput Assist Interv Int Conf Med Image Comput Comput Assist Interv}, Volume = {9}, Number = {Pt 2}, Pages = {243-51}, abstract = {We present a two-step process including white matter atlas generation and automatic segmentation. Our atlas generation method is based on population fiber clustering. We produce an atlas which contains high-dimensional descriptors of fiber bundles as well as anatomical label information. We use the atlas to automatically segment tractography in the white matter of novel subjects and we present quantitative results (FA measurements) in segmented white matter regions from a small population. We demonstrate reproducibility of these measurements across scans. In addition, we introduce the idea of using clustering for automatic matching of anatomical structures across hemispheres.}, authoraddress = {Computer Science and Artificial Intelligence Laboratory, Massachusetts Institute of Technology, Cambridge MA, USA. lauren@csail.mit.edu}, keywords = {Algorithms ; Anatomy, Artistic/methods ; *Artificial Intelligence ; Brain/*anatomy \& histology ; Cluster Analysis ; Computer Simulation ; Diffusion Magnetic Resonance Imaging/*methods ; Humans ; Image Enhancement/methods ; Image Interpretation, Computer-Assisted/*methods ; Imaging, Three-Dimensional/methods ; Medical Illustration ; Models, Anatomic ; Nerve Fibers, Myelinated/*ultrastructure ; Neural Pathways/*anatomy \& histology ; Pattern Recognition, Automated/*methods ; Reproducibility of Results ; Sensitivity and Specificity}, language = {eng}, medline-crdt = {2007/03/16 09:00}, medline-da = {20070314}, medline-dcom = {20070406}, medline-edat = {2007/03/16 09:00}, medline-fau = {O'Donnell, Lauren ; Westin, Carl-Fredrik}, medline-gr = {P41 RR15241-01A1/RR/NCRR NIH HHS/United States ; P41-RR13218/RR/NCRR NIH HHS/United States ; R01 AG20012-01/AG/NIA NIH HHS/United States ; R01 MH 50747/MH/NIMH NIH HHS/United States ; U24-RR021382/RR/NCRR NIH HHS/United States ; U54-EB005149/EB/NIBIB NIH HHS/United States}, medline-jid = {101249582}, medline-jt = {Medical image computing and computer-assisted intervention : MICCAI ... International Conference on Medical Image Computing and Computer-Assisted Intervention}, medline-lr = {20071203}, medline-mhda = {2007/04/07 09:00}, medline-own = {NLM}, medline-pl = {Germany}, medline-pmid = {17354778}, medline-pst = {ppublish}, medline-pt = {Evaluation Studies ; Journal Article ; Research Support, N.I.H., Extramural}, medline-sb = {IM}, medline-so = {Med Image Comput Comput Assist Interv Int Conf Med Image Comput Comput Assist Interv. 2006;9(Pt 2):243-51.}, medline-stat = {MEDLINE}, url = {http://eutils.ncbi.nlm.nih.gov/entrez/eutils/elink.fcgi?cmd=prlinks&dbfrom=pubmed&retmode=ref&id=17354778}, year = 2006 } @Article{Correia2009, Author = {Correia, Marta Morgado}, Title = {{Development of Methods for the Acquisition and Analysis of Diffusion Weighted MRI Data}}, Journal = {Brain}, Number = {June}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Correia - 2009 - Development of Methods for the Acquisition and Analysis of Diffusion Weighted MRI Data.pdf:pdf}, year = 2009 } @Article{jbabdi2007bfg, Author = {Jbabdi, S. 
and Woolrich, MW and Andersson, JLR and Behrens, TEJ}, Title = {{A Bayesian framework for global tractography}}, Journal = {Neuroimage}, Volume = {37}, Number = {1}, Pages = {116--129}, publisher = {Elsevier}, year = 2007 } @Article{Jian2007aNeuroImage, Author = {Jian, Bing and Vemuri, Baba C. and Ozarslan, Evren and Carney, Paul R. and Mareci, Thomas H.}, Title = {A novel tensor distribution model for the diffusion-weighted \{{M}{R}\} signal}, Journal = {NeuroImage}, Volume = {37}, Number = {1}, Pages = {164-176}, abstract = {Diffusion MRI is a non-invasive imaging technique that allows the measurement of water molecule diffusion through tissue in vivo. The directional features of water diffusion allow one to infer the connectivity patterns prevalent in tissue and possibly track changes in this connectivity over time for various clinical applications. In this paper, we present a novel statistical model for diffusion-weighted MR signal attenuation which postulates that the water molecule diffusion can be characterized by a continuous mixture of diffusion tensors. An interesting observation is that this continuous mixture and the MR signal attenuation are related through the Laplace transform of a probability distribution over symmetric positive definite matrices. We then show that when the mixing distribution is a Wishart distribution, the resulting closed form of the Laplace transform leads to a Rigaut-type asymptotic fractal expression, which has been phenomenologically used in the past to explain the MR signal decay but never with a rigorous mathematical justification until now. Our model not only includes the traditional diffusion tensor model as a special instance in the limiting case, but also can be adjusted to describe complex tissue structure involving multiple fiber populations. Using this new model in conjunction with a spherical deconvolution approach, we present an efficient scheme for estimating the water molecule displacement probability functions on a voxel-by-voxel basis. Experimental results on both simulations and real data are presented to demonstrate the robustness and accuracy of the proposed algorithms.}, file = {attachment\:Jian2007aNeuroImage.pdf:attachment\:Jian2007aNeuroImage.pdf:PDF}, url = {http://www.sciencedirect.com/science/article/B6WNP-4NMSRV9-3/2/b4bc62020864c9b5767ce1e87874128a}, year = 2007 } @Article{Chen2006, Author = {Chen, Bin and Guo, Hua and Song, Allen W}, Title = {{Correction for direction-dependent distortions in diffusion tensor imaging using matched magnetic field maps.}}, Journal = {NeuroImage}, Volume = {30}, Number = {1}, Pages = {121--9}, abstract = {Diffusion tensor imaging (DTI) has seen increased usage in clinical and basic science research in the past decade. By assessing the water diffusion anisotropy within biological tissues, e.g. brain, researchers can infer different fiber structures important for neural pathways. A typical DTI data set contains at least one base image and six diffusion-weighted images along non-collinear encoding directions. The resultant images can then be combined to derive the three principal axes of the diffusion tensor and their respective cross terms, which can in turn be used to compute fractional anisotropy (FA) maps, apparent diffusion coefficient (ADC) maps, and to construct axonal fibers. The above operations all assume that DTI images along different diffusion-weighting directions for the same brain register to each other without spatial distortions. 
This assumption is generally false, as the large diffusion-weighting gradients would usually induce eddy currents to generate diffusion-weighting direction-dependent field gradients, leading to mis-registration within the DTI data set. Traditional methods for correcting magnetic field-induced distortions do not usually take into account these direction-dependent eddy currents unique for DTI, and they are usually time-consuming because multiple phase images need to be acquired. In this report, we describe our theory and implementation of an efficient and effective method to correct for the main field and eddy current-induced direction-dependent distortions for DTI images under a unified framework to facilitate the daily practice of DTI acquisitions.}, doi = {10.1016/j.neuroimage.2005.09.008}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Chen, Guo, Song - 2006 - Correction for direction-dependent distortions in diffusion tensor imaging using matched magnetic field maps..pdf:pdf}, issn = {1053-8119}, keywords = {Anisotropy,Artifacts,Brain,Brain Mapping,Brain: anatomy \& histology,Diffusion Magnetic Resonance Imaging,Diffusion Magnetic Resonance Imaging: statistics \&,Echo-Planar Imaging,Echo-Planar Imaging: statistics \& numerical data,Humans,Image Enhancement,Image Enhancement: methods,Image Processing, Computer-Assisted,Image Processing, Computer-Assisted: statistics \& ,Mathematical Computing,Nerve Fibers,Nerve Fibers: ultrasonography,Neural Pathways,Neural Pathways: anatomy \& histology,Phantoms, Imaging}, month = mar, pmid = {16242966}, url = {http://www.ncbi.nlm.nih.gov/pubmed/16242966}, year = 2006 } @Article{Corouge2006, Author = {Corouge, Isabelle and Fletcher, P Thomas and Joshi, Sarang and Gouttard, Sylvain and Gerig, Guido}, Title = {{Fiber tract-oriented statistics for quantitative diffusion tensor MRI analysis.}}, Journal = {Medical image analysis}, Volume = {10}, Number = {5}, Pages = {786--98}, abstract = {Quantitative diffusion tensor imaging (DTI) has become the major imaging modality to study properties of white matter and the geometry of fiber tracts of the human brain. Clinical studies mostly focus on regional statistics of fractional anisotropy (FA) and mean diffusivity (MD) derived from tensors. Existing analysis techniques do not sufficiently take into account that the measurements are tensors, and thus require proper interpolation and statistics of tensors, and that regions of interest are fiber tracts with complex spatial geometry. We propose a new framework for quantitative tract-oriented DTI analysis that systematically includes tensor interpolation and averaging, using nonlinear Riemannian symmetric space. A new measure of tensor anisotropy, called geodesic anisotropy (GA) is applied and compared with FA. As a result, tracts of interest are represented by the geometry of the medial spine attributed with tensor statistics (average and variance) calculated within cross-sections. Feasibility of our approach is demonstrated on various fiber tracts of a single data set. A validation study, based on six repeated scans of the same subject, assesses the reproducibility of this new DTI data analysis framework.}, doi = {10.1016/j.media.2006.07.003}, file = {:home/eg309/.local/share/data/Mendeley Ltd./Mendeley Desktop/Downloaded/Corouge et al. 
- 2006 - Fiber tract-oriented statistics for quantitative diffusion tensor MRI analysis..pdf:pdf}, issn = {1361-8415}, keywords = {Algorithms,Artificial Intelligence,Brain,Brain: cytology,Computer Simulation,Diffusion Magnetic Resonance Imaging,Diffusion Magnetic Resonance Imaging: methods,Feasibility Studies,Humans,Image Enhancement,Image Enhancement: methods,Image Interpretation, Computer-Assisted,Image Interpretation, Computer-Assisted: methods,Imaging, Three-Dimensional,Imaging, Three-Dimensional: methods,Information Storage and Retrieval,Information Storage and Retrieval: methods,Models, Neurological,Models, Statistical,Neural Pathways,Neural Pathways: cytology,Pattern Recognition, Automated,Pattern Recognition, Automated: methods,Reproducibility of Results,Sensitivity and Specificity}, pmid = {16926104}, url = {http://www.ncbi.nlm.nih.gov/pubmed/16926104}, year = 2006 } dipy-0.13.0/doc/documentation.rst000066400000000000000000000005571317371701200167210ustar00rootroot00000000000000.. _documentation: Documentation =================== Contents: .. toctree:: :maxdepth: 2 introduction mission installation examples_index faq developers cite devel/index theory/index reference/index reference_cmd/index api_changes Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` dipy-0.13.0/doc/examples/000077500000000000000000000000001317371701200151255ustar00rootroot00000000000000dipy-0.13.0/doc/examples/.gitignore000066400000000000000000000000361317371701200171140ustar00rootroot00000000000000gqs_tracks.npy ten_tracks.npy dipy-0.13.0/doc/examples/README000066400000000000000000000005151317371701200160060ustar00rootroot00000000000000Examples -------- These are the dipy examples. They are built as docs in the dipy ``examples_built`` directory in the documentation. If you add an example (yes please!), please remember to add it to the ``valid_examples.txt`` file located in this directory and to the ``examples_index.rst`` file listing in the ``doc`` directory. dipy-0.13.0/doc/examples/affine_registration_3d.py000066400000000000000000000245111317371701200221120ustar00rootroot00000000000000""" ========================================== Affine Registration in 3D ========================================== This example explains how to compute an affine transformation to register two 3D volumes by maximization of their Mutual Information [Mattes03]_. The optimization strategy is similar to that implemented in ANTS [Avants11]_. """ import numpy as np from dipy.viz import regtools from dipy.data import fetch_stanford_hardi, read_stanford_hardi from dipy.data.fetcher import fetch_syn_data, read_syn_data from dipy.align.imaffine import (transform_centers_of_mass, AffineMap, MutualInformationMetric, AffineRegistration) from dipy.align.transforms import (TranslationTransform3D, RigidTransform3D, AffineTransform3D) """ Let's fetch two b0 volumes, the static image will be the b0 from the Stanford HARDI dataset """ fetch_stanford_hardi() nib_stanford, gtab_stanford = read_stanford_hardi() static = np.squeeze(nib_stanford.get_data())[..., 0] static_grid2world = nib_stanford.affine """ Now the moving image """ fetch_syn_data() nib_syn_t1, nib_syn_b0 = read_syn_data() moving = np.array(nib_syn_b0.get_data()) moving_grid2world = nib_syn_b0.affine """ We can see that the images are far from aligned by drawing one on top of the other. 
The images don't even have the same number of voxels, so in order to draw one on top of the other we need to resample the moving image on a grid of the same dimensions as the static image, we can do this by "transforming" the moving image using an identity transform """ identity = np.eye(4) affine_map = AffineMap(identity, static.shape, static_grid2world, moving.shape, moving_grid2world) resampled = affine_map.transform(moving) regtools.overlay_slices(static, resampled, None, 0, "Static", "Moving", "resampled_0.png") regtools.overlay_slices(static, resampled, None, 1, "Static", "Moving", "resampled_1.png") regtools.overlay_slices(static, resampled, None, 2, "Static", "Moving", "resampled_2.png") """ .. figure:: resampled_0.png :align: center .. figure:: resampled_1.png :align: center .. figure:: resampled_2.png :align: center Input images before alignment. """ """ We can obtain a very rough (and fast) registration by just aligning the centers of mass of the two images """ c_of_mass = transform_centers_of_mass(static, static_grid2world, moving, moving_grid2world) """ We can now transform the moving image and draw it on top of the static image, registration is not likely to be good, but at least they will occupy roughly the same space """ transformed = c_of_mass.transform(moving) regtools.overlay_slices(static, transformed, None, 0, "Static", "Transformed", "transformed_com_0.png") regtools.overlay_slices(static, transformed, None, 1, "Static", "Transformed", "transformed_com_1.png") regtools.overlay_slices(static, transformed, None, 2, "Static", "Transformed", "transformed_com_2.png") """ .. figure:: transformed_com_0.png :align: center .. figure:: transformed_com_1.png :align: center .. figure:: transformed_com_2.png :align: center Registration result by aligning the centers of mass of the images. """ """ This was just a translation of the moving image towards the static image, now we will refine it by looking for an affine transform. We first create the similarity metric (Mutual Information) to be used. We need to specify the number of bins to be used to discretize the joint and marginal probability distribution functions (PDF), a typical value is 32. We also need to specify the percentage (an integer in (0, 100]) of voxels to be used for computing the PDFs, the most accurate registration will be obtained by using all voxels, but it is also the most time-consuming choice. We specify full sampling by passing None instead of an integer """ nbins = 32 sampling_prop = None metric = MutualInformationMetric(nbins, sampling_prop) """ To avoid getting stuck at local optima, and to accelerate convergence, we use a multi-resolution strategy (similar to ANTS [Avants11]_) by building a Gaussian Pyramid. To have as much flexibility as possible, the user can specify how this Gaussian Pyramid is built. First of all, we need to specify how many resolutions we want to use. This is indirectly specified by just providing a list of the number of iterations we want to perform at each resolution. Here we will just specify 3 resolutions and a large number of iterations, 10000 at the coarsest resolution, 1000 at the medium resolution and 100 at the finest. These are the default settings """ level_iters = [10000, 1000, 100] """ To compute the Gaussian pyramid, the original image is first smoothed at each level of the pyramid using a Gaussian kernel with the requested sigma. A good initial choice is [3.0, 1.0, 0.0], this is the default """ sigmas = [3.0, 1.0, 0.0] """ Now we specify the sub-sampling factors. 
A good configuration is [4, 2, 1], which means that, if the original image shape was (nx, ny, nz) voxels, then the shape of the coarsest image will be about (nx//4, ny//4, nz//4), the shape in the middle resolution will be about (nx//2, ny//2, nz//2) and the image at the finest scale has the same size as the original image. This set of factors is the default """ factors = [4, 2, 1] """ Now we go ahead and instantiate the registration class with the configuration we just prepared """ affreg = AffineRegistration(metric=metric, level_iters=level_iters, sigmas=sigmas, factors=factors) """ Using AffineRegistration we can register our images in as many stages as we want, providing previous results as initialization for the next (the same logic as in ANTS). The reason why it is useful is that registration is a non-convex optimization problem (it may have more than one local optima), which means that it is very important to initialize as close to the solution as possible. For example, lets start with our (previously computed) rough transformation aligning the centers of mass of our images, and then refine it in three stages. First look for an optimal translation. The dictionary regtransforms contains all available transforms, we obtain one of them by providing its name and the dimension (either 2 or 3) of the image we are working with (since we are aligning volumes, the dimension is 3) """ transform = TranslationTransform3D() params0 = None starting_affine = c_of_mass.affine translation = affreg.optimize(static, moving, transform, params0, static_grid2world, moving_grid2world, starting_affine=starting_affine) """ If we look at the result, we can see that this translation is much better than simply aligning the centers of mass """ transformed = translation.transform(moving) regtools.overlay_slices(static, transformed, None, 0, "Static", "Transformed", "transformed_trans_0.png") regtools.overlay_slices(static, transformed, None, 1, "Static", "Transformed", "transformed_trans_1.png") regtools.overlay_slices(static, transformed, None, 2, "Static", "Transformed", "transformed_trans_2.png") """ .. figure:: transformed_trans_0.png :align: center .. figure:: transformed_trans_1.png :align: center .. figure:: transformed_trans_2.png :align: center Registration result by translating the moving image, using Mutual Information. """ """ Now lets refine with a rigid transform (this may even modify our previously found optimal translation) """ transform = RigidTransform3D() params0 = None starting_affine = translation.affine rigid = affreg.optimize(static, moving, transform, params0, static_grid2world, moving_grid2world, starting_affine=starting_affine) """ This produces a slight rotation, and the images are now better aligned """ transformed = rigid.transform(moving) regtools.overlay_slices(static, transformed, None, 0, "Static", "Transformed", "transformed_rigid_0.png") regtools.overlay_slices(static, transformed, None, 1, "Static", "Transformed", "transformed_rigid_1.png") regtools.overlay_slices(static, transformed, None, 2, "Static", "Transformed", "transformed_rigid_2.png") """ .. figure:: transformed_rigid_0.png :align: center .. figure:: transformed_rigid_1.png :align: center .. figure:: transformed_rigid_2.png :align: center Registration result with a rigid transform, using Mutual Information. 
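If needed, the estimated rigid transform can be inspected and stored for later reuse. This is only a minimal sketch (the output file name is an example, not part of this tutorial); ``rigid`` is the ``AffineMap`` returned by the optimizer above and its ``affine`` attribute holds the 4x4 matrix in homogeneous coordinates::

    print(rigid.affine)                           # 4x4 rigid-body transform
    np.savetxt('rigid_affine.txt', rigid.affine)  # save the matrix for later reuse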
""" """ Finally, lets refine with a full affine transform (translation, rotation, scale and shear), it is safer to fit more degrees of freedom now, since we must be very close to the optimal transform """ transform = AffineTransform3D() params0 = None starting_affine = rigid.affine affine = affreg.optimize(static, moving, transform, params0, static_grid2world, moving_grid2world, starting_affine=starting_affine) """ This results in a slight shear and scale """ transformed = affine.transform(moving) regtools.overlay_slices(static, transformed, None, 0, "Static", "Transformed", "transformed_affine_0.png") regtools.overlay_slices(static, transformed, None, 1, "Static", "Transformed", "transformed_affine_1.png") regtools.overlay_slices(static, transformed, None, 2, "Static", "Transformed", "transformed_affine_2.png") """ .. figure:: transformed_affine_0.png :align: center .. figure:: transformed_affine_1.png :align: center .. figure:: transformed_affine_2.png :align: center Registration result with an affine transform, using Mutual Information. .. [Mattes03] Mattes, D., Haynor, D. R., Vesselle, H., Lewellen, T. K., Eubank, W. (2003). PET-CT image registration in the chest using free-form deformations. IEEE Transactions on Medical Imaging, 22(1), 120-8. .. [Avants11] Avants, B. B., Tustison, N., & Song, G. (2011). Advanced Normalization Tools ( ANTS ), 1-35. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/brain_extraction_dwi.py000066400000000000000000000057671317371701200217140ustar00rootroot00000000000000""" =================================== Brain segmentation with median_otsu =================================== We show how to extract brain information and mask from a b0 image using dipy_'s ``segment.mask`` module. First import the necessary modules: """ import numpy as np import nibabel as nib """ Download and read the data for this tutorial. The ``scil_b0`` dataset contains different data from different companies and models. For this example, the data comes from a 1.5 Tesla Siemens MRI. """ from dipy.data.fetcher import fetch_scil_b0, read_siemens_scil_b0 fetch_scil_b0() img = read_siemens_scil_b0() data = np.squeeze(img.get_data()) """ ``img`` contains a nibabel Nifti1Image object. Data is the actual brain data as a numpy ndarray. Segment the brain using DIPY's ``mask`` module. ``median_otsu`` returns the segmented brain data and a binary mask of the brain. It is possible to fine tune the parameters of ``median_otsu`` (``median_radius`` and ``num_pass``) if extraction yields incorrect results but the default parameters work well on most volumes. For this example, we used 2 as ``median_radius`` and 1 as ``num_pass`` """ from dipy.segment.mask import median_otsu b0_mask, mask = median_otsu(data, 2, 1) """ Saving the segmentation results is very easy using nibabel. We need the ``b0_mask``, and the binary mask volumes. The affine matrix which transform the image's coordinates to the world coordinates is also needed. Here, we choose to save both images in ``float32``. """ mask_img = nib.Nifti1Image(mask.astype(np.float32), img.affine) b0_img = nib.Nifti1Image(b0_mask.astype(np.float32), img.affine) fname = 'se_1.5t' nib.save(mask_img, fname + '_binary_mask.nii.gz') nib.save(b0_img, fname + '_mask.nii.gz') """ Quick view of the results middle slice using ``matplotlib``. 
""" import matplotlib.pyplot as plt from dipy.core.histeq import histeq sli = data.shape[2] // 2 plt.figure('Brain segmentation') plt.subplot(1, 2, 1).set_axis_off() plt.imshow(histeq(data[:, :, sli].astype('float')).T, cmap='gray', origin='lower') plt.subplot(1, 2, 2).set_axis_off() plt.imshow(histeq(b0_mask[:, :, sli].astype('float')).T, cmap='gray', origin='lower') plt.savefig('median_otsu.png') """ .. figure:: median_otsu.png :align: center An application of median_otsu for brain segmentation. ``median_otsu`` can also automatically crop the outputs to remove the largest possible number of background voxels. This makes outputted data significantly smaller. Auto-cropping in ``median_otsu`` is activated by setting the ``autocrop`` parameter to ``True``. """ b0_mask_crop, mask_crop = median_otsu(data, 4, 4, autocrop=True) """ Saving cropped data using nibabel as demonstrated previously. """ mask_img_crop = nib.Nifti1Image(mask_crop.astype(np.float32), img.affine) b0_img_crop = nib.Nifti1Image( b0_mask_crop.astype(np.float32), img.affine) nib.save(mask_img_crop, fname + '_binary_mask_crop.nii.gz') nib.save(b0_img_crop, fname + '_mask_crop.nii.gz') """ .. include:: ../links_names.inc """dipy-0.13.0/doc/examples/bundle_registration.py000066400000000000000000000056371317371701200215550ustar00rootroot00000000000000""" ========================== Direct Bundle Registration ========================== This example explains how you can register two bundles from two different subjects directly in the space of streamlines [Garyfallidis15]_, [Garyfallidis14]_. To show the concept we will use two pre-saved cingulum bundles. The algorithm used here is called Streamline-based Linear Registration (SLR) [Garyfallidis15]_. """ from dipy.viz import fvtk from time import sleep from dipy.data import two_cingulum_bundles cb_subj1, cb_subj2 = two_cingulum_bundles() from dipy.align.streamlinear import StreamlineLinearRegistration from dipy.tracking.streamline import set_number_of_points """ An important step before running the registration is to resample the streamlines so that they both have the same number of points per streamline. Here we will use 20 points. This step is not optional. Inputting streamlines with different number of points will break the theoretical advantages of using the SLR as explained in [Garyfallidis15]_. """ cb_subj1 = set_number_of_points(cb_subj1, 20) cb_subj2 = set_number_of_points(cb_subj2, 20) """ Let's say now that we want to move the ``cb_subj2`` (moving) so that it can be aligned with ``cb_subj1`` (static). Here is how this is done. """ srr = StreamlineLinearRegistration() srm = srr.optimize(static=cb_subj1, moving=cb_subj2) """ After the optimization is finished we can apply the transformation to ``cb_subj2``. """ cb_subj2_aligned = srm.transform(cb_subj2) def show_both_bundles(bundles, colors=None, show=False, fname=None): ren = fvtk.ren() ren.SetBackground(1., 1, 1) for (i, bundle) in enumerate(bundles): color = colors[i] lines = fvtk.streamtube(bundle, color, linewidth=0.3) lines.RotateX(-90) lines.RotateZ(90) fvtk.add(ren, lines) if show: fvtk.show(ren) if fname is not None: sleep(1) fvtk.record(ren, n_frames=1, out_path=fname, size=(900, 900)) show_both_bundles([cb_subj1, cb_subj2], colors=[fvtk.colors.orange, fvtk.colors.red], fname='before_registration.png') """ .. figure:: before_registration.png :align: center Before bundle registration. """ show_both_bundles([cb_subj1, cb_subj2_aligned], colors=[fvtk.colors.orange, fvtk.colors.red], fname='after_registration.png') """ .. 
figure:: after_registration.png :align: center After bundle registration. As you can see the two cingulum bundles are well aligned although they contain many streamlines of different length and shape. .. [Garyfallidis15] Garyfallidis et. al, "Robust and efficient linear registration of white-matter fascicles in the space of streamlines", Neuroimage, 117:124-140, 2015. .. [Garyfallidis14] Garyfallidis et. al, "Direct native-space fiber bundle alignment for group comparisons", ISMRM, 2014. """ dipy-0.13.0/doc/examples/combined_workflow_creation.py000066400000000000000000000110051317371701200230720ustar00rootroot00000000000000""" ============================================================ Creating a new combined workflow. ============================================================ A ``CombinedWorkflow`` is a series of dipy_ workflows organized together in a way that the output of a workflow serves as input for the next one. """ """ First create your ``CombinedWorkflow`` class. Your ``CombinedWorkflow`` class file is usually located in the ``dipy/workflows`` directory. """ from dipy.workflows.combined_workflow import CombinedWorkflow """ ``CombinedWorkflow`` is the base class that will be extended to create our combined workflow. """ from dipy.workflows.denoise import NLMeansFlow from dipy.workflows.segment import MedianOtsuFlow """ ``MedianOtsuFlow`` and ``NLMeansFlow`` will be combined to create our processing section. """ class DenoiseAndSegment(CombinedWorkflow): """ ``DenoiseAndSegment`` is the name of our combined workflow. Note that it needs to extend CombinedWorkflow for everything to work properly. """ def _get_sub_flows(self): return [ NLMeansFlow, MedianOtsuFlow ] """ It is mandatory to implement this method if you want to make all the sub workflows parameters available in commandline. """ def run(self, input_files, out_dir='', out_file='processed.nii.gz'): """ Parameters ---------- input_files : string Path to the input files. This path may contain wildcards to process multiple inputs at once. out_dir : string, optional Where the resulting file will be saved. (default '') out_file : string, optional Name of the result file to be saved. (default 'processed.nii.gz') """ """ Just like a normal workflow, it is mandatory to have out_dir as a parameter. It is also mandatory to put 'out_' in front of every parameter that is going to be an output. Lastly, all out_ params needs to be at the end of the params list. The class docstring part is very important, you need to document every parameter as they will be used with inspection to build the command line argument parser. """ io_it = self.get_io_iterator() for in_file, out_file in io_it: nl_flow = NLMeansFlow() self.run_sub_flow(nl_flow, in_file, out_dir=out_dir) denoised = nl_flow.last_generated_outputs['out_denoised'] me_flow = MedianOtsuFlow() self.run_sub_flow(me_flow, denoised, out_dir=out_dir) """ Use ``self.get_io_iterator()`` in every workflow you create. This creates an ``IOIterator`` object that create output file names and directory structure based on the inputs and some other advanced output strategy parameters. Iterating on the ``IOIterator`` object you created previously you conveniently get all input and output paths for every input file found when globbin the input parameters. In the ``IOIterator`` loop you can see how we create a new ``NLMeans`` workflow then run it using ``self.run_sub_flow``. 
Running it this way will pass any workflow specific parameter that was retreived from the command line and will append the ones you specify as optional parameters (``out_dir`` in this case). Lastly, the outputs paths are retrived using ``workflow.last_generated_outputs``. This allows to use ``denoise`` as the input for the ``MedianOtsuFlow``. """ """ This is it for the combined workflow class! Now to be able to call it easily via command line, you need this last bit of code. It is usually in an executable file located in ``bin``. """ from dipy.workflows.flow_runner import run_flow """ This is the method that will wrap everything that is needed to make a workflow ready then run it. """ if __name__ == "__main__": run_flow(DenoiseAndSegment()) """ This is the only thing needed to make your workflow available through command line. Now just call the script you just made with ``-h`` to see the argparser help text:: python combined_workflow_creation.py --help You should see all your parameters available along with some extra common ones like logging file and force overwrite. Also all the documentation you wrote about each parameter is there. Also note that every sub workflow optional parameter is available. Now call it for real with a nifti file to see the results. Experiment with the parameters and see the results:: python combined_workflow_creation.py volume.nii.gz .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/contextual_enhancement.py000066400000000000000000000252641317371701200222430ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ ============================================== Crossing-preserving contextual enhancement ============================================== This demo presents an example of crossing-preserving contextual enhancement of FOD/ODF fields [Meesters2016]_, implementing the contextual PDE framework of [Portegies2015a]_ for processing HARDI data. The aim is to enhance the alignment of elongated structures in the data such that crossing/junctions are maintained while reducing noise and small incoherent structures. This is achieved via a hypo-elliptic 2nd order PDE in the domain of coupled positions and orientations :math:`\mathbb{R}^3 \rtimes S^2`. This domain carries a non-flat geometrical differential structure that allows including a notion of alignment between neighboring points. Let :math:`({\bf y},{\bf n}) \in \mathbb{R}^3\rtimes S^2` where :math:`{\bf y} \in \mathbb{R}^{3}` denotes the spatial part, and :math:`{\bf n} \in S^2` the angular part. Let :math:`W:\mathbb{R}^3\rtimes S^2\times \mathbb{R}^{+} \to \mathbb{R}` be the function representing the evolution of FOD/ODF field. Then, the contextual PDE with evolution time :math:`t\geq 0` is given by: .. math:: \begin{cases} \frac{\partial}{\partial t} W({\bf y},{\bf n},t) &= ((D^{33}({\bf n} \cdot \nabla)^2 + D^{44} \Delta_{S^2})W)({\bf y},{\bf n},t) \\ W({\bf y},{\bf n},0) &= U({\bf y},{\bf n}) \end{cases}, where: * :math:`D^{33}>0` is the coefficient for the spatial smoothing (which goes only in the direction of :math:`n`); * :math:`D^{44}>0` is the coefficient for the angular smoothing (here :math:`\Delta_{S^2}` denotes the Laplace-Beltrami operator on the sphere :math:`S^2`); * :math:`U:\mathbb{R}^3\rtimes S^2 \to \mathbb{R}` is the initial condition given by the noisy FOD/ODF’s field. This equation is solved via a shift-twist convolution (denoted by :math:`\ast_{\mathbb{R}^3\rtimes S^2}`) with its corresponding kernel :math:`P_t:\mathbb{R}^3\rtimes S^2 \to \mathbb{R}^+`: .. 
math:: W({\bf y},{\bf n},t) = (P_t \ast_{\mathbb{R}^3 \rtimes S^2} U)({\bf y},{\bf n}) = \int_{\mathbb{R}^3} \int_{S^2} P_t (R^T_{{\bf n}^\prime}({\bf y}-{\bf y}^\prime), R^T_{{\bf n}^\prime} {\bf n} ) U({\bf y}^\prime, {\bf n}^\prime) Here, :math:`R_{\bf n}` is any 3D rotation that maps the vector :math:`(0,0,1)` onto :math:`{\bf n}`. Note that the shift-twist convolution differs from a Euclidean convolution and takes into account the non-flat structure of the space :math:`\mathbb{R}^3\rtimes S^2`. The kernel :math:`P_t` has a stochastic interpretation [DuitsAndFranken2011]_. It can be seen as the limiting distribution obtained by accumulating random walks of particles in the position/orientation domain, where in each step the particles can (randomly) move forward/backward along their current orientation, and (randomly) change their orientation. This is an extension to the 3D case of the process for contour enhancement of 2D images. .. figure:: _static/stochastic_process.jpg :scale: 60 % :align: center The random motion of particles (a) and its corresponding probability map (b) in 2D. The 3D kernel is shown on the right. Adapted from [Portegies2015a]_. In practice, as the exact analytical formulas for the kernel :math:`P_t` are unknown, we use the approximation given in [Portegies2015b]_. """ """ The enhancement is evaluated on the Stanford HARDI dataset (150 orientations, b=2000 $s/mm^2$) where Rician noise is added. Constrained spherical deconvolution is used to model the fiber orientations. """ import numpy as np from dipy.data import fetch_stanford_hardi, read_stanford_hardi from dipy.sims.voxel import add_noise from dipy.core.gradients import gradient_table # Read data fetch_stanford_hardi() img, gtab = read_stanford_hardi() data = img.get_data() # Add Rician noise from dipy.segment.mask import median_otsu b0_slice = data[:, :, :, 1] b0_mask, mask = median_otsu(b0_slice) np.random.seed(1) data_noisy = add_noise(data, 10.0, np.mean(b0_slice[mask]), noise_type='rician') # Select a small part of it. padding = 3 # Include a larger region to avoid boundary effects data_small = data[25-padding:40+padding, 65-padding:80+padding, 35:42] data_noisy_small = data_noisy[25-padding:40+padding, 65-padding:80+padding, 35:42] """ Fit an initial model to the data, in this case Constrained Spherical Deconvolution is used. """ # Perform CSD on the original data from dipy.reconst.csdeconv import auto_response from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel response, ratio = auto_response(gtab, data, roi_radius=10, fa_thr=0.7) csd_model_orig = ConstrainedSphericalDeconvModel(gtab, response) csd_fit_orig = csd_model_orig.fit(data_small) csd_shm_orig = csd_fit_orig.shm_coeff # Perform CSD on the original data + noise response, ratio = auto_response(gtab, data_noisy, roi_radius=10, fa_thr=0.7) csd_model_noisy = ConstrainedSphericalDeconvModel(gtab, response) csd_fit_noisy = csd_model_noisy.fit(data_noisy_small) csd_shm_noisy = csd_fit_noisy.shm_coeff """ Inspired by [Rodrigues2010]_, a lookup-table is created, containing rotated versions of the kernel :math:`P_t` sampled over a discrete set of orientations. In order to ensure rotationally invariant processing, the discrete orientations are required to be equally distributed over a sphere. By default, a sphere with 100 directions is used. 
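Once the kernel ``k`` has been created (see the code directly below), its discrete orientation set can be inspected. This is only a quick sanity-check sketch, using the same accessor methods that are also used later in this example::

    orientations = k.get_orientations()     # (N, 3) unit vectors, N=100 for the default sphere
    print(orientations.shape)
    print(k.get_sphere().vertices.shape)    # the matching Sphere object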
""" from dipy.denoise.enhancement_kernel import EnhancementKernel from dipy.denoise.shift_twist_convolution import convolve # Create lookup table D33 = 1.0 D44 = 0.02 t = 1 k = EnhancementKernel(D33, D44, t) """ Visualize the kernel """ from dipy.viz import fvtk from dipy.data import get_sphere from dipy.reconst.shm import sf_to_sh, sh_to_sf ren = fvtk.ren() # convolve kernel with delta spike spike = np.zeros((7, 7, 7, k.get_orientations().shape[0]), dtype=np.float64) spike[3, 3, 3, 0] = 1 spike_shm_conv = convolve(sf_to_sh(spike, k.get_sphere(), sh_order=8), k, sh_order=8, test_mode=True) sphere = get_sphere('symmetric724') spike_sf_conv = sh_to_sf(spike_shm_conv, sphere, sh_order=8) model_kernel = fvtk.sphere_funcs((spike_sf_conv * 6)[3,:,:,:], sphere, norm=False, radial_scale=True) fvtk.add(ren, model_kernel) fvtk.camera(ren, pos=(30, 0, 0), focal=(0, 0, 0), viewup=(0, 0, 1), verbose=False) fvtk.record(ren, out_path='kernel.png', size=(900, 900)) """ .. figure:: kernel.png :align: center Visualization of the contour enhancement kernel. """ """ Shift-twist convolution is applied on the noisy data """ # Perform convolution csd_shm_enh = convolve(csd_shm_noisy, k, sh_order=8) """ The Sharpening Deconvolution Transform is applied to sharpen the ODF field. """ # Sharpen via the Sharpening Deconvolution Transform from dipy.reconst.csdeconv import odf_sh_to_sharp csd_shm_enh_sharp = odf_sh_to_sharp(csd_shm_enh, sphere, sh_order=8, lambda_=0.1) # Convert raw and enhanced data to discrete form csd_sf_orig = sh_to_sf(csd_shm_orig, sphere, sh_order=8) csd_sf_noisy = sh_to_sf(csd_shm_noisy, sphere, sh_order=8) csd_sf_enh = sh_to_sf(csd_shm_enh, sphere, sh_order=8) csd_sf_enh_sharp = sh_to_sf(csd_shm_enh_sharp, sphere, sh_order=8) # Normalize the sharpened ODFs csd_sf_enh_sharp = csd_sf_enh_sharp * np.amax(csd_sf_orig)/np.amax(csd_sf_enh_sharp) * 1.25 """ The end results are visualized. It can be observed that the end result after diffusion and sharpening is closer to the original noiseless dataset. """ csd_sf_orig_slice = csd_sf_orig[padding:-padding, padding:-padding, [3], :] csd_sf_noisy_slice = csd_sf_noisy[padding:-padding, padding:-padding, [3], :] csd_sf_enh_slice = csd_sf_enh[padding:-padding, padding:-padding, [3], :] csd_sf_enh_sharp_slice = csd_sf_enh_sharp[padding:-padding, padding:-padding, [3], :] ren = fvtk.ren() # original ODF field fodf_spheres_org = fvtk.sphere_funcs(csd_sf_orig_slice, sphere, scale=2, norm=False, radial_scale=True) fodf_spheres_org.SetPosition(0, 35, 0) fvtk.add(ren, fodf_spheres_org) # ODF field with added noise fodf_spheres = fvtk.sphere_funcs(csd_sf_noisy_slice, sphere, scale=2, norm=False, radial_scale=True) fodf_spheres.SetPosition(0, 0, 0) fvtk.add(ren, fodf_spheres) # Enhancement of noisy ODF field fodf_spheres_enh = fvtk.sphere_funcs(csd_sf_enh_slice, sphere, scale=2, norm=False, radial_scale=True) fodf_spheres_enh.SetPosition(35, 0, 0) fvtk.add(ren, fodf_spheres_enh) # Additional sharpening fodf_spheres_enh_sharp = fvtk.sphere_funcs(csd_sf_enh_sharp_slice, sphere, scale=2, norm=False, radial_scale=True) fodf_spheres_enh_sharp.SetPosition(35, 35, 0) fvtk.add(ren, fodf_spheres_enh_sharp) fvtk.record(ren, out_path='enhancements.png', size=(900, 900)) """ .. figure:: enhancements.png :align: center The results after enhancements. Top-left: original noiseless data. Bottom-left: original data with added Rician noise (SNR=10). Bottom-right: After enhancement of noisy data. Top-right: After enhancement and sharpening of noisy data. References ---------- .. 
[Meesters2016] S. Meesters, G. Sanguinetti, E. Garyfallidis, J. Portegies, R. Duits. (2016) Fast implementations of contextual PDE’s for HARDI data processing in DIPY. ISMRM 2016 conference. .. [Portegies2015a] J. Portegies, R. Fick, G. Sanguinetti, S. Meesters, G. Girard, and R. Duits. (2015) Improving Fiber Alignment in HARDI by Combining Contextual PDE flow with Constrained Spherical Deconvolution. PLoS One. .. [Portegies2015b] J. Portegies, G. Sanguinetti, S. Meesters, and R. Duits. (2015) New Approximation of a Scale Space Kernel on SE(3) and Applications in Neuroimaging. Fifth International Conference on Scale Space and Variational Methods in Computer Vision. .. [DuitsAndFranken2011] R. Duits and E. Franken (2011) Left-invariant diffusions on the space of positions and orientations and their application to crossing-preserving smoothing of HARDI images. International Journal of Computer Vision, 92:231-264. .. [Rodrigues2010] P. Rodrigues, R. Duits, B. Romeny, A. Vilanova (2010). Accelerated Diffusion Operators for Enhancing DW-MRI. Eurographics Workshop on Visual Computing for Biology and Medicine. The Eurographics Association. """ dipy-0.13.0/doc/examples/denoise_ascm.py000066400000000000000000000130141317371701200201270ustar00rootroot00000000000000""" ============================================================== Denoise images using Adaptive Soft Coefficient Matching (ASCM) ============================================================== The adaptive soft coefficient matching (ASCM), as described in [Coupe11]_, is an improved extension of non-local means (NLMEANS) denoising. ASCM produces a better denoised image from two standard non-local means denoised versions of the original data with different degrees of sharpness. Here, one denoised input is more "smooth" than the other (the easiest way to achieve this is to use ``non_local_means`` with two different patch radii). ASCM involves these basic steps * Computes the wavelet decomposition of the noisy as well as the denoised inputs * Combines the wavelets for the output image in a way that it takes its smoothness (low frequency components) from the input with larger smoothing, and the sharp features (high frequency components) from the input with less smoothing. In this way ASCM gives us a well-denoised output while preserving the sharpness of the image features. Let us load the necessary modules """ import numpy as np import matplotlib.pyplot as plt import nibabel as nib from dipy.data import (fetch_sherbrooke_3shell, read_sherbrooke_3shell) from dipy.denoise.noise_estimate import estimate_sigma from time import time from dipy.denoise.non_local_means import non_local_means from dipy.denoise.adaptive_soft_matching import adaptive_soft_matching """ Choose one of the datasets available in dipy_ """ fetch_sherbrooke_3shell() img, gtab = read_sherbrooke_3shell() data = img.get_data() affine = img.affine mask = data[..., 0] > 80 data = data[..., 1] print("vol size", data.shape) t = time() """ In order to generate the two pre-denoised versions of the data we will use ``non_local_means`` denoising. For ``non_local_means`` we first need to estimate the standard deviation of the noise. We use N=4 since the Sherbrooke dataset was acquired on a 1.5T Siemens scanner with a 4-element array head coil. """ sigma = estimate_sigma(data, N=4) """ For the denoised version of the original data which preserves sharper features, we perform non-local means with a smaller patch size.
""" den_small = non_local_means( data, sigma=sigma, mask=mask, patch_radius=1, block_radius=1, rician=True) """ For the denoised version of the original data that implies more smoothing, we perform non-local means with larger patch size. """ den_large = non_local_means( data, sigma=sigma, mask=mask, patch_radius=2, block_radius=1, rician=True) """ Now we perform the adaptive soft coefficient matching. Empirically we set the adaptive parameter in ascm to be the average of the local noise variance, in this case the sigma itself. """ den_final = adaptive_soft_matching(data, den_small, den_large, sigma[0]) print("total time", time() - t) """ To access the quality of this denoising procedure, we plot the an axial slice of the original data, it's denoised output and residuals. """ axial_middle = data.shape[2] // 2 original = data[:, :, axial_middle].T final_output = den_final[:, :, axial_middle].T difference = np.abs(final_output.astype('f8') - original.astype('f8')) difference[~mask[:, :, axial_middle].T] = 0 fig, ax = plt.subplots(1, 3) ax[0].imshow(original, cmap='gray', origin='lower') ax[0].set_title('Original') ax[1].imshow(final_output, cmap='gray', origin='lower') ax[1].set_title('ASCM output') ax[2].imshow(difference, cmap='gray', origin='lower') ax[2].set_title('Residual') for i in range(3): ax[i].set_axis_off() plt.savefig('denoised_ascm.png', bbox_inches='tight') print("The ascm result saved in denoised_ascm.png") """ .. figure:: denoised_ascm.png :align: center Showing the axial slice without (left) and with (middle) ASCM denoising. """ """ From the above figure we can see that the residual is really uniform in nature which dictates that ASCM denoises the data while preserving the sharpness of the features. """ nib.save(nib.Nifti1Image(den_final, affine), 'denoised_ascm.nii.gz') print("Saving the entire denoised output in denoised_ascm.nii.gz") """ For comparison propose we also plot the outputs of the ``non_local_means`` (both with the larger as well as with the smaller patch radius) with the ASCM output. """ fig, ax = plt.subplots(1, 4) ax[0].imshow(original, cmap='gray', origin='lower') ax[0].set_title('Original') ax[1].imshow(den_small[..., axial_middle].T, cmap='gray', origin='lower', interpolation='none') ax[1].set_title('NLMEANS small') ax[2].imshow(den_large[..., axial_middle].T, cmap='gray', origin='lower', interpolation='none') ax[2].set_title('NLMEANS large') ax[3].imshow(final_output, cmap='gray', origin='lower', interpolation='none') ax[3].set_title('ASCM ') for i in range(4): ax[i].set_axis_off() plt.savefig('ascm_comparison.png', bbox_inches='tight') print("The comparison result saved in ascm_comparison.png") """ .. figure:: ascm_comparison.png :align: center Comparing outputs of the NLMEANS and ASCM. """ """ From the above figure, we can observe that the information of two pre-denoised versions of the raw data, ASCM outperforms standard non-local means in supressing noise and preserving feature sharpness. References ---------- .. [Coupe11] Pierrick Coupe, Jose Manjon, Montserrat Robles, Louis Collins. Adaptive Multiresolution Non-Local Means Filter for 3D MR Image Denoising. IET Image Processing, Institution of Engineering and Technology, 2011. <00645538> .. 
include:: ../links_names.inc """ dipy-0.13.0/doc/examples/denoise_localpca.py000066400000000000000000000073201317371701200207650ustar00rootroot00000000000000""" =============================== Denoise images using Local PCA =============================== The local PCA based denoising algorithm [Manjon2013]_ is an effective denoising method because it takes into account the directional information in diffusion data. The basic idea behind local PCA based diffusion denoising can be explained in the following three basic steps: * First, we estimate the local noise variance at each voxel. * Then, we apply PCA in local patches around each voxel over the gradient directions. * Finally, we threshold the eigenvalues based on the local estimate of sigma and then do a PCA reconstruction. Let's load the necessary modules """ import numpy as np import nibabel as nib import matplotlib.pyplot as plt from time import time from dipy.denoise.localpca import localpca from dipy.denoise.pca_noise_estimate import pca_noise_estimate from dipy.data import read_isbi2013_2shell """ Load one of the datasets. These data were acquired with 63 gradients and 1 non-diffusion (b=0) image. """ img, gtab = read_isbi2013_2shell() data = img.get_data() affine = img.get_affine() print("Input Volume", data.shape) """ We use the ``pca_noise_estimate`` method to estimate the value of sigma to be used in the local PCA algorithm. It takes both the data and the gradient table object as input and returns an estimate of the local noise standard deviation as a 3D array. We return a smoothed version, where a Gaussian filter with radius 3 voxels has been applied to the estimate of the noise before returning it. We correct for the bias due to Rician noise, based on an equation developed by Koay and Basser [Koay2006]_. """ t = time() sigma = pca_noise_estimate(data, gtab, correct_bias=True, smooth=3) print("Sigma estimation time", time() - t) """ Perform the local PCA denoising using the function ``localpca``. The algorithm takes the directional information in the diffusion MR data into account: it performs PCA on a local 4D patch, thresholds the eigenvalues using the local variance estimate obtained from the noise estimation function, and reconstructs the patch from the thresholded decomposition to obtain the denoised estimate. """ t = time() denoised_arr = localpca(data, sigma=sigma, patch_radius=2) print("Time taken for local PCA (slow)", -t + time()) """ Let us plot the middle axial slice of the original and denoised data, for the middle gradient direction. """ sli = data.shape[2] // 2 gra = data.shape[3] // 2 orig = data[:, :, sli, gra] den = denoised_arr[:, :, sli, gra] rms_diff = np.sqrt((orig - den) ** 2) fig, ax = plt.subplots(1, 3) ax[0].imshow(orig, cmap='gray', origin='lower', interpolation='none') ax[0].set_title('Original') ax[0].set_axis_off() ax[1].imshow(den, cmap='gray', origin='lower', interpolation='none') ax[1].set_title('Denoised Output') ax[1].set_axis_off() ax[2].imshow(rms_diff, cmap='gray', origin='lower', interpolation='none') ax[2].set_title('Residual') ax[2].set_axis_off() plt.savefig('denoised_localpca.png', bbox_inches='tight') print("The result saved in denoised_localpca.png") """ .. figure:: denoised_localpca.png :align: center Showing the middle axial slice of the local PCA denoised output. """ nib.save(nib.Nifti1Image(denoised_arr, affine), 'denoised_localpca.nii.gz') print("Entire denoised data saved in denoised_localpca.nii.gz") """ ..
[Manjon2013] Manjon JV, Coupe P, Concha L, Buades A, Collins DL "Diffusion Weighted Image Denoising Using Overcomplete Local PCA" (2013). PLoS ONE 8(9): e73021. doi:10.1371/journal.pone.0073021. .. [Koay2006] Koay CG, Basser PJ (2006). "Analytically exact correction scheme for signal extraction from noisy magnitude MR signals". JMR 179: 317-322. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/denoise_nlmeans.py000066400000000000000000000054231317371701200206460ustar00rootroot00000000000000""" ============================================== Denoise images using Non-Local Means (NLMEANS) ============================================== Using the non-local means filter [Coupe08]_ [Coupe11]_ you can denoise 3D or 4D images and boost the SNR of your datasets. You can also decide between modeling the noise as Gaussian or Rician (default). """ import numpy as np import nibabel as nib import matplotlib.pyplot as plt from time import time from dipy.denoise.nlmeans import nlmeans from dipy.denoise.noise_estimate import estimate_sigma from dipy.data import fetch_sherbrooke_3shell, read_sherbrooke_3shell fetch_sherbrooke_3shell() img, gtab = read_sherbrooke_3shell() data = img.get_data() affine = img.affine mask = data[..., 0] > 80 # We select only one volume for the example to run quickly. data = data[..., 1] print("vol size", data.shape) """ In order to call ``nlmeans`` we first need to estimate the standard deviation of the noise. We use N=4 since the Sherbrooke dataset was acquired on a 1.5T Siemens scanner with a 4-element array head coil. """ sigma = estimate_sigma(data, N=4) """ Calling the main function ``nlmeans``. """ t = time() den = nlmeans(data, sigma=sigma, mask=mask, patch_radius=1, block_radius=1, rician=True) print("total time", time() - t) """ Let us plot the middle axial slice of the denoised output """ axial_middle = data.shape[2] // 2 before = data[:, :, axial_middle].T after = den[:, :, axial_middle].T difference = np.abs(after.astype('f8') - before.astype('f8')) difference[~mask[:, :, axial_middle].T] = 0 fig, ax = plt.subplots(1, 3) ax[0].imshow(before, cmap='gray', origin='lower') ax[0].set_title('before') ax[1].imshow(after, cmap='gray', origin='lower') ax[1].set_title('after') ax[2].imshow(difference, cmap='gray', origin='lower') ax[2].set_title('difference') plt.savefig('denoised.png', bbox_inches='tight') """ .. figure:: denoised.png :align: center **Axial slice before (left) and after (middle) NLMEANS denoising, and their difference (right)** """ nib.save(nib.Nifti1Image(den, affine), 'denoised.nii.gz') """ An improved version of non-local means denoising is adaptive soft coefficient matching; please refer to :ref:`example_denoise_ascm` for more details. References ---------- .. [Coupe08] P. Coupe, P. Yger, S. Prima, P. Hellier, C. Kervrann, C. Barillot, "An Optimized Blockwise Non Local Means Denoising Filter for 3D Magnetic Resonance Images", IEEE Transactions on Medical Imaging, 27(4):425-441, 2008 .. [Coupe11] Pierrick Coupe, Jose Manjon, Montserrat Robles, Louis Collins. "Adaptive Multiresolution Non-Local Means Filter for 3D MR Image Denoising" IET Image Processing, Institution of Engineering and Technology, 2011 ..
include:: ../links_names.inc """ dipy-0.13.0/doc/examples/deterministic_fiber_tracking.py000066400000000000000000000061431317371701200233770ustar00rootroot00000000000000""" ============================================================= An introduction to the Deterministic Maximum Direction Getter ============================================================= Deterministic maximum direction getter is the deterministic version of the probabilistic direction getter. It can be used with the same local models and has the same parameters. Deterministic maximum fiber tracking follows the trajectory of the most probable pathway within the tracking constraint (e.g. max angle). In other words, it follows the direction with the highest probability from a distribution, as opposed to the probabilistic direction getter which draws the direction from the distribution. Therefore, the maximum deterministic direction getter is equivalent to the probabilistic direction getter returning always the maximum value of the distribution. Deterministic maximum fiber tracking is an alternative to EuDX deterministic tractography and unlike EuDX does not follow the peaks of the local models but uses the entire orientation distributions. This example is an extension of the :ref:`example_probabilistic_fiber_tracking` example. We begin by loading the data and fitting a Constrained Spherical Deconvolution (CSD) reconstruction model. """ from dipy.data import read_stanford_labels from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel from dipy.tracking import utils from dipy.tracking.local import (ThresholdTissueClassifier, LocalTracking) hardi_img, gtab, labels_img = read_stanford_labels() data = hardi_img.get_data() labels = labels_img.get_data() affine = hardi_img.affine seed_mask = labels == 2 white_matter = (labels == 1) | (labels == 2) seeds = utils.seeds_from_mask(seed_mask, density=1, affine=affine) csd_model = ConstrainedSphericalDeconvModel(gtab, None, sh_order=6) csd_fit = csd_model.fit(data, mask=white_matter) """ We use the fractional anisotropy (FA) of the DTI model to build a tissue classifier. """ import dipy.reconst.dti as dti from dipy.reconst.dti import fractional_anisotropy tensor_model = dti.TensorModel(gtab) tenfit = tensor_model.fit(data, mask=white_matter) FA = fractional_anisotropy(tenfit.evals) classifier = ThresholdTissueClassifier(FA, .2) """ The Fiber Orientation Distribution (FOD) of the CSD model estimates the distribution of small fiber bundles within each voxel. This distribution can be used for deterministic fiber tracking. As for probabilistic tracking, there are many ways to provide those distributions to the deterministic maximum direction getter. Here, the spherical harmonic representation of the FOD is used. 
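Before moving on, the difference between the two direction getters can be pictured with a toy probability mass function (PMF). The snippet below is purely illustrative, with made-up numbers; it is not how DIPY represents or evaluates the FOD internally::

    import numpy as np

    # Hypothetical PMF over four candidate tracking directions.
    pmf = np.array([0.1, 0.5, 0.3, 0.1])

    # Probabilistic direction getter: draw a direction at random from the PMF.
    sampled_direction = np.random.choice(len(pmf), p=pmf)

    # Deterministic maximum direction getter: always take the peak of the PMF.
    maximum_direction = int(np.argmax(pmf))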
""" from dipy.data import default_sphere from dipy.direction import DeterministicMaximumDirectionGetter from dipy.io.trackvis import save_trk detmax_dg = DeterministicMaximumDirectionGetter.from_shcoeff(csd_fit.shm_coeff, max_angle=30., sphere=default_sphere) streamlines = LocalTracking(detmax_dg, classifier, seeds, affine, step_size=.5) save_trk("deterministic_maximum_shm_coeff.trk", streamlines, affine, labels.shape) dipy-0.13.0/doc/examples/fiber_to_bundle_coherence.py000066400000000000000000000250571317371701200226450ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ ================================== Fiber to bundle coherence measures ================================== This demo presents the fiber to bundle coherence (FBC) quantitative measure of the alignment of each fiber with the surrounding fiber bundles [Meesters2016]_. These measures are useful in “cleaning” the results of tractography algorithms, since low FBCs indicate which fibers are isolated and poorly aligned with their neighbors, as shown in the figure below. .. _fiber_to_bundle_coherence: .. figure:: _static/fbc_illustration.png :scale: 60 % :align: center On the left this figure illustrates (in 2D) the contribution of two fiber points to the kernel density estimator. The kernel density estimator is the sum over all such locally aligned kernels. The local fiber to bundle coherence shown on the right color-coded for each fiber, is obtained by evaluating the kernel density estimator along the fibers. One spurious fiber is present which is isolated and badly aligned with the other fibers, and can be identified by a low LFBC value in the region where it deviates from the bundle. Figure adapted from [Portegies2015]_. Here we implement FBC measures based on kernel density estimation in the non-flat 5D position-orientation domain. First we compute the kernel density estimator induced by the full lifted output (defined in the space of positions and orientations) of the tractography. Then, the Local FBC (LFBC) is the result of evaluating the estimator along each element of the lifted fiber. A whole fiber measure, the relative FBC (RFBC), is calculated by the minimum of the moving average LFBC along the fiber. Details of the computation of FBC can be found in [Portegies2015]_. """ """ The FBC measures are evaluated on the Stanford HARDI dataset (150 orientations, b=2000 $s/mm^2$) which is one of the standard example datasets in dipy_. """ import numpy as np from dipy.data import (read_stanford_labels, fetch_stanford_t1, read_stanford_t1) # Fix seed np.random.seed(1) # Read data hardi_img, gtab, labels_img = read_stanford_labels() data = hardi_img.get_data() labels = labels_img.get_data() affine = hardi_img.affine fetch_stanford_t1() t1 = read_stanford_t1() t1_data = t1.get_data() # Select a relevant part of the data (left hemisphere) # Coordinates given in x bounds, y bounds, z bounds dshape = data.shape[:-1] xa, xb, ya, yb, za, zb = [15, 42, 10, 65, 18, 65] data_small = data[xa:xb, ya:yb, za:zb] selectionmask = np.zeros(dshape, 'bool') selectionmask[xa:xb, ya:yb, za:zb] = True """ The data is first fitted to Constant Solid Angle (CDA) ODF Model. CSA is a good choice to estimate general fractional anisotropy (GFA), which the tissue classifier can use to restrict fiber tracking to those areas where the ODF shows significant restricted diffusion, thus creating a region-of-interest in which the computations are done. 
""" # Perform CSA from dipy.reconst.shm import CsaOdfModel from dipy.data import default_sphere from dipy.direction import peaks_from_model csa_model = CsaOdfModel(gtab, sh_order=6) csa_peaks = peaks_from_model(csa_model, data, default_sphere, relative_peak_threshold=.6, min_separation_angle=45, mask=selectionmask) # Tissue classifier from dipy.tracking.local import ThresholdTissueClassifier classifier = ThresholdTissueClassifier(csa_peaks.gfa, 0.25) """ In order to perform probabilistic fiber tracking we first fit the data to the Constrained Spherical Deconvolution (CSD) model in DIPY. This model represents each voxel in the data set as a collection of small white matter fibers with different orientations. The density of fibers along each orientation is known as the Fiber Orientation Distribution (FOD), used in the fiber tracking. """ # Perform CSD on the original data from dipy.reconst.csdeconv import auto_response from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel response, ratio = auto_response(gtab, data, roi_radius=10, fa_thr=0.7) csd_model = ConstrainedSphericalDeconvModel(gtab, response) csd_fit = csd_model.fit(data_small) csd_fit_shm = np.lib.pad(csd_fit.shm_coeff, ((xa, dshape[0]-xb), (ya, dshape[1]-yb), (za, dshape[2]-zb), (0, 0)), 'constant') # Probabilistic direction getting for fiber tracking from dipy.direction import ProbabilisticDirectionGetter prob_dg = ProbabilisticDirectionGetter.from_shcoeff(csd_fit_shm, max_angle=30., sphere=default_sphere) """ The optic radiation is reconstructed by tracking fibers from the calcarine sulcus (visual cortex V1) to the lateral geniculate nucleus (LGN). We seed from the calcarine sulcus by selecting a region-of-interest (ROI) cube of dimensions 3x3x3 voxels. """ # Set a seed region region for tractography. from dipy.tracking import utils mask = np.zeros(data.shape[:-1], 'bool') rad = 3 mask[26-rad:26+rad, 29-rad:29+rad, 31-rad:31+rad] = True seeds = utils.seeds_from_mask(mask, density=[4, 4, 4], affine=affine) """ Local Tracking is used for probabilistic tractography which takes the direction getter along with the classifier and seeds as input. """ # Perform tracking using Local Tracking from dipy.tracking.local import LocalTracking streamlines = LocalTracking(prob_dg, classifier, seeds, affine, step_size=.5) # Compute streamlines and store as a list. streamlines = list(streamlines) """ In order to select only the fibers that enter into the LGN, another ROI is created from a cube of size 5x5x5 voxels. The near_roi command is used to find the fibers that traverse through this ROI. """ # Set a mask for the lateral geniculate nucleus (LGN) mask_lgn = np.zeros(data.shape[:-1], 'bool') rad = 5 mask_lgn[35-rad:35+rad, 42-rad:42+rad, 28-rad:28+rad] = True # Select all the fibers that enter the LGN and discard all others filtered_fibers2 = utils.near_roi(streamlines, mask_lgn, tol=1.8, affine=affine) sfil = [] for i in range(len(streamlines)): if filtered_fibers2[i]: sfil.append(streamlines[i]) streamlines = list(sfil) """ Inspired by [Rodrigues2010]_, a lookup-table is created, containing rotated versions of the fiber propagation kernel :math:`P_t` [DuitsAndFranken2011]_ rotated over a discrete set of orientations. See the `Contextual enhancement example `_ for more details regarding the kernel. In order to ensure rotationally invariant processing, the discrete orientations are required to be equally distributed over a sphere. By default, a sphere with 100 directions is used obtained from electrostatic repulsion in DIPY. 
""" # Compute lookup table from dipy.denoise.enhancement_kernel import EnhancementKernel D33 = 1.0 D44 = 0.02 t = 1 k = EnhancementKernel(D33, D44, t) """ The FBC measures are now computed, taking the tractography results and the lookup tables as input. """ # Apply FBC measures from dipy.tracking.fbcmeasures import FBCMeasures fbc = FBCMeasures(streamlines, k) """ After calculating the FBC measures, a threshold can be chosen on the relative FBC (RFBC) in order to remove spurious fibers. Recall that the relative FBC (RFBC) is calculated by the minimum of the moving average LFBC along the fiber. In this example we show the results for threshold 0 (i.e. all fibers are included) and 0.2 (removing the 20 percent most spurious fibers). """ # Calculate LFBC for original fibers fbc_sl_orig, clrs_orig, rfbc_orig = \ fbc.get_points_rfbc_thresholded(0, emphasis=0.01) # Apply a threshold on the RFBC to remove spurious fibers fbc_sl_thres, clrs_thres, rfbc_thres = \ fbc.get_points_rfbc_thresholded(0.125, emphasis=0.01) """ The results of FBC measures are visualized, showing the original fibers colored by LFBC (see :ref:`optic_radiation_before_cleaning`), and the fibers after the cleaning procedure via RFBC thresholding (see :ref:`optic_radiation_after_cleaning`). """ # Visualize the results from dipy.viz import fvtk, actor # Create renderer ren = fvtk.ren() # Original lines colored by LFBC lineactor = actor.line(fbc_sl_orig, clrs_orig, linewidth=0.2) fvtk.add(ren, lineactor) # Horizontal (axial) slice of T1 data vol_actor1 = fvtk.slicer(t1_data, affine=affine) vol_actor1.display(None, None, 20) fvtk.add(ren, vol_actor1) # Vertical (sagittal) slice of T1 data vol_actor2 = fvtk.slicer(t1_data, affine=affine) vol_actor2.display(35, None, None) fvtk.add(ren, vol_actor2) # Show original fibers fvtk.camera(ren, pos=(-264, 285, 155), focal=(0, -14, 9), viewup=(0, 0, 1), verbose=False) fvtk.record(ren, n_frames=1, out_path='OR_before.png', size=(900, 900)) # Show thresholded fibers fvtk.rm(ren, lineactor) fvtk.add(ren, actor.line(fbc_sl_thres, clrs_thres, linewidth=0.2)) fvtk.record(ren, n_frames=1, out_path='OR_after.png', size=(900, 900)) """ .. _optic_radiation_before_cleaning: .. figure:: OR_before.png :align: center The optic radiation obtained through probabilistic tractography colored by local fiber to bundle coherence. .. _optic_radiation_after_cleaning: .. figure:: OR_after.png :align: center The tractography result is cleaned (shown in bottom) by removing fibers with a relative FBC (RFBC) lower than the threshold :math:`\tau = 0.2`. Acknowledgments --------------- The techniques are developed in close collaboration with Pauly Ossenblok of the Academic Center of Epileptology Kempenhaeghe & Maastricht UMC+. References ---------- .. [Meesters2016] S. Meesters, G. Sanguinetti, E. Garyfallidis, J. Portegies, P. Ossenblok, R. Duits. (2016) Cleaning output of tractography via fiber to bundle coherence, a new open source implementation. Human Brain Mapping Conference 2016. .. [Portegies2015] J. Portegies, R. Fick, G. Sanguinetti, S. Meesters, G.Girard, and R. Duits. (2015) Improving Fiber Alignment in HARDI by Combining Contextual PDE flow with Constrained Spherical Deconvolution. PLoS One. .. [DuitsAndFranken2011] R. Duits and E. Franken (2011) Left-invariant diffusions on the space of positions and orientations and their application to crossing-preserving smoothing of HARDI images. International Journal of Computer Vision, 92:231-264. .. [Rodrigues2010] P. Rodrigues, R. Duits, B. Romeny, A. 
Vilanova (2010). Accelerated Diffusion Operators for Enhancing DW-MRI. Eurographics Workshop on Visual Computing for Biology and Medicine. The Eurographics Association. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/gradients_spheres.py000066400000000000000000000136721317371701200212210ustar00rootroot00000000000000""" ===================== Gradients and Spheres ===================== This example shows how you can create gradient tables and sphere objects using dipy_. Usually, as we saw in :ref:`example_quick_start`, you load your b-values and b-vectors from disk and then you can create your own gradient table. But this time let's say that you are an MR physicist and you want to design a new gradient scheme or you are a scientist who wants to simulate many different gradient schemes. Now let's assume that you are interested in creating a multi-shell acquisition with 2-shells, one at b=1000 $s/mm^2$ and one at b=2500 $s/mm^2$. For both shells let's say that we want a specific number of gradients (64) and we want to have the points on the sphere evenly distributed. This is possible using the ``disperse_charges`` which is an implementation of electrostatic repulsion [Jones1999]_. """ import numpy as np from dipy.core.sphere import disperse_charges, Sphere, HemiSphere """ We can first create some random points on a ``HemiSphere`` using spherical polar coordinates. """ n_pts = 64 theta = np.pi * np.random.rand(n_pts) phi = 2 * np.pi * np.random.rand(n_pts) hsph_initial = HemiSphere(theta=theta, phi=phi) """ Next, we call ``disperse_charges`` which will iteratively move the points so that the electrostatic potential energy is minimized. """ hsph_updated, potential = disperse_charges(hsph_initial, 5000) """ In ``hsph_updated`` we have the updated ``HemiSphere`` with the points nicely distributed on the hemisphere. Let's visualize them. """ from dipy.viz import fvtk ren = fvtk.ren() ren.SetBackground(1, 1, 1) fvtk.add(ren, fvtk.point(hsph_initial.vertices, fvtk.colors.red, point_radius=0.05)) fvtk.add(ren, fvtk.point(hsph_updated.vertices, fvtk.colors.green, point_radius=0.05)) print('Saving illustration as initial_vs_updated.png') fvtk.record(ren, out_path='initial_vs_updated.png', size=(300, 300)) """ .. figure:: initial_vs_updated.png :align: center Example of electrostatic repulsion of red points which become green points. We can also create a sphere from the hemisphere and show it in the following way. """ sph = Sphere(xyz = np.vstack((hsph_updated.vertices, -hsph_updated.vertices))) fvtk.rm_all(ren) fvtk.add(ren, fvtk.point(sph.vertices, fvtk.colors.green, point_radius=0.05)) print('Saving illustration as full_sphere.png') fvtk.record(ren, out_path='full_sphere.png', size=(300, 300)) """ .. figure:: full_sphere.png :align: center Full sphere. It is time to create the Gradients. For this reason we will need to use the function ``gradient_table`` and fill it with the ``hsph_updated`` vectors that we created above. """ from dipy.core.gradients import gradient_table vertices = hsph_updated.vertices values = np.ones(vertices.shape[0]) """ We need two stacks of ``vertices``, one for every shell, and we need two sets of b-values, one at 1000 $s/mm^2$, and one at 2500 $s/mm^2$, as we discussed previously. """ bvecs = np.vstack((vertices, vertices)) bvals = np.hstack((1000 * values, 2500 * values)) """ We can also add some b0s. Let's add one in the beginning and one at the end. 
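The insertion is done with ``np.insert``, which, given a tuple of indices, places one copy of the value before each of those positions. A tiny standalone illustration on a toy array (unrelated to the gradient table itself)::

    import numpy as np

    a = np.array([1, 2, 3])
    np.insert(a, (0, a.shape[0]), 0)  # array([0, 1, 2, 3, 0])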
""" bvecs = np.insert(bvecs, (0, bvecs.shape[0]), np.array([0, 0, 0]), axis=0) bvals = np.insert(bvals, (0, bvals.shape[0]), 0) print(bvals) """ :: [ 0. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 2500. 0.] """ print(bvecs) """ :: [[ 0. 0. 0. ] [-0.80451777 -0.16877559 0.56944355] [ 0.32822557 -0.94355999 0.04430036] [-0.23584135 -0.96241331 0.13468285] [-0.39207424 -0.73505312 0.55314981] [-0.32539386 -0.16751384 0.93062235] [-0.82043195 -0.39411534 0.41420347] [ 0.65741493 0.74947875 0.07802061] [ 0.88853765 0.45303621 0.07251925] [ 0.39638642 -0.15185138 0.90543855] ... [ 0.10175269 0.08197111 0.99142681] [ 0.50577702 -0.37862345 0.77513476] [ 0.42845026 0.40155296 0.80943535] [ 0.26939707 0.81103868 0.51927014] [-0.48938584 -0.43780086 0.75420946] [ 0. 0. 0. ]] Both b-values and b-vectors look correct. Let's now create the ``GradientTable``. """ gtab = gradient_table(bvals, bvecs) fvtk.rm_all(ren) """ We can also visualize the gradients. Let's color with blue the first shell and with cyan the second shell. """ colors_b1000 = fvtk.colors.blue * np.ones(vertices.shape) colors_b2500 = fvtk.colors.cyan * np.ones(vertices.shape) colors = np.vstack((colors_b1000, colors_b2500)) colors = np.insert(colors, (0, colors.shape[0]), np.array([0, 0, 0]), axis=0) colors = np.ascontiguousarray(colors) fvtk.add(ren, fvtk.point(gtab.gradients, colors, point_radius=100)) print('Saving illustration as gradients.png') fvtk.record(ren, out_path='gradients.png', size=(300, 300)) """ .. figure:: gradients.png :align: center Diffusion gradients. References ---------- .. [Jones1999] Jones, DK. et al. Optimal strategies for measuring diffusion in anisotropic systems by magnetic resonance imaging, Magnetic Resonance in Medicine, vol 42, no 3, 515-525, 1999. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/introduction_to_basic_tracking.py000066400000000000000000000175471317371701200237630ustar00rootroot00000000000000""" .. _intro_basic_tracking: ============================== Introduction to Basic Tracking ============================== Local fiber tracking is an approach used to model white matter fibers by creating streamlines from local directional information. The idea is as follows: if the local directionality of a tract/pathway segment is known, one can integrate along those directions to build a complete representation of that structure. Local fiber tracking is widely used in the field of diffusion MRI because it is simple and robust. In order to perform local fiber tracking, three things are needed: 1) A method for getting directions from a diffusion data set. 2) A method for identifying different tissue types within the data set. 3) A set of seeds from which to begin tracking. 
This example shows how to combine the 3 parts described above to create a tractography reconstruction from a diffusion data set. """ """ To begin, let's load an example HARDI data set from Stanford. If you have not already downloaded this data set, the first time you run this example you will need to be connected to the internet and this dataset will be downloaded to your computer. """ from dipy.data import read_stanford_labels hardi_img, gtab, labels_img = read_stanford_labels() data = hardi_img.get_data() labels = labels_img.get_data() affine = hardi_img.affine """ This dataset provides a label map in which all white matter tissues are labeled either 1 or 2. Lets create a white matter mask to restrict tracking to the white matter. """ white_matter = (labels == 1) | (labels == 2) """ 1. The first thing we need to begin fiber tracking is a way of getting directions from this diffusion data set. In order to do that, we can fit the data to a Constant Solid Angle ODF Model. This model will estimate the Orientation Distribution Function (ODF) at each voxel. The ODF is the distribution of water diffusion as a function of direction. The peaks of an ODF are good estimates for the orientation of tract segments at a point in the image. """ from dipy.reconst.shm import CsaOdfModel from dipy.data import default_sphere from dipy.direction import peaks_from_model csa_model = CsaOdfModel(gtab, sh_order=6) csa_peaks = peaks_from_model(csa_model, data, default_sphere, relative_peak_threshold=.8, min_separation_angle=45, mask=white_matter) """ 2. Next we need some way of restricting the fiber tracking to areas with good directionality information. We've already created the white matter mask, but we can go a step further and restrict fiber tracking to those areas where the ODF shows significant restricted diffusion by thresholding on the general fractional anisotropy (GFA). """ from dipy.tracking.local import ThresholdTissueClassifier classifier = ThresholdTissueClassifier(csa_peaks.gfa, .25) """ 3. Before we can begin tracking is to specify where to "seed" (begin) the fiber tracking. Generally, the seeds chosen will depend on the pathways one is interested in modeling. In this example, we'll use a $2 \times 2 \times 2$ grid of seeds per voxel, in a sagittal slice of the corpus callosum. Tracking from this region will give us a model of the corpus callosum tract. This slice has label value ``2`` in the labels image. """ from dipy.tracking import utils seed_mask = labels == 2 seeds = utils.seeds_from_mask(seed_mask, density=[2, 2, 2], affine=affine) """ Finally, we can bring it all together using ``LocalTracking``. We will then display the resulting streamlines using the ``fvtk`` module. """ from dipy.tracking.local import LocalTracking from dipy.viz import fvtk from dipy.viz.colormap import line_colors # Initialization of LocalTracking. The computation happens in the next step. streamlines = LocalTracking(csa_peaks, classifier, seeds, affine, step_size=.5) # Compute streamlines and store as a list. streamlines = list(streamlines) # Prepare the display objects. color = line_colors(streamlines) if fvtk.have_vtk: streamlines_actor = fvtk.line(streamlines, line_colors(streamlines)) # Create the 3D display. r = fvtk.ren() fvtk.add(r, streamlines_actor) # Save still images for this static example. Or for interactivity use # fvtk.show fvtk.record(r, n_frames=1, out_path='deterministic.png', size=(800, 800)) """ .. 
figure:: deterministic.png :align: center **Corpus Callosum Deterministic** We've created a deterministic set of streamlines, so called because if you repeat the fiber tracking (keeping all the inputs the same) you will get exactly the same set of streamlines. We can save the streamlines as a Trackvis file so it can be loaded into other software for visualization or further analysis. """ from dipy.io.trackvis import save_trk save_trk("CSA_detr.trk", streamlines, affine, labels.shape) """ Next let's try some probabilistic fiber tracking. For this, we'll be using the Constrained Spherical Deconvolution (CSD) Model. This model represents each voxel in the data set as a collection of small white matter fibers with different orientations. The density of fibers along each orientation is known as the Fiber Orientation Distribution (FOD). In order to perform probabilistic fiber tracking, we pick a fiber from the FOD at random at each new location along the streamline. Note: one could use this model to perform deterministic fiber tracking by always tracking along the directions that have the most fibers. Let's begin probabilistic fiber tracking by fitting the data to the CSD model. """ from dipy.reconst.csdeconv import (ConstrainedSphericalDeconvModel, auto_response) response, ratio = auto_response(gtab, data, roi_radius=10, fa_thr=0.7) csd_model = ConstrainedSphericalDeconvModel(gtab, response, sh_order=6) csd_fit = csd_model.fit(data, mask=white_matter) """ Next we'll need to make a ``ProbabilisticDirectionGetter``. Because the CSD model represents the FOD using the spherical harmonic basis, we can use the ``from_shcoeff`` method to create the direction getter. This direction getter will randomly sample directions from the FOD each time the tracking algorithm needs to take another step. """ from dipy.direction import ProbabilisticDirectionGetter prob_dg = ProbabilisticDirectionGetter.from_shcoeff(csd_fit.shm_coeff, max_angle=30., sphere=default_sphere) """ As with deterministic tracking, we'll need to use a tissue classifier to restrict the tracking to the white matter of the brain. One might be tempted to use the GFA of the CSD FODs to build a tissue classifier, however the GFA values of these FODs don't classify gray matter and white matter well. We will therefore use the GFA from the CSA model which we fit for the first section of this example. Alternatively, one could fit a ``TensorModel`` to the data and use the fractional anisotropy (FA) to build a tissue classifier. """ classifier = ThresholdTissueClassifier(csa_peaks.gfa, .25) """ Next we can pass this direction getter, along with the ``classifier`` and ``seeds``, to ``LocalTracking`` to get a probabilistic model of the corpus callosum. """ streamlines = LocalTracking(prob_dg, classifier, seeds, affine, step_size=.5, max_cross=1) # Compute streamlines and store as a list. streamlines = list(streamlines) # Prepare the display objects. color = line_colors(streamlines) if fvtk.have_vtk: streamlines_actor = fvtk.line(streamlines, line_colors(streamlines)) # Create the 3D display. r = fvtk.ren() fvtk.add(r, streamlines_actor) # Save still images for this static example. fvtk.record(r, n_frames=1, out_path='probabilistic.png', size=(800, 800)) """ .. figure:: probabilistic.png :align: center Corpus callosum probabilistic tracking. 
""" save_trk("CSD_prob.trk", streamlines, affine, labels.shape) dipy-0.13.0/doc/examples/kfold_xval.py000066400000000000000000000127541317371701200176410ustar00rootroot00000000000000""" ============================================ K-fold cross-validation for model comparison ============================================ Different models of diffusion MRI can be compared based on their accuracy in fitting the diffusion signal. Here, we demonstrate this by comparing two models: the diffusion tensor model (DTI) and Constrained Spherical Deconvolution (CSD). These models differ from each other substantially. DTI approximates the diffusion pattern as a 3D Gaussian distribution, and has only 6 free parameters. CSD, on the other hand, fits many more parameters. The models aare also not nested, so they cannot be compared using the log-likelihood ratio. A general way to perform model comparison is cross-validation [Hastie2008]_. In this method, a model is fit to some of the data (a *learning set*) and the model is then used to predict a held-out set (a *testing set*). The model predictions can then be compared to estimate prediction error on the held out set. This method has been used for comparison of models such as DTI and CSD [Rokem2014]_, and has the advantage that it the comparison is imprevious to differences in the number of parameters in the model, and it can be used to compare models that are not nested. In dipy_, we include an implementation of k-fold cross-validation. In this method, the data is divided into $k$ different segments. In each iteration $\frac{1}{k}th$ of the data is held out and the model is fit to the other $\frac{k-1}{k}$ parts of the data. A prediction of the held out data is done and recorded. At the end of $k$ iterations a prediction of all of the data will have been conducted, and this can be compared directly to all of the data. First, we import that modules needed for this example. In particular, the :mod:`reconst.cross_validation` module implements k-fold cross-validation """ import numpy as np np.random.seed(2014) import matplotlib.pyplot as plt import dipy.data as dpd import dipy.reconst.cross_validation as xval import dipy.reconst.dti as dti import dipy.reconst.csdeconv as csd import scipy.stats as stats """ We fetch some data and select a couple of voxels to perform comparisons on. One lies in the corpus callosum (cc), while the other is in the centrum semiovale (cso), a part of the brain known to contain multiple crossing white matter fiber populations. """ dpd.fetch_stanford_hardi() img, gtab = dpd.read_stanford_hardi() data = img.get_data() cc_vox = data[40, 70, 38] cso_vox = data[30, 76, 38] """ We initialize each kind of model: """ dti_model = dti.TensorModel(gtab) response, ratio = csd.auto_response(gtab, data, roi_radius=10, fa_thr=0.7) csd_model = csd.ConstrainedSphericalDeconvModel(gtab, response) """ Next, we perform cross-validation for each kind of model, comparing model predictions to the diffusion MRI data in each one of these voxels. Note that we use 2-fold cross-validation, which means that in each iteration, the model will be fit to half of the data, and used to predict the other half. 
""" dti_cc = xval.kfold_xval(dti_model, cc_vox, 2) csd_cc = xval.kfold_xval(csd_model, cc_vox, 2, response) dti_cso = xval.kfold_xval(dti_model, cso_vox, 2) csd_cso = xval.kfold_xval(csd_model, cso_vox, 2, response) """ We plot a scatter plot of the data with the model predictions in each of these voxels, focusing only on the diffusion-weighted measurements (each point corresponds to a different gradient direction). The two models are compared in each sub-plot (blue=DTI, red=CSD). """ fig, ax = plt.subplots(1,2) fig.set_size_inches([12,6]) ax[0].plot(cc_vox[~gtab.b0s_mask], dti_cc[~gtab.b0s_mask], 'o', color='b') ax[0].plot(cc_vox[~gtab.b0s_mask], csd_cc[~gtab.b0s_mask], 'o', color='r') ax[1].plot(cso_vox[~gtab.b0s_mask], dti_cso[~gtab.b0s_mask], 'o', color='b', label='DTI') ax[1].plot(cso_vox[~gtab.b0s_mask], csd_cso[~gtab.b0s_mask], 'o', color='r', label='CSD') plt.legend(loc='upper left') for this_ax in ax: this_ax.set_xlabel('Data (relative to S0)') this_ax.set_ylabel('Model prediction (relative to S0)') fig.savefig("model_predictions.png") """ .. figure:: model_predictions.png :align: center Model predictions. """ """ We can also quantify the goodness of fit of the models by calculating an R-squared score: """ cc_dti_r2=stats.pearsonr(cc_vox[~gtab.b0s_mask], dti_cc[~gtab.b0s_mask])[0]**2 cc_csd_r2=stats.pearsonr(cc_vox[~gtab.b0s_mask], csd_cc[~gtab.b0s_mask])[0]**2 cso_dti_r2=stats.pearsonr(cso_vox[~gtab.b0s_mask], dti_cso[~gtab.b0s_mask])[0]**2 cso_csd_r2=stats.pearsonr(cso_vox[~gtab.b0s_mask], csd_cso[~gtab.b0s_mask])[0]**2 print("Corpus callosum\n" "DTI R2 : %s\n" "CSD R2 : %s\n" "\n" "Centrum Semiovale\n" "DTI R2 : %s\n" "CSD R2 : %s\n" % (cc_dti_r2, cc_csd_r2, cso_dti_r2, cso_csd_r2)) """ This should look something like this:: Corpus callosum DTI R2 : 0.782881752597 CSD R2 : 0.805764364116 Centrum Semiovale DTI R2 : 0.431921832012 CSD R2 : 0.604806420501 As you can see, DTI is a pretty good model for describing the signal in the CC, while CSD is much better in describing the signal in regions of multiple crossing fibers. References ---------- .. [Hastie2008] Hastie, T., Tibshirani, R., Friedman, J. (2008). The Elements of Statistical Learning: Data Mining, Inference and Prediction. Springer-Verlag, Berlin .. [Rokem2014] Rokem, A., Chan, K.L. Yeatman, J.D., Pestilli, F., Mezer, A., Wandell, B.A., 2014. Evaluating the accuracy of diffusion models at multiple b-values with cross-validation. ISMRM 2014. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/linear_fascicle_evaluation.py000066400000000000000000000264521317371701200230420ustar00rootroot00000000000000""" ================================================= Linear fascicle evaluation (LiFE) ================================================= Evaluating the results of tractography algorithms is one of the biggest challenges for diffusion MRI. One proposal for evaluation of tractography results is to use a forward model that predicts the signal from each of a set of streamlines, and then fit a linear model to these simultaneous predictions [Pestilli2014]_. We will use streamlines generated using probabilistic tracking on CSA peaks. For brevity, we will include in this example only streamlines going through the corpus callosum connecting left to right superior frontal cortex. The process of tracking and finding these streamlines is fully demonstrated in the :ref:`streamline_tools` example. If this example has been run, we can read the streamlines from file. Otherwise, we'll run that example first, by importing it. 
This provides us with all of the variables that were created in that example: """ import numpy as np import os.path as op import nibabel as nib import dipy.core.optimize as opt if not op.exists('lr-superiorfrontal.trk'): from streamline_tools import * else: # We'll need to know where the corpus callosum is from these variables: from dipy.data import (read_stanford_labels, fetch_stanford_t1, read_stanford_t1) hardi_img, gtab, labels_img = read_stanford_labels() labels = labels_img.get_data() cc_slice = labels == 2 fetch_stanford_t1() t1 = read_stanford_t1() t1_data = t1.get_data() data = hardi_img.get_data() # Read the candidates from file in voxel space: candidate_sl = [s[0] for s in nib.trackvis.read('lr-superiorfrontal.trk', points_space='voxel')[0]] """ The streamlines that are entered into the model are termed 'candidate streamliness' (or a 'candidate connectome'): """ """ Let's visualize the initial candidate group of streamlines in 3D, relative to the anatomical structure of this brain: """ from dipy.viz.colormap import line_colors from dipy.viz import fvtk candidate_streamlines_actor = fvtk.streamtube(candidate_sl, line_colors(candidate_sl)) cc_ROI_actor = fvtk.contour(cc_slice, levels=[1], colors=[(1., 1., 0.)], opacities=[1.]) vol_actor = fvtk.slicer(t1_data) vol_actor.display(40, None, None) vol_actor2 = vol_actor.copy() vol_actor2.display(None, None, 35) # Add display objects to canvas ren = fvtk.ren() fvtk.add(ren, candidate_streamlines_actor) fvtk.add(ren, cc_ROI_actor) fvtk.add(ren, vol_actor) fvtk.add(ren, vol_actor2) fvtk.record(ren, n_frames=1, out_path='life_candidates.png', size=(800, 800)) """ .. figure:: life_candidates.png :align: center **Candidate connectome before life optimization** """ """ Next, we initialize a LiFE model. We import the ``dipy.tracking.life`` module, which contains the classes and functions that implement the model: """ import dipy.tracking.life as life fiber_model = life.FiberModel(gtab) """ Since we read the streamlines from a file, already in the voxel space, we do not need to transform them into this space. Otherwise, if the streamline coordinates were in the world space (relative to the scanner iso-center, or relative to the mid-point of the AC-PC-connecting line), we would use this:: inv_affine = np.linalg.inv(hardi_img.affine) the inverse transformation from world space to the voxel space as the affine for the following model fit. The next step is to fit the model, producing a ``FiberFit`` class instance, that stores the data, as well as the results of the fitting procedure. The LiFE model posits that the signal in the diffusion MRI volume can be explained by the streamlines, by the equation .. math:: y = X\beta Where $y$ is the diffusion MRI signal, $\beta$ are a set of weights on the streamlines and $X$ is a design matrix. This matrix has the dimensions $m$ by $n$, where $m=n_{voxels} \cdot n_{directions}$, and $n_{voxels}$ is the set of voxels in the ROI that contains the streamlines considered in this model. The $i^{th}$ column of the matrix contains the expected contributions of the $i^{th}$ streamline (arbitrarily ordered) to each of the voxels. $X$ is a sparse matrix, because each streamline traverses only a small percentage of the voxels. The expected contributions of the streamline are calculated using a forward model, where each node of the streamline is modeled as a cylindrical fiber compartment with Gaussian diffusion, using the diffusion tensor model. 
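To make the form of the problem concrete, here is a toy version of the fit with tiny dense matrices and made-up numbers; the real design matrix is sparse and vastly larger, and this is not necessarily how DIPY solves the problem internally. In the real fit, many redundant candidate streamlines receive a weight of exactly zero, as shown further below::

    import numpy as np
    from scipy.optimize import nnls

    X = np.array([[1.0, 1.0, 0.0],
                  [0.0, 1.0, 1.0],
                  [1.0, 0.0, 1.0]])   # hypothetical contributions of 3 streamlines
    y = np.array([1.0, 1.0, 1.0])     # hypothetical demeaned signal
    beta, residual_norm = nnls(X, y)  # non-negative streamline weights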
See [Pestilli2014]_ for more detail on the model, and variations of this model. """ fiber_fit = fiber_model.fit(data, candidate_sl, affine=np.eye(4)) """ The ``FiberFit`` class instance holds various properties of the model fit. For example, it has the weights $\beta$, that are assigned to each streamline. In most cases, a tractography through some region will include redundant streamlines, and these streamlines will have $\beta_i$ that are 0. """ import matplotlib.pyplot as plt import matplotlib fig, ax = plt.subplots(1) ax.hist(fiber_fit.beta, bins=100, histtype='step') ax.set_xlabel('Fiber weights') ax.set_ylabel('# fibers') fig.savefig('beta_histogram.png') """ .. figure:: beta_histogram.png :align: center **LiFE streamline weights** """ """ We use $\beta$ to filter out these redundant streamlines, and generate an optimized group of streamlines: """ optimized_sl = list(np.array(candidate_sl)[np.where(fiber_fit.beta>0)[0]]) ren = fvtk.ren() fvtk.add(ren, fvtk.streamtube(optimized_sl, line_colors(optimized_sl))) fvtk.add(ren, cc_ROI_actor) fvtk.add(ren, vol_actor) fvtk.record(ren, n_frames=1, out_path='life_optimized.png', size=(800, 800)) """ .. figure:: life_optimized.png :align: center **Streamlines selected via LiFE optimization** """ """ The new set of streamlines should do well in fitting the data, and redundant streamlines have presumably been removed (in this case, about 50% of the streamlines). But how well does the model do in explaining the diffusion data? We can quantify that: the ``FiberFit`` class instance has a `predict` method, which can be used to invert the model and predict back either the data that was used to fit the model, or other unseen data (e.g. in cross-validation, see :ref:`kfold_xval`). Without arguments, the ``.predict()`` method will predict the diffusion signal for the same gradient table that was used in the fit data, but ``gtab`` and ``S0`` keyword arguments can be used to predict for other acquisition schemes and other baseline non-diffusion-weighted signals. """ model_predict = fiber_fit.predict() """ We will focus on the error in prediction of the diffusion-weighted data, and calculate the root of the mean squared error. """ model_error = model_predict - fiber_fit.data model_rmse = np.sqrt(np.mean(model_error[:, 10:] ** 2, -1)) """ As a baseline against which we can compare, we calculate another error term. In this case, we assume that the weight for each streamline is equal to zero. This produces the naive prediction of the mean of the signal in each voxel. """ beta_baseline = np.zeros(fiber_fit.beta.shape[0]) pred_weighted = np.reshape(opt.spdot(fiber_fit.life_matrix, beta_baseline), (fiber_fit.vox_coords.shape[0], np.sum(~gtab.b0s_mask))) mean_pred = np.empty((fiber_fit.vox_coords.shape[0], gtab.bvals.shape[0])) S0 = fiber_fit.b0_signal """ Since the fitting is done in the demeaned S/S0 domain, we need to add back the mean and then multiply by S0 in every voxel: """ mean_pred[..., gtab.b0s_mask] = S0[:, None] mean_pred[..., ~gtab.b0s_mask] =\ (pred_weighted + fiber_fit.mean_signal[:, None]) * S0[:, None] mean_error = mean_pred - fiber_fit.data mean_rmse = np.sqrt(np.mean(mean_error ** 2, -1)) """ First, we can compare the overall distribution of errors between these two alternative models of the ROI. We show the distribution of differences in error (improvement through model fitting, relative to the baseline model). Here, positive values denote an improvement in error with model fit, relative to without the model fit. 
""" fig, ax = plt.subplots(1) ax.hist(mean_rmse - model_rmse, bins=100, histtype='step') ax.text(0.2, 0.9,'Median RMSE, mean model: %.2f' % np.median(mean_rmse), horizontalalignment='left', verticalalignment='center', transform=ax.transAxes) ax.text(0.2, 0.8,'Median RMSE, LiFE: %.2f' % np.median(model_rmse), horizontalalignment='left', verticalalignment='center', transform=ax.transAxes) ax.set_xlabel('RMS Error') ax.set_ylabel('# voxels') fig.savefig('error_histograms.png') """ .. figure:: error_histograms.png :align: center Improvement in error with fitting of the LiFE model. """ """ Second, we can show the spatial distribution of the two error terms, and of the improvement with the model fit: """ vol_model = np.ones(data.shape[:3]) * np.nan vol_model[fiber_fit.vox_coords[:, 0], fiber_fit.vox_coords[:, 1], fiber_fit.vox_coords[:, 2]] = model_rmse vol_mean = np.ones(data.shape[:3]) * np.nan vol_mean[fiber_fit.vox_coords[:, 0], fiber_fit.vox_coords[:, 1], fiber_fit.vox_coords[:, 2]] = mean_rmse vol_improve = np.ones(data.shape[:3]) * np.nan vol_improve[fiber_fit.vox_coords[:, 0], fiber_fit.vox_coords[:, 1], fiber_fit.vox_coords[:, 2]] = mean_rmse - model_rmse sl_idx = 49 from mpl_toolkits.axes_grid1 import AxesGrid fig = plt.figure() fig.subplots_adjust(left=0.05, right=0.95) ax = AxesGrid(fig, 111, nrows_ncols = (1, 3), label_mode = "1", share_all = True, cbar_location="top", cbar_mode="each", cbar_size="10%", cbar_pad="5%") ax[0].matshow(np.rot90(t1_data[sl_idx, :, :]), cmap=matplotlib.cm.bone) im = ax[0].matshow(np.rot90(vol_model[sl_idx, :, :]), cmap=matplotlib.cm.hot) ax.cbar_axes[0].colorbar(im) ax[1].matshow(np.rot90(t1_data[sl_idx, :, :]), cmap=matplotlib.cm.bone) im = ax[1].matshow(np.rot90(vol_mean[sl_idx, :, :]), cmap=matplotlib.cm.hot) ax.cbar_axes[1].colorbar(im) ax[2].matshow(np.rot90(t1_data[sl_idx, :, :]), cmap=matplotlib.cm.bone) im = ax[2].matshow(np.rot90(vol_improve[sl_idx, :, :]), cmap=matplotlib.cm.RdBu) ax.cbar_axes[2].colorbar(im) for lax in ax: lax.set_xticks([]) lax.set_yticks([]) fig.savefig("spatial_errors.png") """ .. figure:: spatial_errors.png :align: center Spatial distribution of error and improvement. """ """ This image demonstrates that in many places, fitting the LiFE model results in substantial reduction of the error. Note that for full-brain tractographies *LiFE* can require large amounts of memory. For detailed memory profiling of the algorithm, based on the streamlines generated in :ref:`example_probabilistic_fiber_tracking`, see `this IPython notebook `_. For the Matlab implementation of LiFE, head over to `Franco Pestilli's github webpage `_. References ---------- .. [Pestilli2014] Pestilli, F., Yeatman, J, Rokem, A. Kay, K. and Wandell B.A. (2014). Validation and statistical inference in living connectomes. Nature Methods 11: 1058-1063. doi:10.1038/nmeth.3098 .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/piesno.py000066400000000000000000000075631317371701200170070ustar00rootroot00000000000000""" ============================= Noise estimation using PIESNO ============================= Often, one is interested in estimating the noise in the diffusion signal. One of the methods to do this is the Probabilistic Identification and Estimation of Noise (PIESNO) framework [Koay2009]_. Using this method, one can detect the standard deviation of the noise from Diffusion-Weighted Imaging (DWI). PIESNO also works with multiple channel DWI datasets that are acquired from N array coils for both SENSE and GRAPPA reconstructions. 
The PIESNO method works in two steps: 1) First, it finds voxels that are most likely background voxels. Intuitively, these voxels have very similar diffusion-weighted intensities (up to some noise) in the fourth dimension of the DWI dataset. White matter, gray matter or CSF voxels have diffusion intensities that vary quite a lot across different directions. 2) From these estimated background voxels and the input number of coils $N$, PIESNO finds what sigma each Gaussian from each of the $N$ coils would have generated the observed Rician ($N = 1$) or non-central Chi ($N > 1$) distributed noise profile in the DWI datasets. PIESNO makes an important assumption: the Gaussian noise standard deviation is assumed to be uniform. The noise is uniform across multiple slice locations or across multiple images of the same location. For the full details, please refer to the original paper. In this example, we will demonstrate the use of PIESNO with a 3-shell data-set. We start by importing necessary modules and functions and loading the data: """ import nibabel as nib import numpy as np from dipy.denoise.noise_estimate import piesno from dipy.data import fetch_sherbrooke_3shell, read_sherbrooke_3shell fetch_sherbrooke_3shell() img, gtab = read_sherbrooke_3shell() data = img.get_data() """ Now that we have fetched a dataset, we must call PIESNO with the right number of coils used to acquire this dataset. It is also important to know what was the parallel reconstruction algorithm used. Here, the data comes from a GRAPPA reconstruction, was acquired with a 12-elements head coil available on the Tim Trio Siemens, for which the 12 coil elements are combined into 4 groups of 3 coil elements each. The signal is therefore received through 4 distinct groups of receiver channels, yielding N = 4. Had we used a GE acquisition, we would have used N=1 even if multiple channel coils are used because GE uses a SENSE reconstruction, which has a Rician noise nature and thus N is always 1. """ sigma, mask = piesno(data, N=4, return_mask=True) axial = data[:, :, data.shape[2] // 2, 0].T axial_piesno = mask[:, :, data.shape[2] // 2].T import matplotlib.pyplot as plt fig, ax = plt.subplots(1, 2) ax[0].imshow(axial, cmap='gray', origin='lower') ax[0].set_title('Axial slice of the b=0 data') ax[1].imshow(axial_piesno, cmap='gray', origin='lower') ax[1].set_title('Background voxels from the data') for a in ax: a.set_axis_off() plt.savefig('piesno.png', bbox_inches='tight') """ .. figure:: piesno.png :align: center Showing the mid axial slice of the b=0 image (left) and estimated background voxels (right) used to estimate the noise standard deviation. """ nib.save(nib.Nifti1Image(mask, img.affine, img.header), 'mask_piesno.nii.gz') print('The noise standard deviation is sigma= ', sigma) print('The std of the background is =', np.std(data[mask[...,None].astype(np.bool)])) """ Here, we obtained a noise standard deviation of 7.26. For comparison, a simple standard deviation of all voxels in the estimated mask (as done in the previous example :ref:`example_snr_in_cc`) gives a value of 6.1. """ """ References ---------- .. [Koay2009] Koay C.G., E. Ozarslan, C. Pierpaoli. Probabilistic Identification and Estimation of Noise (PIESNO): A self-consistent approach and its applications in MRI. JMR, 199(1):94-103, 2009. .. 
include:: ../links_names.inc """ dipy-0.13.0/doc/examples/probabilistic_fiber_tracking.py000066400000000000000000000115501317371701200233600ustar00rootroot00000000000000""" ===================================================== An introduction to the Probabilistic Direction Getter ===================================================== Probabilistic fiber tracking is a way of reconstructing white matter connections using diffusion MR imaging. Like deterministic fiber tracking, the probabilistic approach follows the trajectory of a possible pathway step by step starting at a seed, however, unlike deterministic tracking, the tracking direction at each point along the path is chosen at random from a distribution. The distribution at each point is different and depends on the observed diffusion data at that point. The distribution of tracking directions at each point can be represented as a probability mass function (PMF) if the possible tracking directions are restricted to discrete numbers of well distributed points on a sphere. This example is an extension of the :ref:`intro_basic_tracking` example. We'll begin by repeating a few steps from that example, loading the data and fitting a Constrained Spherical Deconvolution (CSD) model. """ from dipy.data import read_stanford_labels from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel from dipy.tracking import utils from dipy.tracking.local import (ThresholdTissueClassifier, LocalTracking) hardi_img, gtab, labels_img = read_stanford_labels() data = hardi_img.get_data() labels = labels_img.get_data() affine = hardi_img.affine seed_mask = labels == 2 white_matter = (labels == 1) | (labels == 2) seeds = utils.seeds_from_mask(seed_mask, density=1, affine=affine) csd_model = ConstrainedSphericalDeconvModel(gtab, None, sh_order=6) csd_fit = csd_model.fit(data, mask=white_matter) """ We use the GFA of the CSA model to build a tissue classifier. """ from dipy.reconst.shm import CsaOdfModel csa_model = CsaOdfModel(gtab, sh_order=6) gfa = csa_model.fit(data, mask=white_matter).gfa classifier = ThresholdTissueClassifier(gfa, .25) """ The Fiber Orientation Distribution (FOD) of the CSD model estimates the distribution of small fiber bundles within each voxel. We can use this distribution for probabilistic fiber tracking. One way to do this is to represent the FOD using a discrete sphere. This discrete FOD can be used by the ``ProbabilisticDirectionGetter`` as a PMF for sampling tracking directions. We need to clip the FOD to use it as a PMF because the latter cannot have negative values. Ideally, the FOD should be strictly positive, but because of noise and/or model failures sometimes it can have negative values. """ from dipy.direction import ProbabilisticDirectionGetter from dipy.data import small_sphere from dipy.io.trackvis import save_trk fod = csd_fit.odf(small_sphere) pmf = fod.clip(min=0) prob_dg = ProbabilisticDirectionGetter.from_pmf(pmf, max_angle=30., sphere=small_sphere) streamlines = LocalTracking(prob_dg, classifier, seeds, affine, step_size=.5) save_trk("probabilistic_small_sphere.trk", streamlines, affine, labels.shape) """ One disadvantage of using a discrete PMF to represent possible tracking directions is that it tends to take up a lot of memory (RAM). The size of the PMF, the FOD in this case, must be equal to the number of possible tracking directions on the hemisphere, and every voxel has a unique PMF. In this case the data is ``(81, 106, 76)`` and ``small_sphere`` has 181 directions so the FOD is ``(81, 106, 76, 181)``. 
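To get a feeling for the memory cost, the size of such a PMF array can be computed directly (assuming 64-bit floats; the dtype used in practice may differ)::

    import numpy as np

    pmf_shape = (81, 106, 76, 181)
    n_bytes = np.prod(pmf_shape) * 8   # 8 bytes per float64 value
    print(n_bytes / 2 ** 30, 'GiB')    # roughly 0.9 GiB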
One way to avoid sampling the PMF and holding it in memory is to build the direction getter directly from the spherical harmonic representation of the FOD. By using this approach, we can also use a larger sphere, like ``default_sphere`` which has 362 directions on the hemisphere, without having to worry about memory limitations. """ from dipy.data import default_sphere prob_dg = ProbabilisticDirectionGetter.from_shcoeff(csd_fit.shm_coeff, max_angle=30., sphere=default_sphere) streamlines = LocalTracking(prob_dg, classifier, seeds, affine, step_size=.5) save_trk("probabilistic_shm_coeff.trk", streamlines, affine, labels.shape) """ Not all model fits have the ``shm_coeff`` attribute because not all models use this basis to represent the data internally. However we can fit the ODF of any model to the spherical harmonic basis using the ``peaks_from_model`` function. """ from dipy.direction import peaks_from_model peaks = peaks_from_model(csd_model, data, default_sphere, .5, 25, mask=white_matter, return_sh=True, parallel=True) fod_coeff = peaks.shm_coeff prob_dg = ProbabilisticDirectionGetter.from_shcoeff(fod_coeff, max_angle=30., sphere=default_sphere) streamlines = LocalTracking(prob_dg, classifier, seeds, affine, step_size=.5) save_trk("probabilistic_peaks_from_model.trk", streamlines, affine, labels.shape) dipy-0.13.0/doc/examples/quick_start.py000066400000000000000000000140751317371701200200370ustar00rootroot00000000000000""" ========================= Getting started with DIPY ========================= In diffusion MRI (dMRI) usually we use three types of files, a Nifti file with the diffusion weighted data, and two text files one with b-values and one with the b-vectors. In dipy_ we provide tools to load and process these files and we also provide access to publically available datasets for those who haven't acquired yet their own datasets. With the following commands we can download a dMRI dataset """ from dipy.data import fetch_sherbrooke_3shell fetch_sherbrooke_3shell() """ By default these datasets will go in the ``.dipy`` folder inside your home directory. Here is how you can access them. """ from os.path import expanduser, join home = expanduser('~') """ ``dname`` holds the directory name where the 3 files are in. """ dname = join(home, '.dipy', 'sherbrooke_3shell') """ Here, we show the complete filenames of the 3 files """ fdwi = join(dname, 'HARDI193.nii.gz') print(fdwi) fbval = join(dname, 'HARDI193.bval') print(fbval) fbvec = join(dname, 'HARDI193.bvec') print(fbvec) """ ``/home/username/.dipy/sherbrooke_3shell/HARDI193.nii.gz`` ``/home/username/.dipy/sherbrooke_3shell/HARDI193.bval`` ``/home/username/.dipy/sherbrooke_3shell/HARDI193.bvec`` Now, that we have their filenames we can start checking what these look like. Let's start first by loading the dMRI datasets. For this purpose, we use a python library called nibabel_ which enables us to read and write neuroimaging-specific file formats. """ import nibabel as nib img = nib.load(fdwi) data = img.get_data() """ ``data`` is a 4D array where the first 3 dimensions are the i, j, k voxel coordinates and the last dimension is the number of non-weighted (S0s) and diffusion-weighted volumes. We can very easily check the size of ``data`` in the following way: """ print(data.shape) """ ``(128, 128, 60, 194)`` We can also check the dimensions of each voxel in the following way: """ print(img.header.get_zooms()[:3]) """ ``(2.0, 2.0, 2.0)`` We can quickly visualize the results using matplotlib_. 
For example, let's show here the middle axial slices of volume 0 and volume 10. """ import matplotlib.pyplot as plt axial_middle = data.shape[2] // 2 plt.figure('Showing the datasets') plt.subplot(1, 2, 1).set_axis_off() plt.imshow(data[:, :, axial_middle, 0].T, cmap='gray', origin='lower') plt.subplot(1, 2, 2).set_axis_off() plt.imshow(data[:, :, axial_middle, 10].T, cmap='gray', origin='lower') plt.show() plt.savefig('data.png', bbox_inches='tight') """ .. figure:: data.png :align: center Showing the middle axial slice without (left) and with (right) diffusion weighting. The next step is to load the b-values and b-vectors from the disk using the function ``read_bvals_bvecs``. """ from dipy.io import read_bvals_bvecs bvals, bvecs = read_bvals_bvecs(fbval, fbvec) """ In DIPY, we use an object called ``GradientTable`` which holds all the acquisition specific parameters, e.g. b-values, b-vectors, timings and others. To create this object you can use the function ``gradient_table``. """ from dipy.core.gradients import gradient_table gtab = gradient_table(bvals, bvecs) """ Finally, you can use ``gtab`` (the GradientTable object) to show some information about the acquisition parameters """ print(gtab.info) """ B-values shape (193,) min 0.000000 max 3500.000000 B-vectors shape (193, 3) min -0.964050 max 0.999992 You, can also see the b-values using: """ print(gtab.bvals) """ :: [ 0. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 1000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 2000. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500. 3500.] Or, for example the 10 first b-vectors using: """ print(gtab.bvecs[:10, :]) """ :: array([[ 0. , 0. , 0. ], [ 0.999979 , -0.00504001, -0.00402795], [ 0. , 0.999992 , -0.00398794], [-0.0257055 , 0.653861 , -0.756178 ], [ 0.589518 , -0.769236 , -0.246462 ], [-0.235785 , -0.529095 , -0.815147 ], [-0.893578 , -0.263559 , -0.363394 ], [ 0.79784 , 0.133726 , -0.587851 ], [ 0.232937 , 0.931884 , -0.278087 ], [ 0.93672 , 0.144139 , -0.31903 ]]) ``gtab`` can be used to tell what part of the data is the S0 volumes (volumes which correspond to b-values of 0). """ S0s = data[:, :, :, gtab.b0s_mask] """ Here, we had only 1 S0 as we can verify by looking at the dimensions of S0s """ print(S0s.shape) """ ``(128, 128, 60, 1)`` Just, for fun let's save this in a new Nifti file. """ nib.save(nib.Nifti1Image(S0s, img.affine), 'HARDI193_S0.nii.gz') """ Now, that we learned how to load dMRI datasets we can start the analysis. See example :ref:`example_reconst_dti` to learn how to create FA maps. .. 
include:: ../links_names.inc """ dipy-0.13.0/doc/examples/reconst_csa.py000066400000000000000000000072161317371701200200100ustar00rootroot00000000000000""" ================================================= Reconstruct with Constant Solid Angle (Q-Ball) ================================================= We show how to apply a Constant Solid Angle ODF (Q-Ball) model from Aganj et al. [Aganj2010]_ to your datasets. First import the necessary modules: """ import numpy as np import nibabel as nib from dipy.data import fetch_stanford_hardi, read_stanford_hardi, get_sphere from dipy.reconst.shm import CsaOdfModel, normalize_data from dipy.direction import peaks_from_model """ Download and read the data for this tutorial. """ fetch_stanford_hardi() img, gtab = read_stanford_hardi() """ img contains a nibabel Nifti1Image object (data) and gtab contains a GradientTable object (gradient information e.g. b-values). For example to read the b-values it is possible to write print(gtab.bvals). Load the raw diffusion data and the affine. """ data = img.get_data() print('data.shape (%d, %d, %d, %d)' % data.shape) """ data.shape ``(81, 106, 76, 160)`` Remove most of the background using DIPY's mask module. """ from dipy.segment.mask import median_otsu maskdata, mask = median_otsu(data, 3, 1, True, vol_idx=range(10, 50), dilate=2) """ We instantiate our CSA model with spherical harmonic order of 4 """ csamodel = CsaOdfModel(gtab, 4) """ `Peaks_from_model` is used to calculate properties of the ODFs (Orientation Distribution Function) and return for example the peaks and their indices, or GFA which is similar to FA but for ODF based models. This function mainly needs a reconstruction model, the data and a sphere as input. The sphere is an object that represents the spherical discrete grid where the ODF values will be evaluated. """ sphere = get_sphere('symmetric724') csapeaks = peaks_from_model(model=csamodel, data=maskdata, sphere=sphere, relative_peak_threshold=.5, min_separation_angle=25, mask=mask, return_odf=False, normalize_peaks=True) GFA = csapeaks.gfa print('GFA.shape (%d, %d, %d)' % GFA.shape) """ GFA.shape ``(81, 106, 76)`` Apart from GFA, csapeaks also has the attributes peak_values, peak_indices and ODF. peak_values shows the maxima values of the ODF and peak_indices gives us their position on the discrete sphere that was used to do the reconstruction of the ODF. In order to obtain the full ODF, return_odf should be True. Before enabling this option, make sure that you have enough memory. Let's visualize the ODFs of a small rectangular area in an axial slice of the splenium of the corpus callosum (CC). """ data_small = maskdata[13:43, 44:74, 28:29] from dipy.data import get_sphere sphere = get_sphere('symmetric724') from dipy.viz import fvtk r = fvtk.ren() csaodfs = csamodel.fit(data_small).odf(sphere) """ It is common with CSA ODFs to produce negative values, we can remove those using ``np.clip`` """ csaodfs = np.clip(csaodfs, 0, np.max(csaodfs, -1)[..., None]) fvtk.add(r, fvtk.sphere_funcs(csaodfs, sphere, colormap='jet')) print('Saving illustration as csa_odfs.png') fvtk.record(r, n_frames=1, out_path='csa_odfs.png', size=(600, 600)) """ .. figure:: csa_odfs.png :align: center Constant Solid Angle ODFs. .. include:: ../links_names.inc References ---------- .. [Aganj2010] Aganj I, Lenglet C, Sapiro G, Yacoub E, Ugurbil K, Harel N. "Reconstruction of the orientation distribution function in single- and multiple-shell q-ball imaging within constant solid angle", Magnetic Resonance in Medicine. 
2010 Aug;64(2):554-66. doi: 10.1002/mrm.22365 """ dipy-0.13.0/doc/examples/reconst_csa_parallel.py000066400000000000000000000071421317371701200216620ustar00rootroot00000000000000""" ==================================== Parallel reconstruction using Q-Ball ==================================== We show an example of parallel reconstruction using a Q-Ball Constant Solid Angle model (see Aganj et. al (MRM 2010)) and `peaks_from_model`. Import modules, fetch and read data, and compute the mask. """ import time from dipy.data import fetch_stanford_hardi, read_stanford_hardi, get_sphere from dipy.reconst.shm import CsaOdfModel from dipy.direction import peaks_from_model from dipy.segment.mask import median_otsu fetch_stanford_hardi() img, gtab = read_stanford_hardi() data = img.get_data() maskdata, mask = median_otsu(data, 3, 1, True, vol_idx=range(10, 50), dilate=2) """ We instantiate our CSA model with spherical harmonic order of 4 """ csamodel = CsaOdfModel(gtab, 4) """ `Peaks_from_model` is used to calculate properties of the ODFs (Orientation Distribution Function) and return for example the peaks and their indices, or GFA which is similar to FA but for ODF based models. This function mainly needs a reconstruction model, the data and a sphere as input. The sphere is an object that represents the spherical discrete grid where the ODF values will be evaluated. """ sphere = get_sphere('symmetric724') start_time = time.time() """ We will first run `peaks_from_model` using parallelism with 2 processes. If `nbr_processes` is None (default option) then this function will find the total number of processors from the operating system and use this number as `nbr_processes`. Sometimes it makes sense to use only a few of the processes in order to allow resources for other applications. However, most of the times using the default option will be sufficient. """ csapeaks_parallel = peaks_from_model(model=csamodel, data=maskdata, sphere=sphere, relative_peak_threshold=.5, min_separation_angle=25, mask=mask, return_odf=False, normalize_peaks=True, npeaks=5, parallel=True, nbr_processes=2) time_parallel = time.time() - start_time print("peaks_from_model using 2 processes ran in : " + str(time_parallel) + " seconds") """ peaks_from_model using 2 process ran in : 114.333221912 seconds, using 2 process If we don't use parallelism then we need to set `parallel=False`: """ start_time = time.time() csapeaks = peaks_from_model(model=csamodel, data=maskdata, sphere=sphere, relative_peak_threshold=.5, min_separation_angle=25, mask=mask, return_odf=False, normalize_peaks=True, npeaks=5, parallel=False, nbr_processes=None) time_single = time.time() - start_time print("peaks_from_model ran in : " + str(time_single) + " seconds") """ peaks_from_model ran in : 196.872478008 seconds """ print("Speedup factor : " + str(time_single / time_parallel)) """ Speedup factor : 1.72191839533 In Windows if you get a runtime error about frozen executable please start your script by adding your code above in a ``main`` function and use: if __name__ == '__main__': import multiprocessing multiprocessing.freeze_support() main() """ dipy-0.13.0/doc/examples/reconst_csd.py000066400000000000000000000170321317371701200200100ustar00rootroot00000000000000""" .. _reconst-csd: ======================================================= Reconstruction with Constrained Spherical Deconvolution ======================================================= This example shows how to use Constrained Spherical Deconvolution (CSD) introduced by Tournier et al. 
[Tournier2007]_. This method is mainly useful with datasets with gradient directions acquired on a spherical grid. The basic idea with this method is that if we could estimate the response function of a single fiber then we could deconvolve the measured signal and obtain the underlying fiber distribution. Let's first load the data. We will use a dataset with 10 b0s and 150 non-b0s with b-value 2000. """ import numpy as np from dipy.data import fetch_stanford_hardi, read_stanford_hardi fetch_stanford_hardi() img, gtab = read_stanford_hardi() data = img.get_data() """ You can verify the b-values of the datasets by looking at the attribute ``gtab.bvals``. In CSD there is an important pre-processing step: the estimation of the fiber response function. In order to do this we look for regions of the brain where it is known that there are single coherent fiber populations. For example if we use an ROI at the center of the brain, we will find single fibers from the corpus callosum. The ``auto_response`` function will calculate FA for an ROI of radius equal to ``roi_radius`` in the center of the volume and return the response function estimated in that region for the voxels with FA higher than 0.7. """ from dipy.reconst.csdeconv import auto_response response, ratio = auto_response(gtab, data, roi_radius=10, fa_thr=0.7) """ The ``response`` tuple contains two elements. The first is an array with the eigenvalues of the response function and the second is the average S0 for this response. It is good practice to always validate the result of auto_response. For this purpose we can print the elements of ``response`` and have a look at their values. """ print(response) """ (array([ 0.0014, 0.00029, 0.00029]), 416.206) The tensor generated from the response must be prolate (two smaller eigenvalues should be equal) and look anisotropic with a ratio of second to first eigenvalue of about 0.2. Or in other words, the axial diffusivity of this tensor should be around 5 times larger than the radial diffusivity. """ print(ratio) """ 0.21197 We can double-check that we have a good response function by visualizing the response function's ODF. Here is how you would do that: """ from dipy.viz import fvtk ren = fvtk.ren() evals = response[0] evecs = np.array([[0, 1, 0], [0, 0, 1], [1, 0, 0]]).T from dipy.data import get_sphere sphere = get_sphere('symmetric724') from dipy.sims.voxel import single_tensor_odf response_odf = single_tensor_odf(sphere.vertices, evals, evecs) response_actor = fvtk.sphere_funcs(response_odf, sphere) fvtk.add(ren, response_actor) print('Saving illustration as csd_response.png') fvtk.record(ren, out_path='csd_response.png', size=(200, 200)) """ .. figure:: csd_response.png :align: center Estimated response function. """ fvtk.rm(ren, response_actor) """ Depending on the dataset, FA threshold may not be the best way to find the best possible response function. For one, it depends on the diffusion tensor (FA and first eigenvector), which has lower accuracy at high b-values. Alternatively, the response function can be calibrated in a data-driven manner [Tax2014]_. First, the data is deconvolved with a 'fat' response function. All voxels that are considered to contain only one peak in this deconvolution (as determined by the peak threshold which gives an upper limit of the ratio of the second peak to the first peak) are maintained, and from these voxels a new response function is determined. This process is repeated until convergence is reached. 
Here we calibrate the response function on a small part of the data. """ from dipy.reconst.csdeconv import recursive_response """ A WM mask can shorten computation time for the whole dataset. Here it is created based on the DTI fit. """ import dipy.reconst.dti as dti tenmodel = dti.TensorModel(gtab) tenfit = tenmodel.fit(data, mask=data[..., 0] > 200) from dipy.reconst.dti import fractional_anisotropy FA = fractional_anisotropy(tenfit.evals) MD = dti.mean_diffusivity(tenfit.evals) wm_mask = (np.logical_or(FA >= 0.4, (np.logical_and(FA >= 0.15, MD >= 0.0011)))) response = recursive_response(gtab, data, mask=wm_mask, sh_order=8, peak_thr=0.01, init_fa=0.08, init_trace=0.0021, iter=8, convergence=0.001, parallel=True) """ We can check the shape of the signal of the response function, which should be like a pancake: """ response_signal = response.on_sphere(sphere) response_actor = fvtk.sphere_funcs(response_signal, sphere) ren = fvtk.ren() fvtk.add(ren, response_actor) print('Saving illustration as csd_recursive_response.png') fvtk.record(ren, out_path='csd_recursive_response.png', size=(200, 200)) """ .. figure:: csd_recursive_response.png :align: center Estimated response function using recursive calibration. """ fvtk.rm(ren, response_actor) """ Now, that we have the response function, we are ready to start the deconvolution process. Let's import the CSD model and fit the datasets. """ from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel csd_model = ConstrainedSphericalDeconvModel(gtab, response) """ For illustration purposes we will fit only a small portion of the data. """ data_small = data[20:50, 55:85, 38:39] csd_fit = csd_model.fit(data_small) """ Show the CSD-based ODFs also known as FODFs (fiber ODFs). """ csd_odf = csd_fit.odf(sphere) """ Here we visualize only a 30x30 region. """ fodf_spheres = fvtk.sphere_funcs(csd_odf, sphere, scale=1.3, norm=False) fvtk.add(ren, fodf_spheres) print('Saving illustration as csd_odfs.png') fvtk.record(ren, out_path='csd_odfs.png', size=(600, 600)) """ .. figure:: csd_odfs.png :align: center CSD ODFs. In Dipy we also provide tools for finding the peak directions (maxima) of the ODFs. For this purpose we recommend using ``peaks_from_model``. """ from dipy.direction import peaks_from_model csd_peaks = peaks_from_model(model=csd_model, data=data_small, sphere=sphere, relative_peak_threshold=.5, min_separation_angle=25, parallel=True) fvtk.clear(ren) fodf_peaks = fvtk.peaks(csd_peaks.peak_dirs, csd_peaks.peak_values, scale=1.3) fvtk.add(ren, fodf_peaks) print('Saving illustration as csd_peaks.png') fvtk.record(ren, out_path='csd_peaks.png', size=(600, 600)) """ .. figure:: csd_peaks.png :align: center CSD Peaks. We can finally visualize both the ODFs and peaks in the same space. """ fodf_spheres.GetProperty().SetOpacity(0.4) fvtk.add(ren, fodf_spheres) print('Saving illustration as csd_both.png') fvtk.record(ren, out_path='csd_both.png', size=(600, 600)) """ .. figure:: csd_both.png :align: center CSD Peaks and ODFs. References ---------- .. [Tournier2007] J-D. Tournier, F. Calamante and A. Connelly, "Robust determination of the fibre orientation distribution in diffusion MRI: Non-negativity constrained super-resolved spherical deconvolution", Neuroimage, vol. 35, no. 4, pp. 1459-1472, 2007. .. [Tax2014] C.M.W. Tax, B. Jeurissen, S.B. Vos, M.A. Viergever, A. Leemans, "Recursive calibration of the fiber response function for spherical deconvolution of diffusion MRI data", Neuroimage, vol. 86, pp. 67-80, 2014. .. 
include:: ../links_names.inc """ dipy-0.13.0/doc/examples/reconst_csd_parallel.py000066400000000000000000000072401317371701200216640ustar00rootroot00000000000000""" ================================= Parallel reconstruction using CSD ================================= This example shows how to use parallelism (multiprocessing) using ``peaks_from_model`` in order to speedup the signal reconstruction process. For this example will we use the same initial steps as we used in :ref:`example_reconst_csd`. Import modules, fetch and read data, apply the mask and calculate the response function. """ import multiprocessing from dipy.data import fetch_stanford_hardi, read_stanford_hardi fetch_stanford_hardi() img, gtab = read_stanford_hardi() data = img.get_data() from dipy.segment.mask import median_otsu maskdata, mask = median_otsu(data, 3, 1, False, vol_idx=range(10, 50), dilate=2) from dipy.reconst.csdeconv import auto_response response, ratio = auto_response(gtab, maskdata, roi_radius=10, fa_thr=0.7) data = maskdata[:, :, 33:37] mask = mask[:, :, 33:37] """ Now we are ready to import the CSD model and fit the datasets. """ from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel csd_model = ConstrainedSphericalDeconvModel(gtab, response) from dipy.data import get_sphere sphere = get_sphere('symmetric724') """ Compute the CSD-based ODFs using ``peaks_from_model``. This function has a parameter called ``parallel`` which allows for the voxels to be processed in parallel. If ``nbr_processes`` is None it will figure out automatically the number of CPUs available in your system. Alternatively, you can set ``nbr_processes`` manually. Here, we show an example where we compare the duration of execution with or without parallelism. """ import time from dipy.direction import peaks_from_model start_time = time.time() csd_peaks_parallel = peaks_from_model(model=csd_model, data=data, sphere=sphere, relative_peak_threshold=.5, min_separation_angle=25, mask=mask, return_sh=True, return_odf=False, normalize_peaks=True, npeaks=5, parallel=True, nbr_processes=None) time_parallel = time.time() - start_time print("peaks_from_model using " + str(multiprocessing.cpu_count()) + " process ran in :" + str(time_parallel) + " seconds") """ ``peaks_from_model`` using 8 processes ran in 114.425682068 seconds """ start_time = time.time() csd_peaks = peaks_from_model(model=csd_model, data=data, sphere=sphere, relative_peak_threshold=.5, min_separation_angle=25, mask=mask, return_sh=True, return_odf=False, normalize_peaks=True, npeaks=5, parallel=False, nbr_processes=None) time_single = time.time() - start_time print("peaks_from_model ran in :" + str(time_single) + " seconds") """ ``peaks_from_model`` ran in 242.772505999 seconds """ print("Speedup factor : " + str(time_single / time_parallel)) """ Speedup factor : 2.12166099088 In Windows if you get a runtime error about frozen executable please start your script by adding your code above in a ``main`` function and use: `` if __name__ == '__main__': import multiprocessing multiprocessing.freeze_support() main() `` """ dipy-0.13.0/doc/examples/reconst_dki.py000066400000000000000000000375231317371701200200150ustar00rootroot00000000000000""" ===================================================================== Reconstruction of the diffusion signal with the kurtosis tensor model ===================================================================== The diffusion kurtosis model is an expansion of the diffusion tensor model (see :ref:`example_reconst_dti`). 
In addition to the diffusion tensor (DT), the diffusion kurtosis model quantifies the degree to which water diffusion in biological tissues is non-Gaussian using the kurtosis tensor (KT) [Jensen2005]_. Measurements of non-Gaussian diffusion from the diffusion kurtosis model are of interest because they can be used to charaterize tissue microstructural heterogeneity [Jensen2010]_ and to derive concrete biophysical parameters, such as the density of axonal fibres and diffusion tortuosity [Fieremans2011]_. Moreover, DKI can be used to resolve crossing fibers in tractography and to obtain invariant rotational measures not limited to well-aligned fiber populations [NetoHe2015]_. The diffusion kurtosis model expresses the diffusion-weighted signal as: .. math:: S(n,b)=S_{0}e^{-bD(n)+\frac{1}{6}b^{2}D(n)^{2}K(n)} where $\mathbf{b}$ is the applied diffusion weighting (which is dependent on the measurement parameters), $S_0$ is the signal in the absence of diffusion gradient sensitization, $\mathbf{D(n)}$ is the value of diffusion along direction $\mathbf{n}$, and $\mathbf{K(n)}$ is the value of kurtosis along direction $\mathbf{n}$. The directional diffusion $\mathbf{D(n)}$ and kurtosis $\mathbf{K(n)}$ can be related to the diffusion tensor (DT) and kurtosis tensor (KT) using the following equations: .. math:: D(n)=\sum_{i=1}^{3}\sum_{j=1}^{3}n_{i}n_{j}D_{ij} and .. math:: K(n)=\frac{MD^{2}}{D(n)^{2}}\sum_{i=1}^{3}\sum_{j=1}^{3}\sum_{k=1}^{3} \sum_{l=1}^{3}n_{i}n_{j}n_{k}n_{l}W_{ijkl} where $D_{ij}$ are the elements of the second-order DT, and $W_{ijkl}$ the elements of the fourth-order KT and $MD$ is the mean diffusivity. As the DT, KT has antipodal symmetry and thus only 15 Wijkl elemments are needed to fully characterize the KT: .. math:: \begin{matrix} ( & W_{xxxx} & W_{yyyy} & W_{zzzz} & W_{xxxy} & W_{xxxz} & ... \\ & W_{xyyy} & W_{yyyz} & W_{xzzz} & W_{yzzz} & W_{xxyy} & ... \\ & W_{xxzz} & W_{yyzz} & W_{xxyz} & W_{xyyz} & W_{xyzz} & & )\end{matrix} In the following example we show how to fit the diffusion kurtosis model on diffusion-weighted multi-shell datasets and how to estimate diffusion kurtosis based statistics. First, we import all relevant modules: """ import numpy as np import matplotlib.pyplot as plt import dipy.reconst.dki as dki import dipy.reconst.dti as dti import dipy.reconst.dki_micro as dki_micro from dipy.data import fetch_cfin_multib from dipy.data import read_cfin_dwi from dipy.segment.mask import median_otsu from scipy.ndimage.filters import gaussian_filter """ DKI requires multi-shell data, i.e. data acquired from more than one non-zero b-value. Here, we use fetch to download a multi-shell dataset which was kindly provided by Hansen and Jespersen (more details about the data are provided in their paper [Hansen2016]_). The total size of the downloaded data is 192 MBytes, however you only need to fetch it once. """ fetch_cfin_multib() img, gtab = read_cfin_dwi() data = img.get_data() affine = img.affine """ Function ``read_cenir_multib`` return img and gtab which contains respectively a nibabel Nifti1Image object (where the data can be extracted) and a GradientTable object with information about the b-values and b-vectors. Before fitting the data, we preform some data pre-processing. We first compute a brain mask to avoid unnecessary calculations on the background of the image. 
""" maskdata, mask = median_otsu(data, 4, 2, False, vol_idx=[0, 1], dilate=1) """ Since the diffusion kurtosis models involves the estimation of a large number of parameters [TaxCMW2015]_ and since the non-Gaussian components of the diffusion signal are more sensitive to artefacts [NetoHe2012]_, it might be favorable to suppress the effects of noise and artefacts before diffusion kurtosis fitting. In this example the effects of noise and artefacts are suppress by using 3D Gaussian smoothing (with a Gaussian kernel with fwhm=1.25) as suggested by pioneer DKI studies (e.g. [Jensen2005]_, [NetoHe2012]_). Although here the Gaussian smoothing is used so that results are comparable to these studies, it is important to note that more advanced noise and artifact suppression algorithms are available in dipy_ (e.g. the non-local means filter :ref:`example-denoise-nlmeans`). """ fwhm = 1.25 gauss_std = fwhm / np.sqrt(8 * np.log(2)) # converting fwhm to Gaussian std data_smooth = np.zeros(data.shape) for v in range(data.shape[-1]): data_smooth[..., v] = gaussian_filter(data[..., v], sigma=gauss_std) """ Now that we have loaded and pre-processed the data we can go forward with DKI fitting. For this, the DKI model is first defined for the data's GradientTable object by instantiating the DiffusionKurtosisModel object in the following way: """ dkimodel = dki.DiffusionKurtosisModel(gtab) """ To fit the data using the defined model object, we call the ``fit`` function of this object: """ dkifit = dkimodel.fit(data_smooth, mask=mask) """ The fit method creates a DiffusionKurtosisFit object, which contains all the diffusion and kurtosis fitting parameters and other DKI attributes. For instance, since the diffusion kurtosis model estimates the diffusion tensor, all diffusion standard tensor statistics can be computed from the DiffusionKurtosisFit instance. For example, we show below how to extract the fractional anisotropy (FA), the mean diffusivity (MD), the axial diffusivity (AD) and the radial diffusivity (RD) from the DiffusionKurtosisiFit instance. """ FA = dkifit.fa MD = dkifit.md AD = dkifit.ad RD = dkifit.rd """ Note that these four standard measures could also be computed from DIPY's DTI module. Theoretically, computing these measures from both models should be analogous. However, according to recent studies, the diffusion statistics from the kurtosis model are expected to have better accuracy [Veraar2011]_, [NetoHe2012]_. For comparison purposes, we calculate below the FA, MD, AD, and RD using DIPY's ``TensorModel``. """ tenmodel = dti.TensorModel(gtab) tenfit = tenmodel.fit(data_smooth, mask=mask) dti_FA = tenfit.fa dti_MD = tenfit.md dti_AD = tenfit.ad dti_RD = tenfit.rd """ The DT based measures can be easily visualized using matplotlib. For example, the FA, MD, AD, and RD obtained from the diffusion kurtosis model (upper panels) and the tensor model (lower panels) are plotted for a selected axial slice. 
""" axial_slice = 9 fig1, ax = plt.subplots(2, 4, figsize=(12, 6), subplot_kw={'xticks': [], 'yticks': []}) fig1.subplots_adjust(hspace=0.3, wspace=0.05) ax.flat[0].imshow(FA[:, :, axial_slice], cmap='gray', vmin=0, vmax=0.7) ax.flat[0].set_title('FA (DKI)') ax.flat[1].imshow(MD[:, :, axial_slice], cmap='gray', vmin=0, vmax=2.0e-3) ax.flat[1].set_title('MD (DKI)') ax.flat[2].imshow(AD[:, :, axial_slice], cmap='gray', vmin=0, vmax=2.0e-3) ax.flat[2].set_title('AD (DKI)') ax.flat[3].imshow(RD[:, :, axial_slice], cmap='gray', vmin=0, vmax=2.0e-3) ax.flat[3].set_title('RD (DKI)') ax.flat[4].imshow(dti_FA[:, :, axial_slice], cmap='gray', vmin=0, vmax=0.7) ax.flat[4].set_title('FA (DTI)') ax.flat[5].imshow(dti_MD[:, :, axial_slice], cmap='gray', vmin=0, vmax=2.0e-3) ax.flat[5].set_title('MD (DTI)') ax.flat[6].imshow(dti_AD[:, :, axial_slice], cmap='gray', vmin=0, vmax=2.0e-3) ax.flat[6].set_title('AD (DTI)') ax.flat[7].imshow(dti_RD[:, :, axial_slice], cmap='gray', vmin=0, vmax=2.0e-3) ax.flat[7].set_title('RD (DTI)') plt.show() fig1.savefig('Diffusion_tensor_measures_from_DTI_and_DKI.png') """ .. figure:: Diffusion_tensor_measures_from_DTI_and_DKI.png :align: center Diffusion tensor measures obtained from the diffusion tensor estimated from DKI (upper panels) and DTI (lower panels). In addition to the standard diffusion statistics, the DiffusionKurtosisFit instance can be used to estimate the non-Gaussian measures of mean kurtosis (MK), the axial kurtosis (AK) and the radial kurtosis (RK). Kurtosis measures are susceptible to high amplitude outliers. The impact of high amplitude kurtosis outliers can be removed by introducing as an optional input the extremes of the typical values of kurtosis. Here these are assumed to be on the range between 0 and 3): """ MK = dkifit.mk(0, 3) AK = dkifit.ak(0, 3) RK = dkifit.rk(0, 3) """ Now we are ready to plot the kurtosis standard measures using matplotlib: """ fig2, ax = plt.subplots(1, 3, figsize=(12, 6), subplot_kw={'xticks': [], 'yticks': []}) fig2.subplots_adjust(hspace=0.3, wspace=0.05) ax.flat[0].imshow(MK[:, :, axial_slice], cmap='gray', vmin=0, vmax=1.5) ax.flat[0].set_title('MK') ax.flat[1].imshow(AK[:, :, axial_slice], cmap='gray', vmin=0, vmax=1.5) ax.flat[1].set_title('AK') ax.flat[2].imshow(RK[:, :, axial_slice], cmap='gray', vmin=0, vmax=1.5) ax.flat[2].set_title('RK') plt.show() fig2.savefig('Kurtosis_tensor_standard_measures.png') """ .. figure:: Kurtosis_tensor_standard_measures.png :align: center Kurtosis tensor standard measures obtained from the kurtosis tensor. The non-Gaussian behaviour of the diffusion signal is larger when water diffusion is restricted by compartments and barriers (e.g., myelin sheath). Therefore, as the figure above shows, white matter kurtosis values are smaller along the axial direction of fibers (smaller amplitudes shown in the AK map) than for the radial directions (larger amplitudes shown in the RK map). As mentioned above, DKI can also be used to derive concrete biophysical parameters by applying microstructural models to DT and KT estimated from DKI. For instance, Fieremans et al. [Fieremans2011]_ showed that DKI can be used to estimate the contribution of hindered and restricted diffusion for well-aligned fibers. These tensors can be also interpreted as the influences of intra- and extra-cellular compartments and can be used to estimate the axonal volume fraction and diffusion extra-cellular tortuosity. 
According to recent studies, these latter measures can be used to distinguish processes of axonal loss from processes of myelin degeneration [Fieremans2012]_. The model proposed by Fieremans and colleagues can be defined in dipy by instantiating the 'KurtosisMicrostructureModel' object in the following way: """ dki_micro_model = dki_micro.KurtosisMicrostructureModel(gtab) """ Before fitting this microstructural model, it is useful to indicate the regions in which this model provides meaningful information (i.e. voxels of well-aligned fibers). Following Fieremans et al. [Fieremans2011]_, a simple way to select this region is to generate a well-aligned fiber mask based on the values of diffusion sphericity, planarity and linearity. Here we will follow these selection criteria for a better comparision of our figures with the original article published by Fieremans et al. [Fieremans2011]_. Nevertheless, it is important to note that voxels with well-aligned fibers can be selected based on other approaches such as using predefined regions of interest. """ well_aligned_mask = np.ones(data.shape[:-1], dtype='bool') # Diffusion coefficient of linearity (cl) has to be larger than 0.4, thus # we exclude voxels with cl < 0.4. cl = dkifit.linearity.copy() well_aligned_mask[cl < 0.4] = False # Diffusion coefficient of planarity (cp) has to be lower than 0.2, thus # we exclude voxels with cp > 0.2. cp = dkifit.planarity.copy() well_aligned_mask[cp > 0.2] = False # Diffusion coefficient of sphericity (cs) has to be lower than 0.35, thus # we exclude voxels with cs > 0.35. cs = dkifit.sphericity.copy() well_aligned_mask[cs > 0.35] = False # Removing nan associated with background voxels well_aligned_mask[np.isnan(cl)] = False well_aligned_mask[np.isnan(cp)] = False well_aligned_mask[np.isnan(cs)] = False """ Analogous to DKI, the data fit can be done by calling the ``fit`` function of the model's object as follows: """ dki_micro_fit = dki_micro_model.fit(data_smooth, mask=well_aligned_mask) """ The KurtosisMicrostructureFit object created by this ``fit`` function can then be used to extract model parameters such as the axonal water fraction and diffusion hindered tortuosity: """ AWF = dki_micro_fit.awf TORT = dki_micro_fit.tortuosity """ These parameters are plotted below on top of the mean kurtosis maps: """ fig3, ax = plt.subplots(1, 2, figsize=(9, 4), subplot_kw={'xticks': [], 'yticks': []}) AWF[AWF == 0] = np.nan TORT[TORT == 0] = np.nan ax[0].imshow(MK[:, :, axial_slice], cmap=plt.cm.gray, interpolation='nearest') im0 = ax[0].imshow(AWF[:, :, axial_slice], cmap=plt.cm.Reds, alpha=0.9, vmin=0.3, vmax=0.7, interpolation='nearest') fig3.colorbar(im0, ax=ax.flat[0]) ax[1].imshow(MK[:, :, axial_slice], cmap=plt.cm.gray, interpolation='nearest') im1 = ax[1].imshow(TORT[:, :, axial_slice], cmap=plt.cm.Blues, alpha=0.9, vmin=2, vmax=6, interpolation='nearest') fig3.colorbar(im1, ax=ax.flat[1]) fig3.savefig('Kurtosis_Microstructural_measures.png') """ .. figure:: Kurtosis_Microstructural_measures.png :align: center Axonal water fraction (left panel) and tortuosity (right panel) values of well-aligned fiber regions overlaid on a top of a mean kurtosis all-brain image. References ---------- .. [TaxCMW2015] Tax CMW, Otte WM, Viergever MA, Dijkhuizen RM, Leemans A (2014). REKINDLE: Robust extraction of kurtosis INDices with linear estimation. Magnetic Resonance in Medicine 73(2): 794-808. .. [Jensen2005] Jensen JH, Helpern JA, Ramani A, Lu H, Kaczynski K (2005). 
Diffusional Kurtosis Imaging: The Quantification of Non_Gaussian Water Diffusion by Means of Magnetic Resonance Imaging. Magnetic Resonance in Medicine 53: 1432-1440 .. [Jensen2010] Jensen JH, Helpern JA (2010). MRI quantification of non-Gaussian water diffusion by kurtosis analysis. NMR in Biomedicine 23(7): 698-710 .. [Fieremans2011] Fieremans E, Jensen JH, Helpern JA (2011). White matter characterization with diffusion kurtosis imaging. NeuroImage 58: 177-188 .. [Fieremans2012] Fieremans E, Jensen JH, Helpern JA, Kim S, Grossman RI, Inglese M, Novikov DS. (2012). Diffusion distinguishes between axonal loss and demyelination in brain white matter. Proceedings of the 20th Annual Meeting of the International Society for Magnetic Resonance Medicine; Melbourne, Australia. May 5-11. .. [Hansen2016] Hansen, B, Jespersen, SN (2016). Data for evaluation of fast kurtosis strategies, b-value optimization and exploration of diffusion MRI contrast. Scientific Data 3: 160072 doi:10.1038/sdata.2016.72 .. [NetoHe2012] Neto Henriques R, Ferreira H, Correia M, (2012). Diffusion kurtosis imaging of the healthy human brain. Master Dissertation Bachelor and Master Programin Biomedical Engineering and Biophysics, Faculty of Sciences. http://repositorio.ul.pt/bitstream/10451/8511/1/ulfc104137_tm_Rafael_Henriques.pdf .. [NetoHe2015] Neto Henriques R, Correia MM, Nunes RG, Ferreira HA (2015). Exploring the 3D geometry of the diffusion kurtosis tensor - Impact on the development of robust tractography procedures and novel biomarkers, NeuroImage 111: 85-99 .. [Veraar2011] Veraart J, Poot DH, Van Hecke W, Blockx I, Van der Linden A, Verhoye M, Sijbers J (2011). More Accurate Estimation of Diffusion Tensor Parameters Using Diffusion Kurtosis Imaging. Magnetic Resonance in Medicine 65(1): 138-145 .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/reconst_dsi.py000066400000000000000000000064411317371701200200200ustar00rootroot00000000000000""" =========================================== Reconstruct with Diffusion Spectrum Imaging =========================================== We show how to apply Diffusion Spectrum Imaging [Wedeen08]_ to diffusion MRI datasets of Cartesian keyhole diffusion gradients. First import the necessary modules: """ from dipy.data import fetch_taiwan_ntu_dsi, read_taiwan_ntu_dsi, get_sphere from dipy.reconst.dsi import DiffusionSpectrumModel """ Download and read the data for this tutorial. """ fetch_taiwan_ntu_dsi() img, gtab = read_taiwan_ntu_dsi() """ img contains a nibabel Nifti1Image object (data) and gtab contains a GradientTable object (gradient information e.g. b-values). For example to read the b-values it is possible to write print(gtab.bvals). Load the raw diffusion data and the affine. """ data = img.get_data() print('data.shape (%d, %d, %d, %d)' % data.shape) """ data.shape ``(96, 96, 60, 203)`` This dataset has anisotropic voxel sizes, therefore reslicing is necessary. """ affine = img.affine """ Read the voxel size from the image header. """ voxel_size = img.header.get_zooms()[:3] """ Instantiate the Model and apply it to the data. """ dsmodel = DiffusionSpectrumModel(gtab) """ Lets just use one slice only from the data. 
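As an aside, the reslicing mentioned earlier can be done with DIPY's ``reslice`` function.
The sketch below shows roughly how, using an assumed target resolution of 2 mm isotropic
voxels; its output is not used in the rest of this example, which simply works on the
native grid.
"""

# Optional, illustrative reslicing to isotropic voxels (output not used below).
# The target zooms are an assumption, not a value taken from this dataset.
from dipy.align.reslice import reslice

new_zooms = (2., 2., 2.)
data_iso, affine_iso = reslice(data, affine, voxel_size, new_zooms)

"""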
""" dataslice = data[:, :, data.shape[2] // 2] dsfit = dsmodel.fit(dataslice) """ Load an odf reconstruction sphere """ sphere = get_sphere('symmetric724') """ Calculate the ODFs with this specific sphere """ ODF = dsfit.odf(sphere) print('ODF.shape (%d, %d, %d)' % ODF.shape) """ ODF.shape ``(96, 96, 724)`` In a similar fashion it is possible to calculate the PDFs of all voxels in one call with the following way """ PDF = dsfit.pdf() print('PDF.shape (%d, %d, %d, %d, %d)' % PDF.shape) """ PDF.shape ``(96, 96, 17, 17, 17)`` We see that even for a single slice this PDF array is close to 345 MBytes so we really have to be careful with memory usage when use this function with a full dataset. The simple solution is to generate/analyze the ODFs/PDFs by iterating through each voxel and not store them in memory if that is not necessary. """ from dipy.core.ndindex import ndindex for index in ndindex(dataslice.shape[:2]): pdf = dsmodel.fit(dataslice[index]).pdf() """ If you really want to save the PDFs of a full dataset on the disc we recommend using memory maps (``numpy.memmap``) but still have in mind that even if you do that for example for a dataset of volume size ``(96, 96, 60)`` you will need about 2.5 GBytes which can take less space when reasonable spheres (with < 1000 vertices) are used. Let's now calculate a map of Generalized Fractional Anisotropy (GFA) [Tuch04]_ using the DSI ODFs. """ from dipy.reconst.odf import gfa GFA = gfa(ODF) import matplotlib.pyplot as plt fig_hist, ax = plt.subplots(1) ax.set_axis_off() plt.imshow(GFA.T) plt.savefig('dsi_gfa.png', bbox_inches='tight', origin='lower', cmap='gray') """ .. figure:: dsi_gfa.png :align: center See also :ref:`example_reconst_dsi_metrics` for calculating different types of DSI maps. .. [Wedeen08] Wedeen et al., Diffusion spectrum magnetic resonance imaging (DSI) tractography of crossing fibers, Neuroimage, vol 41, no 4, 1267-1277, 2008. .. [Tuch04] Tuch, D.S, Q-ball imaging, MRM, vol 52, no 6, 1358-1372, 2004. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/reconst_dsi_metrics.py000066400000000000000000000106351317371701200215460ustar00rootroot00000000000000""" =============================== Calculate DSI-based scalar maps =============================== We show how to calculate two DSI-based scalar maps: return to origin probability (rtop) [Descoteaux2011]_ and mean square displacement (msd) [Wu2007]_, [Wu2008]_ on your dataset. First import the necessary modules: """ import numpy as np import matplotlib.pyplot as plt from dipy.data import fetch_taiwan_ntu_dsi, read_taiwan_ntu_dsi from dipy.reconst.dsi import DiffusionSpectrumModel """ Download and read the data for this tutorial. """ fetch_taiwan_ntu_dsi() img, gtab = read_taiwan_ntu_dsi() """ img contains a nibabel Nifti1Image object (data) and gtab contains a GradientTable object (gradient information e.g. b-values). For example to read the b-values it is possible to write print(gtab.bvals). Load the raw diffusion data and the affine. """ data = img.get_data() affine = img.affine print('data.shape (%d, %d, %d, %d)' % data.shape) """ Instantiate the Model and apply it to the data. """ dsmodel = DiffusionSpectrumModel(gtab, qgrid_size=35, filter_width=18.5) """ Lets just use one slice only from the data. """ dataslice = data[30:70, 20:80, data.shape[2] // 2] """ Normalize the signal by the b0 """ dataslice = dataslice / (dataslice[..., 0, None]).astype(np.float) """ Calculate the return to origin probability on the signal that corresponds to the integral of the signal. 
""" print('Calculating... rtop_signal') rtop_signal = dsmodel.fit(dataslice).rtop_signal() """ Now we calculate the return to origin probability on the propagator, that corresponds to its central value. By default the propagator is divided by its sum in order to obtain a properly normalized pdf, however this normalization changes the values of rtop, therefore in order to compare it with the rtop previously calculated on the signal we turn the normalized parameter to false. """ print('Calculating... rtop_pdf') rtop_pdf = dsmodel.fit(dataslice).rtop_pdf(normalized=False) """ In theory, these two measures must be equal, to show that we calculate the mean square error on this two measures. """ mse = np.sum((rtop_signal - rtop_pdf) ** 2) / rtop_signal.size print("mse = %f" % mse) """ mse = 0.000000 Leaving the normalized parameter to the default changes the values of the rtop but not the contrast between the voxels. """ print('Calculating... rtop_pdf_norm') rtop_pdf_norm = dsmodel.fit(dataslice).rtop_pdf() """ Let's calculate the mean square displacement on the normalized propagator. """ print('Calculating... msd_norm') msd_norm = dsmodel.fit(dataslice).msd_discrete() """ Turning the normalized parameter to false makes it possible to calculate the mean square displacement on the propagator without normalization. """ print('Calculating... msd') msd = dsmodel.fit(dataslice).msd_discrete(normalized=False) """ Show the rtop images and save them in rtop.png. """ fig = plt.figure(figsize=(6, 6)) ax1 = fig.add_subplot(2, 2, 1, title='rtop_signal') ax1.set_axis_off() ind = ax1.imshow(rtop_signal.T, interpolation='nearest', origin='lower') plt.colorbar(ind) ax2 = fig.add_subplot(2, 2, 2, title='rtop_pdf_norm') ax2.set_axis_off() ind = ax2.imshow(rtop_pdf_norm.T, interpolation='nearest', origin='lower') plt.colorbar(ind) ax3 = fig.add_subplot(2, 2, 3, title='rtop_pdf') ax3.set_axis_off() ind = ax3.imshow(rtop_pdf.T, interpolation='nearest', origin='lower') plt.colorbar(ind) plt.savefig('rtop.png') """ .. figure:: rtop.png :align: center Return to origin probability. Show the msd images and save them in msd.png. """ fig = plt.figure(figsize=(7, 3)) ax1 = fig.add_subplot(1, 2, 1, title='msd_norm') ax1.set_axis_off() ind = ax1.imshow(msd_norm.T, interpolation='nearest', origin='lower') plt.colorbar(ind) ax2 = fig.add_subplot(1, 2, 2, title='msd') ax2.set_axis_off() ind = ax2.imshow(msd.T, interpolation='nearest', origin='lower') plt.colorbar(ind) plt.savefig('msd.png') """ .. figure:: msd.png :align: center Mean square displacement. .. [Descoteaux2011] Descoteaux M. et. al , "Multiple q-shell diffusion propagator imaging", Medical Image Analysis, vol 15, no 4, p. 603-621, 2011. .. [Wu2007] Wu Y. et al., "Hybrid diffusion imaging", NeuroImage, vol 36, p. 617-629, 2007. .. [Wu2008] Wu Y. et al., "Computation of Diffusion Function Measures in q-Space Using Magnetic Resonance Hybrid Diffusion Imaging", IEEE Transactions on Medical Imaging, vol 27, no 6, p. 858-865, 2008. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/reconst_dsid.py000066400000000000000000000047171317371701200201700ustar00rootroot00000000000000""" ======================== DSI Deconvolution vs DSI ======================== An alternative method to DSI is the method proposed by [Canales10]_ which is called DSI with Deconvolution. This algorithm is using Lucy-Richardson deconvolution in the diffusion propagator with the goal to create sharper ODFs with higher angular resolution. 
In this example we will show with simulated data how this method's ODF performs against standard DSI ODF and a ground truth multi tensor ODF. """ import numpy as np from dipy.sims.voxel import multi_tensor, multi_tensor_odf from dipy.data import get_data, get_sphere from dipy.core.gradients import gradient_table from dipy.reconst.dsi import (DiffusionSpectrumDeconvModel, DiffusionSpectrumModel) """ For the simulation we will use a standard DSI acqusition scheme with 514 gradient directions and 1 S0. """ btable = np.loadtxt(get_data('dsi515btable')) gtab = gradient_table(btable[:, 0], btable[:, 1:]) """ Let's create a multi tensor with 2 fiber directions at 60 degrees. """ evals = np.array([[0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003]]) directions = [(-30, 0), (30, 0)] fractions = [50, 50] signal, _ = multi_tensor(gtab, evals, 100, angles=directions, fractions=fractions, snr=None) sphere = get_sphere('symmetric724').subdivide(1) odf_gt = multi_tensor_odf(sphere.vertices, evals, angles=directions, fractions=fractions) """ Perform the reconstructions with standard DSI and DSI with deconvolution. """ dsi_model = DiffusionSpectrumModel(gtab) dsi_odf = dsi_model.fit(signal).odf(sphere) dsid_model = DiffusionSpectrumDeconvModel(gtab) dsid_odf = dsid_model.fit(signal).odf(sphere) """ Finally, we can visualize the ground truth ODF, together with the DSI and DSI with deconvolution ODFs and observe that with the deconvolved method it is easier to resolve the correct fiber directions because the ODF is sharper. """ from dipy.viz import fvtk ren = fvtk.ren() odfs = np.vstack((odf_gt, dsi_odf, dsid_odf))[:, None, None] odf_actor = fvtk.sphere_funcs(odfs, sphere) odf_actor.RotateX(90) fvtk.add(ren, odf_actor) fvtk.record(ren, out_path='dsid.png', size=(300, 300)) """ .. figure:: dsid.png :align: center Ground truth ODF (left), DSI ODF (middle), DSI with Deconvolution ODF (right). .. [Canales10] Canales-Rodriguez et al., Deconvolution in Diffusion Spectrum Imaging, Neuroimage, vol 50, no 1, p. 136-149, 2010. """ dipy-0.13.0/doc/examples/reconst_dti.py000066400000000000000000000221101317371701200200100ustar00rootroot00000000000000""" .. _reconst_dti: ============================================================ Reconstruction of the diffusion signal with the Tensor model ============================================================ The diffusion tensor model is a model that describes the diffusion within a voxel. First proposed by Basser and colleagues [Basser1994]_, it has been very influential in demonstrating the utility of diffusion MRI in characterizing the micro-structure of white matter tissue and of the biophysical properties of tissue, inferred from local diffusion properties and it is still very commonly used. The diffusion tensor models the diffusion signal as: .. math:: \frac{S(\mathbf{g}, b)}{S_0} = e^{-b\mathbf{g}^T \mathbf{D} \mathbf{g}} Where $\mathbf{g}$ is a unit vector in 3 space indicating the direction of measurement and b are the parameters of measurement, such as the strength and duration of diffusion-weighting gradient. $S(\mathbf{g}, b)$ is the diffusion-weighted signal measured and $S_0$ is the signal conducted in a measurement with no diffusion weighting. $\mathbf{D}$ is a positive-definite quadratic form, which contains six free parameters to be fit. These six parameters are: .. 
math:: \mathbf{D} = \begin{pmatrix} D_{xx} & D_{xy} & D_{xz} \\ D_{yx} & D_{yy} & D_{yz} \\ D_{zx} & D_{zy} & D_{zz} \\ \end{pmatrix} This matrix is a variance/covariance matrix of the diffusivity along the three spatial dimensions. Note that we can assume that diffusivity has antipodal symmetry, so elements across the diagonal are equal. For example: $D_{xy} = D_{yx}$. This is why there are only 6 free parameters to estimate here. In the following example we show how to reconstruct your diffusion datasets using a single tensor model. First import the necessary modules: ``numpy`` is for numerical computation """ import numpy as np """ ``nibabel`` is for loading imaging datasets """ import nibabel as nib """ ``dipy.reconst`` is for the reconstruction algorithms which we use to create voxel models from the raw data. """ import dipy.reconst.dti as dti """ ``dipy.data`` is used for small datasets that we use in tests and examples. """ from dipy.data import fetch_stanford_hardi """ Fetch will download the raw dMRI dataset of a single subject. The size of the dataset is 87 MBytes. You only need to fetch once. """ fetch_stanford_hardi() """ Next, we read the saved dataset """ from dipy.data import read_stanford_hardi img, gtab = read_stanford_hardi() """ ``img`` contains a nibabel Nifti1Image object (with the data) and gtab contains a ``GradientTable`` object (information about the gradients e.g. b-values and b-vectors). """ data = img.get_data() print('data.shape (%d, %d, %d, %d)' % data.shape) """ data.shape ``(81, 106, 76, 160)`` First of all, we mask and crop the data. This is a quick way to avoid calculating Tensors on the background of the image. This is done using dipy_'s ``mask`` module. """ from dipy.segment.mask import median_otsu maskdata, mask = median_otsu(data, 3, 1, True, vol_idx=range(10, 50), dilate=2) print('maskdata.shape (%d, %d, %d, %d)' % maskdata.shape) """ maskdata.shape ``(72, 87, 59, 160)`` Now that we have prepared the datasets we can go forward with the voxel reconstruction. First, we instantiate the Tensor model in the following way. """ tenmodel = dti.TensorModel(gtab) """ Fitting the data is very simple. We just need to call the fit method of the TensorModel in the following way: """ tenfit = tenmodel.fit(maskdata) """ The fit method creates a ``TensorFit`` object which contains the fitting parameters and other attributes of the model. For example we can generate fractional anisotropy (FA) from the eigen-values of the tensor. FA is used to characterize the degree to which the distribution of diffusion in a voxel is directional. That is, whether there is relatively unrestricted diffusion in one particular direction. Mathematically, FA is defined as the normalized variance of the eigen-values of the tensor: .. math:: FA = \sqrt{\frac{1}{2}\frac{(\lambda_1-\lambda_2)^2+(\lambda_1- \lambda_3)^2+(\lambda_2-\lambda_3)^2}{\lambda_1^2+ \lambda_2^2+\lambda_3^2}} Note that FA should be interpreted carefully. It may be an indication of the density of packing of fibers in a voxel, and the amount of myelin wrapping these axons, but it is not always a measure of "tissue integrity". For example, FA may decrease in locations in which there is fanning of white matter fibers, or where more than one population of white matter fibers crosses. 
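To make the link between the FA formula above and the code explicit, the block below is a
deliberately naive numpy transcription of that equation applied to the fitted eigenvalues.
It is only a sketch: background voxels with all-zero eigenvalues will produce NaNs (as
discussed below), and in practice you should use DIPY's ``fractional_anisotropy``
function, which is what we do next.
"""

# Naive element-wise transcription of the FA formula (illustration only).
ev1 = tenfit.evals[..., 0]
ev2 = tenfit.evals[..., 1]
ev3 = tenfit.evals[..., 2]
FA_manual = np.sqrt(0.5 * ((ev1 - ev2) ** 2 + (ev1 - ev3) ** 2 +
                           (ev2 - ev3) ** 2) /
                    (ev1 ** 2 + ev2 ** 2 + ev3 ** 2))

"""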
""" print('Computing anisotropy measures (FA, MD, RGB)') from dipy.reconst.dti import fractional_anisotropy, color_fa, lower_triangular FA = fractional_anisotropy(tenfit.evals) """ In the background of the image the fitting will not be accurate there is no signal and possibly we will find FA values with nans (not a number). We can easily remove these in the following way. """ FA[np.isnan(FA)] = 0 """ Saving the FA images is very easy using nibabel_. We need the FA volume and the affine matrix which transform the image's coordinates to the world coordinates. Here, we choose to save the FA in ``float32``. """ fa_img = nib.Nifti1Image(FA.astype(np.float32), img.affine) nib.save(fa_img, 'tensor_fa.nii.gz') """ You can now see the result with any nifti viewer or check it slice by slice using matplotlib_'s ``imshow``. In the same way you can save the eigen values, the eigen vectors or any other properties of the tensor. """ evecs_img = nib.Nifti1Image(tenfit.evecs.astype(np.float32), img.affine) nib.save(evecs_img, 'tensor_evecs.nii.gz') """ Other tensor statistics can be calculated from the ``tenfit`` object. For example, a commonly calculated statistic is the mean diffusivity (MD). This is simply the mean of the eigenvalues of the tensor. Since FA is a normalized measure of variance and MD is the mean, they are often used as complimentary measures. In DIPY, there are two equivalent ways to calculate the mean diffusivity. One is by calling the ``mean_diffusivity`` module function on the eigen-values of the ``TensorFit`` class instance: """ MD1 = dti.mean_diffusivity(tenfit.evals) nib.save(nib.Nifti1Image(MD1.astype(np.float32), img.affine), 'tensors_md.nii.gz') """ The other is to call the ``TensorFit`` class method: """ MD2 = tenfit.md """ Obviously, the quantities are identical. We can also compute the colored FA or RGB-map [Pajevic1999]_. First, we make sure that the FA is scaled between 0 and 1, we compute the RGB map and save it. """ FA = np.clip(FA, 0, 1) RGB = color_fa(FA, tenfit.evecs) nib.save(nib.Nifti1Image(np.array(255 * RGB, 'uint8'), img.affine), 'tensor_rgb.nii.gz') """ Let's try to visualize the tensor ellipsoids of a small rectangular area in an axial slice of the splenium of the corpus callosum (CC). """ print('Computing tensor ellipsoids in a part of the splenium of the CC') from dipy.data import get_sphere sphere = get_sphere('symmetric724') from dipy.viz import fvtk ren = fvtk.ren() evals = tenfit.evals[13:43, 44:74, 28:29] evecs = tenfit.evecs[13:43, 44:74, 28:29] """ We can color the ellipsoids using the ``color_fa`` values that we calculated above. In this example we additionally normalize the values to increase the contrast. """ cfa = RGB[13:43, 44:74, 28:29] cfa /= cfa.max() fvtk.add(ren, fvtk.tensor(evals, evecs, cfa, sphere)) print('Saving illustration as tensor_ellipsoids.png') fvtk.record(ren, n_frames=1, out_path='tensor_ellipsoids.png', size=(600, 600)) """ .. figure:: tensor_ellipsoids.png :align: center Tensor Ellipsoids. """ fvtk.clear(ren) """ Finally, we can visualize the tensor Orientation Distribution Functions for the same area as we did with the ellipsoids. """ tensor_odfs = tenmodel.fit(data[20:50, 55:85, 38:39]).odf(sphere) fvtk.add(ren, fvtk.sphere_funcs(tensor_odfs, sphere, colormap=None)) #fvtk.show(r) print('Saving illustration as tensor_odfs.png') fvtk.record(ren, n_frames=1, out_path='tensor_odfs.png', size=(600, 600)) """ .. figure:: tensor_odfs.png :align: center Tensor ODFs. 
Note that while the tensor model is an accurate and reliable model of the diffusion signal in the white matter, it has the drawback that it only has one principal diffusion direction. Therefore, in locations in the brain that contain multiple fiber populations crossing each other, the tensor model may indicate that the principal diffusion direction is intermediate to these directions. Therefore, using the principal diffusion direction for tracking in these locations may be misleading and may lead to errors in defining the tracks. Fortunately, other reconstruction methods can be used to represent the diffusion and fiber orientations in those locations. These are presented in other examples. References ---------- .. [Basser1994] Basser PJ, Mattielo J, LeBihan (1994). MR diffusion tensor spectroscopy and imaging. .. [Pajevic1999] Pajevic S, Pierpaoli (1999). Color schemes to represent the orientation of anisotropic tissues from diffusion tensor data: application to white matter fiber tract mapping in the human brain. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/reconst_fwdti.py000066400000000000000000000203731317371701200203560ustar00rootroot00000000000000""" ========================================================================== Using the free water elimination model to remove free water contamination ========================================================================== As shown previously (see :ref:`example_reconst_dti`), the diffusion tensor model is a simple way to characterize diffusion anisotropy. However, in regions near the cerebral ventricle and parenchyma can be underestimated by partial volume effects of the cerebral spinal fluid (CSF). This free water contamination can particularly corrupt Diffusion Tensor Imaging analysis of microstructural changes when different groups of subjects show different brain morphology (e.g. brain ventricle enlargement associated with brain tissue atrophy that occurs in several brain pathologies and ageing). A way to remove this free water influences is to expand the DTI model to take into account an extra compartment representing the contributions of free water diffusion. The expression of the expanded DTI model is shown below: .. math:: S(\mathbf{g}, b) = S_0(1-f)e^{-b\mathbf{g}^T \mathbf{D} \mathbf{g}}+S_0fe^{-b D_{iso}} where $\mathbf{g}$ and $b$ are diffusion gradient direction and weighted (more information see :ref:`example_reconst_dti`), $S(\mathbf{g}, b)$ is the diffusion-weighted signal measured, $S_0$ is the signal in a measurement with no diffusion weighting, $\mathbf{D}$ is the diffusion tensor, $f$ the volume fraction of the free water component, and $D_{iso}$ is the isotropic value of the free water diffusion (normally set to $3.0 \times 10^{-3} mm^{2}s^{-1}$). In this example, we show how to process a diffusion weighting dataset using the free water elimination. Let's start by importing the relevant modules: """ import numpy as np import dipy.reconst.fwdti as fwdti import dipy.reconst.dti as dti import matplotlib.pyplot as plt from dipy.data import fetch_cenir_multib from dipy.data import read_cenir_multib from dipy.segment.mask import median_otsu """ Without spatial constrains the free water elimination model cannot be solved in data acquired from one non-zero b-value [Hoy2014]_. Therefore, here we download a dataset that was required from multiple b-values. 
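Before turning to the real data, it can be helpful to see the two-compartment signal
equation above in action. The short numpy sketch below simulates the signal decay along a
single gradient direction for a few free water fractions; all parameter values are
illustrative assumptions, not values taken from the dataset used in this example, and the
resulting plot is not embedded in this page.
"""

# Toy illustration of the two-compartment (free water) signal equation.
# All parameter values are illustrative assumptions.
b_toy = np.linspace(0, 2000, 50)  # b-values in s/mm^2
D_tissue = 0.8e-3                 # assumed tissue diffusivity along g (mm^2/s)
D_iso = 3.0e-3                    # free water diffusivity (mm^2/s)
for f_toy in (0.0, 0.3, 0.6):     # assumed free water volume fractions
    signal_toy = ((1 - f_toy) * np.exp(-b_toy * D_tissue) +
                  f_toy * np.exp(-b_toy * D_iso))
    plt.plot(b_toy, signal_toy, label='f = %.1f' % f_toy)
plt.xlabel('b-value (s/mm$^2$)')
plt.ylabel('S / S0')
plt.legend()
plt.savefig('fwdti_toy_signal.png')
plt.close()

"""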
""" fetch_cenir_multib(with_raw=False) """ From the downloaded data, we read only the data acquired with b-values up to 2000 $s/mm^2$ to decrease the influence of non-Gaussian diffusion effects of the tisse which are not taken into account by the free water elimination model [Hoy2014]_. """ bvals = [200, 400, 1000, 2000] img, gtab = read_cenir_multib(bvals) data = img.get_data() affine = img.affine """ The free water DTI model can take some minutes to process the full data set. Thus, we remove the background of the image to avoid unnecessary calculations. """ maskdata, mask = median_otsu(data, 4, 2, False, vol_idx=[0, 1], dilate=1) """ Moreover, for illustration purposes we process only an axial slice of the data. """ axial_slice = 40 mask_roi = np.zeros(data.shape[:-1], dtype=bool) mask_roi[:, :, axial_slice] = mask[:, :, axial_slice] """ The free water elimination model fit can then be initialized by instantiating a FreeWaterTensorModel class object: """ fwdtimodel = fwdti.FreeWaterTensorModel(gtab) """ The data can then be fitted using the ``fit`` function of the defined model object: """ fwdtifit = fwdtimodel.fit(data, mask=mask_roi) """ This 2-steps procedure will create a FreeWaterTensorFit object which contains all the diffusion tensor statistics free for free water contaminations. Below we extract the fractional anisotropy (FA) and the mean diffusivity (MD) of the free water diffusion tensor.""" FA = fwdtifit.fa MD = fwdtifit.md """ For comparison we also compute the same standard measures processed by the standard DTI model """ dtimodel = dti.TensorModel(gtab) dtifit = dtimodel.fit(data, mask=mask_roi) dti_FA = dtifit.fa dti_MD = dtifit.md """ Below the FA values for both free water elimnantion DTI model and standard DTI model are plotted in panels A and B, while the repective MD values are ploted in panels D and E. For a better visualization of the effect of the free water correction, the differences between these two metrics are shown in panels C and E. In addition to the standard diffusion statistics, the estimated volume fraction of the free water contamination is shown on panel G. """ fig1, ax = plt.subplots(2, 4, figsize=(12, 6), subplot_kw={'xticks': [], 'yticks': []}) fig1.subplots_adjust(hspace=0.3, wspace=0.05) ax.flat[0].imshow(FA[:, :, axial_slice].T, origin='lower', cmap='gray', vmin=0, vmax=1) ax.flat[0].set_title('A) fwDTI FA') ax.flat[1].imshow(dti_FA[:, :, axial_slice].T, origin='lower', cmap='gray', vmin=0, vmax=1) ax.flat[1].set_title('B) standard DTI FA') FAdiff = abs(FA[:, :, axial_slice] - dti_FA[:, :, axial_slice]) ax.flat[2].imshow(FAdiff.T, cmap='gray', origin='lower', vmin=0, vmax=1) ax.flat[2].set_title('C) FA difference') ax.flat[3].axis('off') ax.flat[4].imshow(MD[:, :, axial_slice].T, origin='lower', cmap='gray', vmin=0, vmax=2.5e-3) ax.flat[4].set_title('D) fwDTI MD') ax.flat[5].imshow(dti_MD[:, :, axial_slice].T, origin='lower', cmap='gray', vmin=0, vmax=2.5e-3) ax.flat[5].set_title('E) standard DTI MD') MDdiff = abs(MD[:, :, axial_slice] - dti_MD[:, :, axial_slice]) ax.flat[6].imshow(MDdiff.T, origin='lower', cmap='gray', vmin=0, vmax=2.5e-3) ax.flat[6].set_title('F) MD difference') F = fwdtifit.f ax.flat[7].imshow(F[:, :, axial_slice].T, origin='lower', cmap='gray', vmin=0, vmax=1) ax.flat[7].set_title('G) free water volume') plt.show() fig1.savefig('In_vivo_free_water_DTI_and_standard_DTI_measures.png') """ .. 
figure:: In_vivo_free_water_DTI_and_standard_DTI_measures.png :align: center In vivo diffusion measures obtain from the free water DTI and standard DTI. The values of Fractional Anisotropy for the free water DTI model and standard DTI model and their difference are shown in the upper panels (A-C), while respective MD values are shown in the lower panels (D-F). In addition the free water volume fraction estimated from the fwDTI model is shown in panel G. From the figure, one can observe that the free water elimination model produces in general higher values of FA and lower values of MD than the standard DTI model. These differences in FA and MD estimation are expected due to the suppression of the free water isotropic diffusion components. Unexpected high amplitudes of FA are however observed in the periventricular gray matter. This is a known artefact of regions associated to voxels with high water volume fraction (i.e. voxels containing basically CSF). We are able to remove this problematic voxels by excluding all FA values associated with measured volume fractions above a reasonable threshold of 0.7: """ FA[F > 0.7] = 0 dti_FA[F > 0.7] = 0 """ Above we reproduce the plots of the in vivo FA from the two DTI fits and where we can see that the inflated FA values were practically removed: """ fig1, ax = plt.subplots(1, 3, figsize=(9, 3), subplot_kw={'xticks': [], 'yticks': []}) fig1.subplots_adjust(hspace=0.3, wspace=0.05) ax.flat[0].imshow(FA[:, :, axial_slice].T, origin='lower', cmap='gray', vmin=0, vmax=1) ax.flat[0].set_title('A) fwDTI FA') ax.flat[1].imshow(dti_FA[:, :, axial_slice].T, origin='lower', cmap='gray', vmin=0, vmax=1) ax.flat[1].set_title('B) standard DTI FA') FAdiff = abs(FA[:, :, axial_slice] - dti_FA[:, :, axial_slice]) ax.flat[2].imshow(FAdiff.T, cmap='gray', origin='lower', vmin=0, vmax=1) ax.flat[2].set_title('C) FA difference') plt.show() fig1.savefig('In_vivo_free_water_DTI_and_standard_DTI_corrected.png') """ .. figure:: In_vivo_free_water_DTI_and_standard_DTI_corrected.png :align: center In vivo FA measures obtain from the free water DTI (A) and standard DTI (B) and their difference (C). Problematic inflated FA values of the images were removed by dismissing voxels above a volume fraction threshold of 0.7. References ---------- .. [Hoy2014] Hoy, A.R., Koay, C.G., Kecskemeti, S.R., Alexander, A.L., 2014. Optimization of a free water elimination two-compartmental model for diffusion tensor imaging. NeuroImage 103, 323-333. doi: 10.1016/j.neuroimage.2014.09.053 """ dipy-0.13.0/doc/examples/reconst_gqi.py000066400000000000000000000066651317371701200200310ustar00rootroot00000000000000""" =============================================== Reconstruct with Generalized Q-Sampling Imaging =============================================== We show how to apply Generalized Q-Sampling Imaging [Yeh2010]_ to diffusion MRI datasets. You can think of GQI as an analytical version of DSI orientation distribution function (ODF) (Garyfallidis, PhD thesis, 2012). First import the necessary modules: """ import numpy as np from dipy.data import fetch_taiwan_ntu_dsi, read_taiwan_ntu_dsi, get_sphere from dipy.reconst.gqi import GeneralizedQSamplingModel from dipy.direction import peaks_from_model """ Download and read the data for this tutorial. """ fetch_taiwan_ntu_dsi() img, gtab = read_taiwan_ntu_dsi() """ img contains a nibabel Nifti1Image object (data) and gtab contains a ``GradientTable`` object (gradient information e.g. b-values). 
For example, to read the b-values it is possible to write:: ``print(gtab.bvals)`` Load the raw diffusion data and the affine. """ data = img.get_data() print('data.shape (%d, %d, %d, %d)' % data.shape) """ data.shape ``(96, 96, 60, 203)`` This dataset has anisotropic voxel sizes, therefore reslicing is necessary. """ affine = img.affine """ Read the voxel size from the image header. """ voxel_size = img.header.get_zooms()[:3] """ Instantiate the model and apply it to the data. """ gqmodel = GeneralizedQSamplingModel(gtab, sampling_length=3) """ The parameter ``sampling_length`` sets the diffusion sampling length used by the GQI reconstruction. Let's use only one slice of the data. """ dataslice = data[:, :, data.shape[2] // 2] mask = dataslice[..., 0] > 50 gqfit = gqmodel.fit(dataslice, mask=mask) """ Load an ODF reconstruction sphere. """ sphere = get_sphere('symmetric724') """ Calculate the ODFs with this specific sphere. """ ODF = gqfit.odf(sphere) print('ODF.shape (%d, %d, %d)' % ODF.shape) """ ODF.shape ``(96, 96, 724)`` Using ``peaks_from_model`` we can find the main peaks of the ODFs and other properties. """ gqpeaks = peaks_from_model(model=gqmodel, data=dataslice, sphere=sphere, relative_peak_threshold=.5, min_separation_angle=25, mask=mask, return_odf=False, normalize_peaks=True) gqpeak_values = gqpeaks.peak_values """ ``gqpeak_indices`` shows which sphere points have the maximum values. """ gqpeak_indices = gqpeaks.peak_indices """ It is also possible to calculate GFA. """ GFA = gqpeaks.gfa print('GFA.shape (%d, %d)' % GFA.shape) """ With the parameter ``return_odf=True`` we can obtain the ODF using ``gqpeaks.odf``. """ gqpeaks = peaks_from_model(model=gqmodel, data=dataslice, sphere=sphere, relative_peak_threshold=.5, min_separation_angle=25, mask=mask, return_odf=True, normalize_peaks=True) """ This ODF will, of course, be identical to the ODF calculated above as long as the same data and mask are used. """ np.sum(gqpeaks.odf != ODF) == 0 """ True The advantage of using ``peaks_from_model`` is that it calculates the ODF only once and saves it, or discards it if it is not needed. .. [Yeh2010] Yeh, F-C et al., Generalized Q-sampling imaging, IEEE Transactions on Medical Imaging, vol 29, no 9, 2010. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/reconst_ivim.py000066400000000000000000000232721317371701200202060ustar00rootroot00000000000000""" ============================================================ Intravoxel incoherent motion ============================================================ The intravoxel incoherent motion (IVIM) model describes diffusion and perfusion in the signal acquired with a diffusion MRI sequence that contains multiple low b-values. The IVIM model can be understood as an adaptation of the work of Stejskal and Tanner [Stejskal65]_ in biological tissue, and was proposed by Le Bihan [LeBihan84]_. The model assumes two compartments: a slow moving compartment, where particles diffuse in a Brownian fashion as a consequence of thermal energy, and a fast moving compartment (the vascular compartment), where blood moves as a consequence of a pressure gradient. In the first compartment, the diffusion coefficient is $\mathbf{D}$ while in the second compartment, a pseudo diffusion term $\mathbf{D^*}$ is introduced that describes the displacement of the blood elements in an assumed randomly laid out vascular network, at the macroscopic level. According to [LeBihan84]_, $\mathbf{D^*}$ is greater than $\mathbf{D}$. The IVIM model expresses the MRI signal as follows: ..
math:: S(b)=S_0(fe^{-bD^*}+(1-f)e^{-bD}) where $\mathbf{b}$ is the diffusion gradient weighing value (which is dependent on the measurement parameters), $\mathbf{S_{0}}$ is the signal in the absence of diffusion gradient sensitization, $\mathbf{f}$ is the perfusion fraction, $\mathbf{D}$ is the diffusion coefficient and $\mathbf{D^*}$ is the pseudo-diffusion constant, due to vascular contributions. In the following example we show how to fit the IVIM model on a diffusion-weighteddataset and visualize the diffusion and pseudo diffusion coefficients. First, we import all relevant modules: """ import matplotlib.pyplot as plt from dipy.reconst.ivim import IvimModel from dipy.data.fetcher import read_ivim """ We get an IVIM dataset using dipy_'s data fetcher ``read_ivim``. This dataset was acquired with 21 b-values in 3 different directions. Volumes corresponding to different directions were registered to each other, and averaged across directions. Thus, this dataset has 4 dimensions, with the length of the last dimension corresponding to the number of b-values. In order to use this model the data should contain signals measured at 0 bvalue. """ img, gtab = read_ivim() """ The variable ``img`` contains a nibabel NIfTI image object (with the data) and gtab contains a GradientTable object (information about the gradients e.g. b-values and b-vectors). We get the data from img using ``read_data``. """ data = img.get_data() print('data.shape (%d, %d, %d, %d)' % data.shape) """ The data has 54 slices, with 256-by-256 voxels in each slice. The fourth dimension corresponds to the b-values in the gtab. Let us visualize the data by taking a slice midway(z=33) at $\mathbf{b} = 0$. """ z = 33 b = 0 plt.imshow(data[:, :, z, b].T, origin='lower', cmap='gray', interpolation='nearest') plt.axhline(y=100) plt.axvline(x=170) plt.savefig("ivim_data_slice.png") plt.close() """ .. figure:: ivim_data_slice.png :align: center Heat map of a slice of data The region around the intersection of the cross-hairs in the figure contains cerebral spinal fluid (CSF), so it so it should have a very high $\mathbf{f}$ and $\mathbf{D^*}$, the area between the right and left is white matter so that should be lower, and the region on the right is gray matter and CSF. That should give us some contrast to see the values varying across the regions. """ x1, x2 = 90, 155 y1, y2 = 90, 170 data_slice = data[x1:x2, y1:y2, z, :] plt.imshow(data[x1:x2, y1:y2, z, b].T, origin='lower', cmap="gray", interpolation='nearest') plt.savefig("CSF_slice.png") plt.close() """ .. figure:: CSF_slice.png :align: center Heat map of the CSF slice selected. Now that we have prepared the datasets we can go forward with the ivim fit. Instead of fitting the entire volume, we focus on a small section of the slice as selected aboove, to fit the IVIM model. First, we instantiate the Ivim model. Using a two-stage approach: first, a linear fit used to get quick initial guesses for the parameters $\mathbf{S_{0}}$ and $\mathbf{D}$ by considering b-values greater than ``split_b_D`` (default: 400))and assuming a mono-exponential signal. This is based on the assumption that at high b-values the signal can be approximated as a mono exponential decay and by taking the logarithm of the signal values a linear fit can be obtained. Another linear fit for ``S0`` (bvals < ``split_b_S0`` (default: 200)) follows and ``f`` is estimated using $1 - S0_{prime}/S0$. Then a non-linear least squares fitting is performed to fit ``D_star`` and ``f``. 
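The first, log-linear stage described above can be illustrated with a short sketch for a single voxel of the slice we selected. This is only meant to make the idea concrete; the actual estimation is performed internally by the model.
"""

import numpy as np

# Keep only the measurements above the ``split_b_D`` threshold, where the
# signal is approximately mono-exponential, and fit a line to log(S) vs. b.
bvals = gtab.bvals
signal = data_slice[data_slice.shape[0] // 2, data_slice.shape[1] // 2, :]
signal = np.maximum(signal.astype(float), 1)  # guard against log(0)
high_b = bvals > 400
slope, intercept = np.polyfit(bvals[high_b], np.log(signal[high_b]), 1)
D_guess = -slope              # initial guess for the diffusion coefficient
S0_prime = np.exp(intercept)  # signal extrapolated to b=0 from high b-values
print("Log-linear initial guesses: D = %g, S0' = %g" % (D_guess, S0_prime))

"""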
If the ``two_stage`` flag is set to ``True`` while initializing the model, a final non-linear least squares fitting is performed for all the parameters using Scipy's ``leastsq`` or ``least_square`` function depending on which Scipy version you are using. All initializations for the model such as ``split_b_D`` are passed while creating the ``IvimModel``. If you are using Scipy 0.17, you can also set bounds by setting ``bounds=([0., 0., 0., 0.], [np.inf, 1., 1., 1.]))`` while initializing the ``IvimModel``. It is recommeded that you upgrade to Scipy 0.17 since the fitting results might at times return values which do not make sense physically (for example, a negative $\mathbf{f}$). """ ivimmodel = IvimModel(gtab) """ To fit the model, call the `fit` method and pass the data for fitting. """ ivimfit = ivimmodel.fit(data_slice) """ The fit method creates a IvimFit object which contains the parameters of the model obtained after fitting. These are accessible through the `model_params` attribute of the IvimFit object. The parameters are arranged as a 4D array, corresponding to the spatial dimensions of the data, and the last dimension (of length 4) corresponding to the model parameters according to the following order : $\mathbf{S_{0}, f, D^*, D}$. """ ivimparams = ivimfit.model_params print("ivimparams.shape : {}".format(ivimparams.shape)) """ As we see, we have a 20x20 slice at the height z = 33. Thus we have 400 voxels. We will now plot the parameters obtained from the fit for a voxel and also various maps for the entire slice. This will give us an idea about the diffusion and perfusion in that section. Let(i, j) denote the coordinate of the voxel. We have already fixed the z component as 33 and hence we will get a slice which is 33 units above. """ i, j = 10, 10 estimated_params = ivimfit.model_params[i, j, :] print(estimated_params) """ Next, we plot the results relative to the model fit. For this we will use the `predict` method of the IvimFit object to get the estimated signal. """ estimated_signal = ivimfit.predict(gtab)[i, j, :] plt.scatter(gtab.bvals, data_slice[i, j, :], color="green", label="Actual signal") plt.plot(gtab.bvals, estimated_signal, color="red", label="Estimated Signal") plt.xlabel("bvalues") plt.ylabel("Signals") S0_est, f_est, D_star_est, D_est = estimated_params text_fit = """Estimated \n S0={:06.3f} f={:06.4f}\n D*={:06.5f} D={:06.5f}""".format(S0_est, f_est, D_star_est, D_est) plt.text(0.65, 0.50, text_fit, horizontalalignment='center', verticalalignment='center', transform=plt.gca().transAxes) plt.legend(loc='upper right') plt.savefig("ivim_voxel_plot.png") plt.close() """ .. figure:: ivim_voxel_plot.png :align: center Plot of the signal from one voxel. Now we can map the perfusion and diffusion maps for the slice. We will plot a heatmap showing the values using a colormap. It will be useful to define a plotting function for the heatmap here since we will use it to plot for all the IVIM parameters. We will need to specify the lower and upper limits for our data. For example, the perfusion fractions should be in the range (0,1). Similarly, the diffusion and pseudo-diffusion constants are much smaller than 1. We pass an argument called ``variable`` to out plotting function which gives the label for the plot. 
""" def plot_map(raw_data, variable, limits, filename): lower, upper = limits plt.title('Map for {}'.format(variable)) plt.imshow(raw_data.T, origin='lower', clim=(lower, upper), cmap="gray", interpolation='nearest') plt.colorbar() plt.savefig(filename) plt.close() """ Let us get the various plots so that we can visualize them in one page """ plot_map(ivimfit.S0_predicted, "Predicted S0", (0, 10000), "predicted_S0.png") plot_map(data_slice[:, :, 0], "Measured S0", (0, 10000), "measured_S0.png") plot_map(ivimfit.perfusion_fraction, "f", (0, 1), "perfusion_fraction.png") plot_map(ivimfit.D_star, "D*", (0, 0.01), "perfusion_coeff.png") plot_map(ivimfit.D, "D", (0, 0.001), "diffusion_coeff.png") """ .. figure:: predicted_S0.png :align: center Heatmap of S0 predicted from the fit .. figure:: measured_S0.png :align: center Heatmap of measured signal at bvalue = 0. .. figure:: perfusion_fraction.png :align: center Heatmap of perfusion fraction values predicted from the fit .. figure:: perfusion_coeff.png :align: center Heatmap of perfusion coefficients predicted from the fit. .. figure:: diffusion_coeff.png :align: center Heatmap of diffusion coefficients predicted from the fit References: .. [Stejskal65] Stejskal, E. O.; Tanner, J. E. (1 January 1965). "Spin Diffusion Measurements: Spin Echoes in the Presence of a Time-Dependent Field Gradient". The Journal of Chemical Physics 42 (1): 288. Bibcode: 1965JChPh..42..288S. doi:10.1063/1.1695690. .. [LeBihan84] Le Bihan, Denis, et al. "Separation of diffusion and perfusion in intravoxel incoherent motion MR imaging." Radiology 168.2 (1988): 497-505. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/reconst_mapmri.py000066400000000000000000000427261317371701200205340ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ ================================================================ Continuous and analytical diffusion signal modelling with MAPMRI ================================================================ We show how to model the diffusion signal as a linear combination of continuous functions from the MAPMRI basis [Ozarslan2013]_. This continuous representation allows for the computation of many properties of both the signal and diffusion propagator. We show how to estimate the analytical Orientation Distribution Function (ODF) and a variety of scalar indices. These include rotationally invariant quantities such as the Mean Squared Displacement (MSD), Q-space Inverse Variance (QIV), Return-To-Origin Probability (RTOP) and Non-Gaussianity (NG). Interestingly, the MAP-MRI basis also allows for the computation of directional indices, such as the Return To the Axis Probability (RTAP), the Return To the Plane Probability (RTPP), and the parallel and perpendicular Non-Gaussianity. The estimation of these properties from noisy DWIs requires that the fitting of the MAPMRI basis is regularized. We will show that this can be done using both constraining the diffusion propagator to positive values [Ozarslan2013]_ and analytic Laplacian Regularization [Fick2016a]_. First import the necessary modules: """ from dipy.reconst import mapmri from dipy.viz import fvtk from dipy.data import fetch_cenir_multib, read_cenir_multib, get_sphere from dipy.core.gradients import gradient_table import matplotlib.pyplot as plt from mpl_toolkits.axes_grid1 import make_axes_locatable """ Download and read the data for this tutorial. MAPMRI requires multi-shell data, to properly fit the radial part of the basis. 
The total size of the downloaded data is 1760 MBytes, however you only need to fetch it once. Parameter ``with_raw`` of function ``fetch_cenir_multib`` is set to ``False`` to only download eddy-current/motion corrected data:. """ fetch_cenir_multib(with_raw=False) """ For this example we select only the shell with b-values equal to the one of the Human Connectome Project (HCP). ``data`` contains the voxel data and ``gtab`` contains a ``GradientTable`` object (gradient information e.g. b-values). For example, to show the b-values it is possible to write:: ``print(gtab.bvals)`` For the values of the q-space indices to make sense it is necessary to explicitly state the ``big_delta`` and ``small_delta`` parameters in the gradient table. """ bvals = [1000, 2000, 3000] img, gtab = read_cenir_multib(bvals) big_delta = 0.0365 # seconds small_delta = 0.0157 # seconds gtab = gradient_table(bvals=gtab.bvals, bvecs=gtab.bvecs, big_delta=big_delta, small_delta=small_delta) data = img.get_data() data_small = data[40:65, 50:51, 35:60] print('data.shape (%d, %d, %d, %d)' % data.shape) """ The MAPMRI Model can now be instantiated. The ``radial_order`` determines the expansion order of the basis, i.e., how many basis functions are used to approximate the signal. First, we must decide to use the anisotropic or isotropic MAPMRI basis. As was shown in [Fick2016a]_, the isotropic basis is best used for tractography purposes, as the anisotropic basis has a bias towards smaller crossing angles in the ODF. For signal fitting and estimation of scalar quantities the anisotropic basis is suggested. The choice can be made by setting "anisotropic_scaling=True" or "anisotropic_scaling=False". First, we must select the method of regularization and/or constraining the basis fitting. - "laplacian_regularization=True" makes it use Laplacian regularization [Fick2016a]_. This method essentially reduces spurious oscillations in the reconstruction by minimizing the Laplacian of the fitted signal. Several options can be given to select the regularization weight: -"regularization_weighting="GCV"" uses generalized cross-validation [Craven1978]_ to find an optimal weight. -"regularization_weighting=0.2" for example would omit the GCV and just set it to 0.2 (found to be reasonable in HCP data [Fick2016a]_). -"regularization_weighting=np.array(weights)" would make the GCV use a custom range to find an optimal weight. - "positivity_constraint=True" makes it use the positivity constraint on the diffusion propagator [Ozarslan2013]_. This method constrains the final solution of the diffusion propagator to be positive at a set of discrete points, since negative values should not exist. -the "pos_grid" and "pos_radius" affect the location and number of constraint points in the diffusion propagator. Both methods do a good job of producing viable solutions to the signal fitting in practice. The difference is that the Laplacian regularization imposes smoothness over the entire signal, including the extrapolation beyond the measured signal. In practice this results in, but does not guarantee positive solutions of the diffusion propagator. The positivity constraint guarantees a positive solution in a set of discrete points, which in general results in smooth solutions, but does not guarantee it. A suggested strategy is to use a low Laplacian weight together with the positivity constraint. In this way both desired properties are guaranteed in the final solution. For now we will generate the anisotropic models for all combinations. 
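Before doing so, here is a brief sketch of how the GCV option described above could be requested. The keyword name ``laplacian_weighting`` and its acceptance of the string ``"GCV"`` are assumed here from the description above; the model below is only constructed for illustration and is not fitted.
"""

# Assumed sketch: let generalized cross-validation pick the Laplacian weight.
map_model_laplacian_gcv = mapmri.MapmriModel(gtab, radial_order=6,
                                             laplacian_regularization=True,
                                             laplacian_weighting="GCV")

"""
Since GCV searches over candidate weights during the fit, it is slower than using a preset value, and it is not used further in this example.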
""" radial_order = 6 map_model_laplacian_aniso = mapmri.MapmriModel(gtab, radial_order=radial_order, laplacian_regularization=True, laplacian_weighting=.2) map_model_positivity_aniso = mapmri.MapmriModel(gtab, radial_order=radial_order, laplacian_regularization=False, positivity_constraint=True) map_model_both_aniso = mapmri.MapmriModel(gtab, radial_order=radial_order, laplacian_regularization=True, laplacian_weighting=.05, positivity_constraint=True) """ Note that when we use only Laplacian regularization, the "GCV" option may select very low regularization weights in very anisotropic white matter such as the corpus callosum, resulting in corrupted scalar indices. In this example we therefore choose a preset value of 0.2, which has shown to be quite robust and also faster in practice [Fick2016a]_. We can then fit the MAPMRI model to the data. """ mapfit_laplacian_aniso = map_model_laplacian_aniso.fit(data_small) mapfit_positivity_aniso = map_model_positivity_aniso.fit(data_small) mapfit_both_aniso = map_model_both_aniso.fit(data_small) """ From the fitted models we will first illustrate the estimation of q-space indices. For completeness, we will compare the estimation using only Laplacian regularization, positivity constraint or both. We first show the RTOP [Ozarslan2013]_. """ # generating RTOP plots fig = plt.figure(figsize=(10, 5)) ax1 = fig.add_subplot(1, 3, 1, title=r'RTOP - Laplacian') ax1.set_axis_off() ind = ax1.imshow(mapfit_laplacian_aniso.rtop()[:, 0, :].T, interpolation='nearest', origin='lower', cmap=plt.cm.gray, vmin=0, vmax=5e7) ax2 = fig.add_subplot(1, 3, 2, title=r'RTOP - Positivity') ax2.set_axis_off() ind = ax2.imshow(mapfit_positivity_aniso.rtop()[:, 0, :].T, interpolation='nearest', origin='lower', cmap=plt.cm.gray, vmin=0, vmax=5e7) ax3 = fig.add_subplot(1, 3, 3, title=r'RTOP - Both') ax3.set_axis_off() ind = ax3.imshow(mapfit_both_aniso.rtop()[:, 0, :].T, interpolation='nearest', origin='lower', cmap=plt.cm.gray, vmin=0, vmax=5e7) divider = make_axes_locatable(ax3) cax = divider.append_axes("right", size="5%", pad=0.05) plt.colorbar(ind, cax=cax) plt.savefig('MAPMRI_maps_regularization.png') """ .. figure:: MAPMRI_maps_regularization.png :align: center It can be seen that all maps appear quite smooth and similar. Though, it is possible to see some subtle differences near the corpus callosum. The similarity and differences in reconstruction can be further illustrated by visualizing the analytic norm of the Laplacian of the fitted signal. """ fig = plt.figure(figsize=(10, 5)) ax1 = fig.add_subplot(1, 3, 1, title=r'Laplacian norm - Laplacian') ax1.set_axis_off() ind = ax1.imshow(mapfit_laplacian_aniso.norm_of_laplacian_signal()[:, 0, :].T, interpolation='nearest', origin='lower', cmap=plt.cm.gray, vmin=0, vmax=3 ) ax2 = fig.add_subplot(1, 3, 2, title=r'Laplacian norm - Positivity') ax2.set_axis_off() ind = ax2.imshow(mapfit_positivity_aniso.norm_of_laplacian_signal()[:, 0, :].T, interpolation='nearest', origin='lower', cmap=plt.cm.gray, vmin=0, vmax=3 ) ax3 = fig.add_subplot(1, 3, 3, title=r'Laplacian norm - Both') ax3.set_axis_off() ind = ax3.imshow(mapfit_both_aniso.norm_of_laplacian_signal()[:, 0, :].T, interpolation='nearest', origin='lower', cmap=plt.cm.gray, vmin=0, vmax=3 ) divider = make_axes_locatable(ax3) cax = divider.append_axes("right", size="5%", pad=0.05) plt.colorbar(ind, cax=cax) plt.savefig('MAPMRI_norm_laplacian.png') """ .. 
figure:: MAPMRI_norm_laplacian.png :align: center A high Laplacian norm indicates that the gradient in the three-dimensional signal reconstruction changes a lot - something that may indicate spurious oscillations. In the Laplacian reconstruction (left) we see that there are some isolated voxels that have a higher norm than the rest. In the positivity constraint reconstruction the norm is already smoother. When both methods are used together the overall norm gets smoother still, since both smoothness of the signal and positivity of the propagator are imposed. From now on we just use the combined approach, show all maps we can generate and explain their significance. """ fig = plt.figure(figsize=(15, 6)) ax1 = fig.add_subplot(1, 5, 1, title=r'MSD') ax1.set_axis_off() ind = ax1.imshow(mapfit_both_aniso.msd()[:, 0, :].T, interpolation='nearest', origin='lower', cmap=plt.cm.gray) ax2 = fig.add_subplot(1, 5, 2, title=r'QIV') ax2.set_axis_off() ind = ax2.imshow(mapfit_both_aniso.qiv()[:, 0, :].T, interpolation='nearest', origin='lower', cmap=plt.cm.gray) ax3 = fig.add_subplot(1, 5, 3, title=r'RTOP') ax3.set_axis_off() ind = ax3.imshow((mapfit_both_aniso.rtop()[:, 0, :]).T, interpolation='nearest', origin='lower', cmap=plt.cm.gray) ax4 = fig.add_subplot(1, 5, 4, title=r'RTAP') ax4.set_axis_off() ind = ax4.imshow((mapfit_both_aniso.rtap()[:, 0, :]).T, interpolation='nearest', origin='lower', cmap=plt.cm.gray) ax5 = fig.add_subplot(1, 5, 5, title=r'RTPP') ax5.set_axis_off() ind = ax5.imshow(mapfit_both_aniso.rtpp()[:, 0, :].T, interpolation='nearest', origin='lower', cmap=plt.cm.gray) plt.savefig('MAPMRI_maps.png') """ .. figure:: MAPMRI_maps.png :align: center From left to right: - Mean Squared Displacement (MSD) is a measure for how far protons are able to diffuse. a decrease in MSD indicates protons are hindered/restricted more, as can be seen by the high MSD in the CSF, but low in the white matter. - Q-space Inverse Variance (QIV) is a measure of variance in the signal, which is said to have higher contrast to white matter than the MSD [Hosseinbor2013]_. We also showed that QIV has high sensitivity to tissue composition change in a simulation study [Fick2016b]_. - Return to origin probability (RTOP) quantifies the probability that a proton will be at the same position at the first and second diffusion gradient pulse. A higher RTOP indicates that the volume a spin is inside of is smaller, meaning more overall restriction. This is illustrated by the low values in CSF and high values in white matter. - Return to axis probability (RTAP) is a directional index that quantifies the probability that a proton will be along the axis of the main eigenvector of a diffusion tensor during both diffusion gradient pulses. RTAP has been related to the apparent axon diameter [Ozarslan2013]_ [Fick2016a]_ under several strong assumptions on the tissue composition and acquisition protocol. - Return to plane probability (RTPP) is a directional index that quantifies the probability that a proton will be on the plane perpendicular to the main eigenvector of a diffusion tensor during both gradient pulses. RTPP is related to the length of a pore [Ozarslan2013]_ but in practice should be similar to that of Gaussian diffusion. It is also possible to estimate the amount of non-Gaussian diffusion in every voxel [Ozarslan2013]_. This quantity is estimated through the ratio between Gaussian volume (MAPMRI's first basis function) and the non-Gaussian volume (all other basis functions) of the fitted signal. 
For this value to be physically meaningful we must use a b-value threshold in the MAPMRI model. This threshold makes the scale estimation in MAPMRI only use samples that realistically describe Gaussian diffusion, i.e., at low b-values. """ map_model_both_ng = mapmri.MapmriModel(gtab, radial_order=radial_order, laplacian_regularization=True, laplacian_weighting=.05, positivity_constraint=True, bval_threshold=2000) mapfit_both_ng = map_model_both_ng.fit(data_small) fig = plt.figure(figsize=(10, 6)) ax1 = fig.add_subplot(1, 3, 1, title=r'NG') ax1.set_axis_off() ind = ax1.imshow(mapfit_both_ng.ng()[:, 0, :].T, interpolation='nearest', origin='lower', cmap=plt.cm.gray) divider = make_axes_locatable(ax1) cax = divider.append_axes("right", size="5%", pad=0.05) plt.colorbar(ind, cax=cax) ax2 = fig.add_subplot(1, 3, 2, title=r'NG perpendicular') ax2.set_axis_off() ind = ax2.imshow(mapfit_both_ng.ng_perpendicular()[:, 0, :].T, interpolation='nearest', origin='lower', cmap=plt.cm.gray) divider = make_axes_locatable(ax2) cax = divider.append_axes("right", size="5%", pad=0.05) plt.colorbar(ind, cax=cax) ax3 = fig.add_subplot(1, 3, 3, title=r'NG parallel') ax3.set_axis_off() ind = ax3.imshow(mapfit_both_ng.ng_parallel()[:, 0, :].T, interpolation='nearest', origin='lower', cmap=plt.cm.gray) divider = make_axes_locatable(ax3) cax = divider.append_axes("right", size="5%", pad=0.05) plt.colorbar(ind, cax=cax) plt.savefig('MAPMRI_ng.png') """ .. figure:: MAPMRI_ng.png :align: center On the left we see the overall NG and on the right the directional perpendicular NG and parallel NG. The NG ranges from 1 (completely non-Gaussian) to 0 (completely Gaussian). The overall NG of a voxel is always higher or equal than each of its components. It can be seen that NG has low values in the CSF and higher in the white matter. Increases or decreases in these values do not point to a specific microstructural change, but can indicate clues as to what is happening, similar to Fractional Anisotropy. An initial simulation study that quantifies the added value of q-space indices over DTI indices is given in [Fick2016b]_. The MAPMRI framework also allows for the estimation of Orientation Distribution Functions (ODFs). We recommend to use the isotropic implementation for this purpose, as the anisotropic implementation has a bias towards smaller crossing angles. For the isotropic basis we recommend to use a ``radial_order`` of 8, as the basis needs more generic and needs more basis functions to approximate the signal. """ radial_order = 8 map_model_both_iso = mapmri.MapmriModel(gtab, radial_order=radial_order, laplacian_regularization=True, laplacian_weighting=.1, positivity_constraint=True, anisotropic_scaling=False) mapfit_both_iso = map_model_both_iso.fit(data_small) """ Load an odf reconstruction sphere """ sphere = get_sphere('symmetric724') """ Compute the ODFs. The radial order ``s`` can be increased to sharpen the results, but it might also make the ODFs noisier. Always check the results visually. """ odf = mapfit_both_iso.odf(sphere, s=2) print('odf.shape (%d, %d, %d, %d)' % odf.shape) """ Display the ODFs. """ r = fvtk.ren() sfu = fvtk.sphere_funcs(odf, sphere, colormap='jet') sfu.RotateX(-90) fvtk.add(r, sfu) fvtk.record(r, n_frames=1, out_path='odfs.png', size=(600, 600)) """ .. figure:: odfs.png :align: center Orientation distribution functions (ODFs). References ---------- .. [Ozarslan2013] Ozarslan E. et. 
al, "Mean apparent propagator (MAP) MRI: A novel diffusion imaging method for mapping tissue microstructure", NeuroImage, 2013. .. [Fick2016a] Fick, Rutger HJ, et al. "MAPL: Tissue microstructure estimation using Laplacian-regularized MAP-MRI and its application to HCP data." NeuroImage (2016). .. [Craven1978] Craven et al. "Smoothing Noisy Data with Spline Functions." NUMER MATH 31.4 (1978): 377-403. .. [Hosseinbor2013] Hosseinbor et al. "Bessel fourier orientation reconstruction (bfor): an analytical diffusion propagator reconstruction for hybrid diffusion imaging and computation of q-space indices. NeuroImage 64, 650–670. .. [Fick2016b] Fick et al. "A sensitivity analysis of Q-space indices with respect to changes in axonal diameter, dispersion and tissue composition. ISBI 2016. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/reconst_shore.py000066400000000000000000000056501317371701200203620ustar00rootroot00000000000000""" ================================================================== Continuous and analytical diffusion signal modelling with 3D-SHORE ================================================================== We show how to model the diffusion signal as a linear combination of continuous functions from the SHORE basis [Merlet2013]_. We also compute the analytical Orientation Distribution Function (ODF). First import the necessary modules: """ from dipy.reconst.shore import ShoreModel from dipy.reconst.shm import sh_to_sf from dipy.viz import fvtk from dipy.data import fetch_isbi2013_2shell, read_isbi2013_2shell, get_sphere from dipy.core.gradients import gradient_table """ Download and read the data for this tutorial. ``fetch_isbi2013_2shell()`` provides data from the `ISBI HARDI contest 2013 `_ acquired for two shells at b-values 1500 $s/mm^2$ and 2500 $s/mm^2$. The six parameters of these two functions define the ROI where to reconstruct the data. They respectively correspond to ``(xmin,xmax,ymin,ymax,zmin,zmax)`` with x, y, z and the three axis defining the spatial positions of the voxels. """ fetch_isbi2013_2shell() img, gtab = read_isbi2013_2shell() data = img.get_data() data_small = data[10:40, 22, 10:40] print('data.shape (%d, %d, %d, %d)' % data.shape) """ ``data`` contains the voxel data and ``gtab`` contains a ``GradientTable`` object (gradient information e.g. b-values). For example, to show the b-values it is possible to write:: ``print(gtab.bvals)`` Instantiate the SHORE Model. ``radial_order`` is the radial order of the SHORE basis. ``zeta`` is the scale factor of the SHORE basis. ``lambdaN`` and ``lambdaL`` are the radial and angular regularization constants, respectively. For details regarding these four parameters see [Cheng2011]_ and [Merlet2013]_. """ radial_order = 6 zeta = 700 lambdaN = 1e-8 lambdaL = 1e-8 asm = ShoreModel(gtab, radial_order=radial_order, zeta=zeta, lambdaN=lambdaN, lambdaL=lambdaL) """ Fit the SHORE model to the data """ asmfit = asm.fit(data_small) """ Load an odf reconstruction sphere """ sphere = get_sphere('symmetric724') """ Compute the ODFs """ odf = asmfit.odf(sphere) print('odf.shape (%d, %d, %d)' % odf.shape) """ Display the ODFs """ r = fvtk.ren() sfu = fvtk.sphere_funcs(odf[:, None, :], sphere, colormap='jet') sfu.RotateX(-90) fvtk.add(r, sfu) fvtk.record(r, n_frames=1, out_path='odfs.png', size=(600, 600)) """ .. figure:: odfs.png :align: center Orientation distribution functions. References ---------- .. [Merlet2013] Merlet S. et. 
al, "Continuous diffusion signal, EAP and ODF estimation via Compressive Sensing in diffusion MRI", Medical Image Analysis, 2013. .. [Cheng2011] Cheng J. et. al, "Theoretical Analysis and Pratical Insights on EAP Estimation via Unified HARDI Framework", MICCAI workshop on Computational Diffusion MRI, 2011. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/reconst_shore_metrics.py000066400000000000000000000063031317371701200221040ustar00rootroot00000000000000""" =========================== Calculate SHORE scalar maps =========================== We show how to calculate two SHORE-based scalar maps: return to origin probability (rtop) [Descoteaux2011]_ and mean square displacement (msd) [Wu2007]_, [Wu2008]_ on your data. SHORE can be used with any multiple b-value dataset like multi-shell or DSI. First import the necessary modules: """ import nibabel as nib import numpy as np import matplotlib.pyplot as plt from dipy.data import fetch_taiwan_ntu_dsi, read_taiwan_ntu_dsi, get_sphere from dipy.data import get_data, dsi_voxels from dipy.reconst.shore import ShoreModel """ Download and read the data for this tutorial. """ fetch_taiwan_ntu_dsi() img, gtab = read_taiwan_ntu_dsi() """ img contains a nibabel Nifti1Image object (data) and gtab contains a GradientTable object (gradient information e.g. b-values). For example, to read the b-values it is possible to write print(gtab.bvals). Load the raw diffusion data and the affine. """ data = img.get_data() affine = img.affine print('data.shape (%d, %d, %d, %d)' % data.shape) """ Instantiate the Model. """ asm = ShoreModel(gtab) """ Let's just use only one slice only from the data. """ dataslice = data[30:70, 20:80, data.shape[2] // 2] """ Fit the signal with the model and calculate the SHORE coefficients. """ asmfit = asm.fit(dataslice) """ Calculate the analytical rtop on the signal that corresponds to the integral of the signal. """ print('Calculating... rtop_signal') rtop_signal = asmfit.rtop_signal() """ Now we calculate the analytical rtop on the propagator, that corresponds to its central value. """ print('Calculating... rtop_pdf') rtop_pdf = asmfit.rtop_pdf() """ In theory, these two measures must be equal, to show that we calculate the mean square error on this two measures. """ mse = np.sum((rtop_signal - rtop_pdf) ** 2) / rtop_signal.size print("mse = %f" % mse) """ mse = 0.000000 Let's calculate the analytical mean square displacement on the propagator. """ print('Calculating... msd') msd = asmfit.msd() """ Show the maps and save them to a file. """ fig = plt.figure(figsize=(6, 6)) ax1 = fig.add_subplot(2, 2, 1, title='rtop_signal') ax1.set_axis_off() ind = ax1.imshow(rtop_signal.T, interpolation='nearest', origin='lower') plt.colorbar(ind) ax2 = fig.add_subplot(2, 2, 2, title='rtop_pdf') ax2.set_axis_off() ind = ax2.imshow(rtop_pdf.T, interpolation='nearest', origin='lower') plt.colorbar(ind) ax3 = fig.add_subplot(2, 2, 3, title='msd') ax3.set_axis_off() ind = ax3.imshow(msd.T, interpolation='nearest', origin='lower', vmin=0) plt.colorbar(ind) plt.savefig('SHORE_maps.png') """ .. figure:: SHORE_maps.png :align: center rtop and msd calculated using the SHORE model. References ---------- .. [Descoteaux2011] Descoteaux M. et. al , "Multiple q-shell diffusion propagator imaging", Medical Image Analysis, vol 15, No. 4, p. 603-621, 2011. .. [Wu2007] Wu Y. et. al, "Hybrid diffusion imaging", NeuroImage, vol 36, p. 617-629, 2007. .. [Wu2008] Wu Y. et. 
al, "Computation of Diffusion Function Measures in q-Space Using Magnetic Resonance Hybrid Diffusion Imaging", IEEE TRANSACTIONS ON MEDICAL IMAGING, vol. 27, No. 6, p. 858-865, 2008. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/reslice_datasets.py000066400000000000000000000034061317371701200210200ustar00rootroot00000000000000 """ ========================== Reslice diffusion datasets ========================== Overview -------- Often in imaging it is common to reslice images in different resolutions. Especially in dMRI we usually want images with isotropic voxel size as they facilitate most tractography algorithms. In this example we show how you can reslice a dMRI dataset to have isotropic voxel size. """ import nibabel as nib """ The function we need to use is called resample. """ from dipy.align.reslice import reslice from dipy.data import get_data """ We use here a very small dataset to show the basic principles but you can replace the following line with the path of your image. """ fimg = get_data('aniso_vox') """ We load the image and print the shape of the volume """ img = nib.load(fimg) data = img.get_data() data.shape """ ``(58, 58, 24)`` Load the affine of the image. The affine is the transformation matrix which maps image coordinates to world (mm) coordinates. """ affine = img.affine """ Load and show the zooms which hold the voxel size. """ zooms = img.header.get_zooms()[:3] zooms """ ``(4.0, 4.0, 5.0)`` Set the required new voxel size. """ new_zooms = (3., 3., 3.) new_zooms """ ``(3.0, 3.0, 3.0)`` Start resampling (reslicing). Trilinear interpolation is used by default. """ data2, affine2 = reslice(data, affine, zooms, new_zooms) data2.shape """ ``(77, 77, 40)`` Save the result as a new Nifti file. """ img2 = nib.Nifti1Image(data2, affine2) nib.save(img2, 'iso_vox.nii.gz') """ Or as analyze format or any other supported format. """ img3 = nib.Spm2AnalyzeImage(data2, affine2) nib.save(img3,'iso_vox.img') """ Done. Check your datasets. As you may have already realized the same code can be used for general reslicing problems not only for dMRI data. """ dipy-0.13.0/doc/examples/restore_dti.py000066400000000000000000000170011317371701200200210ustar00rootroot00000000000000""" ===================================================== Using the RESTORE algorithm for robust tensor fitting ===================================================== The diffusion tensor model takes into account certain kinds of noise (thermal), but not other kinds, such as "physiological" noise. For example, if a subject moves during the acquisition of one of the diffusion-weighted samples, this might have a substantial effect on the parameters of the tensor fit calculated in all voxels in the brain for that subject. One of the pernicious consequences of this is that it can lead to wrong interpretation of group differences. For example, some groups of participants (e.g. young children, patient groups, etc.) are particularly prone to motion and differences in tensor parameters and derived statistics (such as FA) due to motion would be confounded with actual differences in the physical properties of the white matter. An example of this was shown in a paper by Yendiki et al. [Yendiki2013]_. One of the strategies to deal with this problem is to apply an automatic method for detecting outliers in the data, excluding these outliers and refitting the model without the presence of these outliers. This is often referred to as "robust model fitting". 
One of the common algorithms for robust tensor fitting is called RESTORE, and was first proposed by Chang et al. [Chang2005]_. In the following example, we will demonstrate how to use RESTORE on a simulated dataset, which we will corrupt by adding intermittent noise. We start by importing a few of the libraries we will use. """ import numpy as np import nibabel as nib """ The module ``dipy.reconst.dti`` contains the implementation of tensor fitting, including an implementation of the RESTORE algorithm. """ import dipy.reconst.dti as dti """ ``dipy.data`` is used for small datasets that we use in tests and examples. """ import dipy.data as dpd """ ``dipy.viz.fvtk`` is used for 3D visualization and matplotlib for 2D visualizations: """ import dipy.viz.fvtk as fvtk import matplotlib.pyplot as plt """ If needed, the ``fetch_stanford_hardi`` function will download the raw dMRI dataset of a single subject. The size of this dataset is 87 MBytes. You only need to fetch once. """ dpd.fetch_stanford_hardi() img, gtab = dpd.read_stanford_hardi() """ We initialize a DTI model class instance using the gradient table used in the measurement. By default, ``dti.TensorModel`` will use a weighted least-squares algorithm (described in [Chang2005]_) to fit the parameters of the model. We initialize this model as a baseline for comparison of noise-corrupted models: """ dti_wls = dti.TensorModel(gtab) """ For the purpose of this example, we will focus on the data from a region of interest (ROI) surrounding the Corpus Callosum. We define that ROI as the following indices: """ roi_idx = (slice(20,50), slice(55,85), slice(38,39)) """ And use them to index into the data: """ data = img.get_data()[roi_idx] """ This dataset is not very noisy, so we will artificially corrupt it to simulate the effects of "physiological" noise, such as subject motion. But first, let's establish a baseline, using the data as it is: """ fit_wls = dti_wls.fit(data) fa1 = fit_wls.fa evals1 = fit_wls.evals evecs1 = fit_wls.evecs cfa1 = dti.color_fa(fa1, evecs1) sphere = dpd.get_sphere('symmetric724') """ We visualize the ODFs in the ROI using fvtk: """ ren = fvtk.ren() fvtk.add(ren, fvtk.tensor(evals1, evecs1, cfa1, sphere)) print('Saving illustration as tensor_ellipsoids_wls.png') fvtk.record(ren, n_frames=1, out_path='tensor_ellipsoids_wls.png', size=(600, 600)) """ .. figure:: tensor_ellipsoids_wls.png :align: center Tensor Ellipsoids. """ fvtk.clear(ren) """ Next, we corrupt the data with some noise. To simulate a subject that moves intermittently, we will replace a few of the images with a very low signal """ noisy_data = np.copy(data) noisy_idx = slice(-10, None) # The last 10 volumes are corrupted noisy_data[..., noisy_idx] = 1.0 """ We use the same model to fit this noisy data """ fit_wls_noisy = dti_wls.fit(noisy_data) fa2 = fit_wls_noisy.fa evals2 = fit_wls_noisy.evals evecs2 = fit_wls_noisy.evecs cfa2 = dti.color_fa(fa2, evecs2) ren = fvtk.ren() fvtk.add(ren, fvtk.tensor(evals2, evecs2, cfa2, sphere)) print('Saving illustration as tensor_ellipsoids_wls_noisy.png') fvtk.record(ren, n_frames=1, out_path='tensor_ellipsoids_wls_noisy.png', size=(600, 600)) """ In places where the tensor model is particularly sensitive to noise, the resulting tensor field will be distorted .. figure:: tensor_ellipsoids_wls_noisy.png :align: center Tensor Ellipsoids from noisy data. To estimate the parameters from the noisy data using RESTORE, we need to estimate what would be a reasonable amount of noise to expect in the measurement. 
To do that, we use the ``dipy.denoise.noise_estimate`` module: """ import dipy.denoise.noise_estimate as ne sigma = ne.estimate_sigma(data) """ This estimate of the standard deviation will be used by the RESTORE algorithm to identify the outliers in each voxel and is given as an input when initializing the TensorModel object: """ dti_restore = dti.TensorModel(gtab,fit_method='RESTORE', sigma=sigma) fit_restore_noisy = dti_restore.fit(noisy_data) fa3 = fit_restore_noisy.fa evals3 = fit_restore_noisy.evals evecs3 = fit_restore_noisy.evecs cfa3 = dti.color_fa(fa3, evecs3) ren = fvtk.ren() fvtk.add(ren, fvtk.tensor(evals3, evecs3, cfa3, sphere)) print('Saving illustration as tensor_ellipsoids_restore_noisy.png') fvtk.record(ren, n_frames=1, out_path='tensor_ellipsoids_restore_noisy.png', size=(600, 600)) """ .. figure:: tensor_ellipsoids_restore_noisy.png :align: center Tensor Ellipsoids from noisy data recovered with RESTORE. The tensor field looks rather restored to its noiseless state in this image, but to convince ourselves further that this did the right thing, we will compare the distribution of FA in this region relative to the baseline, using the RESTORE estimate and the WLS estimate [Chung2006]_. """ fig_hist, ax = plt.subplots(1) ax.hist(np.ravel(fa2), color='b', histtype='step', label='WLS') ax.hist(np.ravel(fa3), color='r', histtype='step', label='RESTORE') ax.hist(np.ravel(fa1), color='g', histtype='step', label='Original') ax.set_xlabel('Fractional Anisotropy') ax.set_ylabel('Count') plt.legend() fig_hist.savefig('dti_fa_distributions.png') """ .. figure:: dti_fa_distributions.png :align: center This demonstrates that RESTORE can recover a distribution of FA that more closely resembles the baseline distribution of the noiseless signal, and demonstrates the utility of the method to data with intermittent noise. Importantly, this method assumes that the tensor is a good representation of the diffusion signal in the data. If you have reason to believe this is not the case (for example, you have data with very high b values and you are particularly interested in locations in the brain in which fibers cross), you might want to use a different method to fit your data. References ---------- .. [Yendiki2013] Yendiki, A, Koldewynb, K, Kakunooria, S, Kanwisher, N, and Fischl, B. (2013). Spurious group differences due to head motion in a diffusion MRI study. Neuroimage. .. [Chang2005] Chang, L-C, Jones, DK and Pierpaoli, C (2005). RESTORE: robust estimation of tensors by outlier rejection. MRM, 53: 1088-95. .. [Chung2006] Chung, SW, Lu, Y, Henry, R-G, (2006). Comparison of bootstrap approaches for estimation of uncertainties of DTI parameters. NeuroImage 33, 531-541. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/segment_clustering_features.py000066400000000000000000000257051317371701200233070ustar00rootroot00000000000000""" ============================================ Tractography Clustering - Available Features ============================================ This page lists available features that can be used by the tractography clustering framework. For every feature a brief description is provided explaining: what it does, when it's useful and how to use it. If you are not familiar with the tractography clustering framework, read the :ref:`clustering-framework` first. .. contents:: Available Features :local: :depth: 1 **Note**: All examples assume a function `get_streamlines` exists. We defined here a simple function to do so. 
It imports the necessary modules and load a small streamline bundle. """ def get_streamlines(): from nibabel import trackvis as tv from dipy.data import get_data fname = get_data('fornix') streams, hdr = tv.read(fname) streamlines = [i[0] for i in streams] return streamlines """ .. _clustering-examples-IdentityFeature: Identity Feature ================ **What:** Instances of `IdentityFeature` simply return the streamlines unaltered. In other words the features are the original data. **When:** The QuickBundles algorithm requires streamlines to have the same number of points. If this is the case for your streamlines, you can tell QuickBundles to not perform resampling (see following example). The clustering should be faster than using the default behaviour of QuickBundles since it will require less computation (i.e. no resampling). However, it highly depends on the number of points streamlines have. By default, QuickBundles resamples streamlines so that they have 12 points each [Garyfallidis12]_. *Unless stated otherwise, it is the default feature used by `Metric` objects in the clustering framework.* """ from dipy.segment.clustering import QuickBundles from dipy.segment.metric import IdentityFeature from dipy.segment.metric import AveragePointwiseEuclideanMetric # Get some streamlines. streamlines = get_streamlines() # Previously defined. # Make sure our streamlines have the same number of points. from dipy.tracking.streamline import set_number_of_points streamlines = set_number_of_points(streamlines, nb_points=12) # Create an instance of `IdentityFeature` and tell metric to use it. feature = IdentityFeature() metric = AveragePointwiseEuclideanMetric(feature=feature) qb = QuickBundles(threshold=10., metric=metric) clusters = qb.cluster(streamlines) print("Nb. clusters:", len(clusters)) print("Cluster sizes:", list(map(len, clusters))) """ :: Nb. clusters: 4 Cluster sizes: [64, 191, 47, 1] .. _clustering-examples-ResampleFeature: Resample Feature ================ **What:** Instances of `ResampleFeature` resample streamlines to a predetermined number of points. The resampling is done on the fly such that there are no permanent modifications made to your streamlines. **When:** The QuickBundles algorithm requires streamlines to have the same number of points. By default, QuickBundles uses `ResampleFeature` to resample streamlines so that they have 12 points each [Garyfallidis12]_. If you want to use a different number of points for the resampling, you should provide your own instance of `ResampleFeature` (see following example). **Note:** Resampling streamlines has an impact on clustering results both in term of speed and quality. Setting the number of points too low will result in a loss of information about the shape of the streamlines. On the contrary, setting the number of points too high will slow down the clustering process. """ from dipy.segment.clustering import QuickBundles from dipy.segment.metric import ResampleFeature from dipy.segment.metric import AveragePointwiseEuclideanMetric # Get some streamlines. streamlines = get_streamlines() # Previously defined. # Streamlines will be resampled to 24 points on the fly. feature = ResampleFeature(nb_points=24) metric = AveragePointwiseEuclideanMetric(feature=feature) # a.k.a. MDF qb = QuickBundles(threshold=10., metric=metric) clusters = qb.cluster(streamlines) print("Nb. clusters:", len(clusters)) print("Cluster sizes:", list(map(len, clusters))) """ :: Nb. clusters: 4 Cluster sizes: [64, 191, 44, 1] .. 
_clustering-examples-CenterOfMassFeature: Center of Mass Feature ====================== **What:** Instances of `CenterOfMassFeature` compute the center of mass (also known as center of gravity) of a set of points. This is achieved by taking the mean of every coordinate independently (for more information see the `wiki page `_). **When:** This feature can be useful when you *only* need information about the spatial position of a streamline. **Note:** The computed center is not guaranteed to be an existing point in the streamline. """ import numpy as np from dipy.viz import fvtk from dipy.segment.clustering import QuickBundles from dipy.segment.metric import CenterOfMassFeature from dipy.segment.metric import EuclideanMetric # Get some streamlines. streamlines = get_streamlines() # Previously defined. feature = CenterOfMassFeature() metric = EuclideanMetric(feature) qb = QuickBundles(threshold=5., metric=metric) clusters = qb.cluster(streamlines) # Extract feature of every streamline. centers = np.asarray(list(map(feature.extract, streamlines))) # Color each center of mass according to the cluster they belong to. rng = np.random.RandomState(42) colormap = fvtk.create_colormap(np.arange(len(clusters))) colormap_full = np.ones((len(streamlines), 3)) for cluster, color in zip(clusters, colormap): colormap_full[cluster.indices] = color # Visualization ren = fvtk.ren() fvtk.clear(ren) ren.SetBackground(0, 0, 0) fvtk.add(ren, fvtk.streamtube(streamlines, fvtk.colors.white, opacity=0.05)) fvtk.add(ren, fvtk.point(centers[:, 0, :], colormap_full, point_radius=0.2)) fvtk.record(ren, n_frames=1, out_path='center_of_mass_feature.png', size=(600, 600)) """ .. figure:: center_of_mass_feature.png :align: center Showing the center of mass of each streamline and colored according to the QuickBundles results. .. _clustering-examples-MidpointFeature: Midpoint Feature ================ **What:** Instances of `MidpointFeature` extract the middle point of a streamline. If there is an even number of points, the feature will then correspond to the point halfway between the two middle points. **When:** This feature can be useful when you *only* need information about the spatial position of a streamline. This can also be an alternative to the `CenterOfMassFeature` if the point extracted must be on the streamline. """ import numpy as np from dipy.viz import fvtk from dipy.segment.clustering import QuickBundles from dipy.segment.metric import MidpointFeature from dipy.segment.metric import EuclideanMetric # Get some streamlines. streamlines = get_streamlines() # Previously defined. feature = MidpointFeature() metric = EuclideanMetric(feature) qb = QuickBundles(threshold=5., metric=metric) clusters = qb.cluster(streamlines) # Extract feature of every streamline. midpoints = np.asarray(list(map(feature.extract, streamlines))) # Color each midpoint according to the cluster they belong to. rng = np.random.RandomState(42) colormap = fvtk.create_colormap(np.arange(len(clusters))) colormap_full = np.ones((len(streamlines), 3)) for cluster, color in zip(clusters, colormap): colormap_full[cluster.indices] = color # Visualization ren = fvtk.ren() fvtk.clear(ren) ren.SetBackground(0, 0, 0) fvtk.add(ren, fvtk.point(midpoints[:, 0, :], colormap_full, point_radius=0.2)) fvtk.add(ren, fvtk.streamtube(streamlines, fvtk.colors.white, opacity=0.05)) fvtk.record(ren, n_frames=1, out_path='midpoint_feature.png', size=(600, 600)) """ .. 
figure:: midpoint_feature.png :align: center Showing the middle point of each streamline and colored according to the QuickBundles results. .. _clustering-examples-ArcLengthFeature: ArcLength Feature ================= **What:** Instances of `ArcLengthFeature` compute the length of a streamline. More specifically, this feature corresponds to the sum of the lengths of every streamline segments. **When:** This feature can be useful when you *only* need information about the length of a streamline. """ import numpy as np from dipy.viz import fvtk from dipy.segment.clustering import QuickBundles from dipy.segment.metric import ArcLengthFeature from dipy.segment.metric import EuclideanMetric # Get some streamlines. streamlines = get_streamlines() # Previously defined. feature = ArcLengthFeature() metric = EuclideanMetric(feature) qb = QuickBundles(threshold=2., metric=metric) clusters = qb.cluster(streamlines) # Color each streamline according to the cluster they belong to. colormap = fvtk.create_colormap(np.ravel(clusters.centroids)) colormap_full = np.ones((len(streamlines), 3)) for cluster, color in zip(clusters, colormap): colormap_full[cluster.indices] = color # Visualization ren = fvtk.ren() fvtk.clear(ren) ren.SetBackground(0, 0, 0) fvtk.add(ren, fvtk.streamtube(streamlines, colormap_full)) fvtk.record(ren, n_frames=1, out_path='arclength_feature.png', size=(600, 600)) """ .. figure:: arclength_feature.png :align: center Showing the streamlines colored according to their length. .. _clustering-examples-VectorOfEndpointsFeature: Vector Between Endpoints Feature ================================ **What:** Instances of `VectorOfEndpointsFeature` extract the vector going from one extremity of the streamline to the other. In other words, this feature represents the vector beginning at the first point and ending at the last point of the streamlines. **When:** This feature can be useful when you *only* need information about the orientation of a streamline. **Note:** Since streamlines endpoints are ambiguous (e.g. the first point could be either the beginning or the end of the streamline), one must be careful when using this feature. """ import numpy as np from dipy.viz import fvtk from dipy.segment.clustering import QuickBundles from dipy.segment.metric import VectorOfEndpointsFeature from dipy.segment.metric import CosineMetric # Get some streamlines. streamlines = get_streamlines() # Previously defined. feature = VectorOfEndpointsFeature() metric = CosineMetric(feature) qb = QuickBundles(threshold=0.1, metric=metric) clusters = qb.cluster(streamlines) # Color each streamline according to the cluster they belong to. colormap = fvtk.create_colormap(np.arange(len(clusters))) colormap_full = np.ones((len(streamlines), 3)) for cluster, color in zip(clusters, colormap): colormap_full[cluster.indices] = color # Visualization ren = fvtk.ren() fvtk.clear(ren) ren.SetBackground(0, 0, 0) fvtk.add(ren, fvtk.streamtube(streamlines, colormap_full)) fvtk.record(ren, n_frames=1, out_path='vector_of_endpoints_feature.png', size=(600, 600)) """ .. figure:: vector_of_endpoints_feature.png :align: center Showing the streamlines colored according to their orientation. .. include:: ../links_names.inc .. [Garyfallidis12] Garyfallidis E. et al., QuickBundles a method for tractography simplification, Frontiers in Neuroscience, vol 6, no 175, 2012. 
""" dipy-0.13.0/doc/examples/segment_clustering_metrics.py000066400000000000000000000152531317371701200231340ustar00rootroot00000000000000""" =========================================== Tractography Clustering - Available Metrics =========================================== This page lists available metrics that can be used by the tractography clustering framework. For every metric a brief description is provided explaining: what it does, when it's useful and how to use it. If you are not familiar with the tractography clustering framework, check this tutorial :ref:`clustering-framework`. .. contents:: Available Metrics :local: :depth: 1 **Note**: All examples assume a function `get_streamlines` exists. We defined here a simple function to do so. It imports the necessary modules and load a small streamline bundle. """ def get_streamlines(): from nibabel import trackvis as tv from dipy.data import get_data fname = get_data('fornix') streams, hdr = tv.read(fname) streamlines = [i[0] for i in streams] return streamlines """ .. _clustering-examples-AveragePointwiseEuclideanMetric: Average of Pointwise Euclidean Metric ===================================== **What:** Instances of `AveragePointwiseEuclideanMetric` first compute the pointwise Euclidean distance between two sequences *of same length* then return the average of those distances. This metric takes as inputs two features that are sequences containing the same number of elements. **When:** By default the `QuickBundles` clustering will resample your streamlines on-the-fly so they have 12 points. If for some reason you want to avoid this and you made sure all your streamlines have already the same number of points, you can manually provide an instance of `AveragePointwiseEuclideanMetric` to `QuickBundles`. Since the default `Feature` is the `IdentityFeature` the streamlines won't be resampled thus saving some computational time. **Note:** Inputs must be sequences of same length. """ from dipy.viz import fvtk from dipy.segment.clustering import QuickBundles from dipy.segment.metric import AveragePointwiseEuclideanMetric # Get some streamlines. streamlines = get_streamlines() # Previously defined. # Make sure our streamlines have the same number of points. from dipy.tracking.streamline import set_number_of_points streamlines = set_number_of_points(streamlines, nb_points=12) # Create the instance of `AveragePointwiseEuclideanMetric` to use. metric = AveragePointwiseEuclideanMetric() qb = QuickBundles(threshold=10., metric=metric) clusters = qb.cluster(streamlines) print("Nb. clusters:", len(clusters)) print("Cluster sizes:", map(len, clusters)) """ :: Nb. clusters: 4 Cluster sizes: [64, 191, 44, 1] .. _clustering-examples-SumPointwiseEuclideanMetric: Sum of Pointwise Euclidean Metric ================================= **What:** Instances of `SumPointwiseEuclideanMetric` first compute the pointwise Euclidean distance between two sequences *of same length* then return the sum of those distances. **When:** This metric mainly exists because it is used internally by `AveragePointwiseEuclideanMetric`. **Note:** Inputs must be sequences of same length. """ from dipy.segment.clustering import QuickBundles from dipy.segment.metric import SumPointwiseEuclideanMetric # Get some streamlines. streamlines = get_streamlines() # Previously defined. # Make sure our streamlines have the same number of points. 
from dipy.tracking.streamline import set_number_of_points nb_points = 12 streamlines = set_number_of_points(streamlines, nb_points=nb_points) # Create the instance of `SumPointwiseEuclideanMetric` to use. metric = SumPointwiseEuclideanMetric() qb = QuickBundles(threshold=10.*nb_points, metric=metric) clusters = qb.cluster(streamlines) print("Nb. clusters:", len(clusters)) print("Cluster sizes:", map(len, clusters)) """ :: Nb. clusters: 4 Cluster sizes: [64, 191, 44, 1] .. _clustering-examples-MinimumAverageDirectFlipMetric: Minimum Average Direct Flip Metric (MDF) ======================================== **What:** It is the metric used in the QuickBundles algorithm [Garyfallidis12]_. Instances of `MinimumAverageDirectFlipMetric` first compute the direct distance *d1* by taking the average of the pointwise Euclidean distances between two sequences *of same length*. Reverse one of the two sequences and compute the flip distance *d2* using the same approach as for *d1*. Then, return the minimum between *d1* and *d2*. **When:** This metric mainly exists because it is used internally by `AveragePointwiseEuclideanMetric`. **Note:** Inputs must be sequences of same length. """ from dipy.segment.metric import MinimumAverageDirectFlipMetric # Get some streamlines. streamlines = get_streamlines() # Previously defined. # Make sure our streamlines have the same number of points. from dipy.tracking.streamline import set_number_of_points streamlines = set_number_of_points(streamlines, nb_points=20) # Create the instance of `MinimumAverageDirectFlipMetric` to use. metric = MinimumAverageDirectFlipMetric() d = metric.dist(streamlines[0], streamlines[1]) print("MDF distance between the first two streamlines: ", d) """ :: MDF distance between the first two streamlines: 11.681308709622542 .. _clustering-examples-MinimumAverageDirectFlipMetric: Cosine Metric ============= **What:** Instances of `CosineMetric` compute the cosine distance between two vectors (for more information see the `wiki page `_). **When:** This metric can be useful when you *only* need information about the orientation of a streamline. **Note:** Inputs must be vectors (i.e. 1D array). """ import numpy as np from dipy.viz import fvtk from dipy.segment.clustering import QuickBundles from dipy.segment.metric import VectorOfEndpointsFeature from dipy.segment.metric import CosineMetric # Get some streamlines. streamlines = get_streamlines() # Previously defined. feature = VectorOfEndpointsFeature() metric = CosineMetric(feature) qb = QuickBundles(threshold=0.1, metric=metric) clusters = qb.cluster(streamlines) # Color each streamline according to the cluster they belong to. colormap = fvtk.create_colormap(np.arange(len(clusters))) colormap_full = np.ones((len(streamlines), 3)) for cluster, color in zip(clusters, colormap): colormap_full[cluster.indices] = color # Visualization ren = fvtk.ren() fvtk.clear(ren) ren.SetBackground(0, 0, 0) fvtk.add(ren, fvtk.streamtube(streamlines, colormap_full)) fvtk.record(ren, n_frames=1, out_path='cosine_metric.png', size=(600, 600)) """ .. figure:: cosine_metric.png :align: center Showing the streamlines colored according to their orientation. .. include:: ../links_names.inc References ---------- .. [Garyfallidis12] Garyfallidis E. et al., QuickBundles a method for tractography simplification, Frontiers in Neuroscience, vol 6, no 175, 2012. 
""" dipy-0.13.0/doc/examples/segment_extending_clustering_framework.py000066400000000000000000000210661317371701200255270ustar00rootroot00000000000000""" ========================================================== Enhancing QuickBundles with different metrics and features ========================================================== QuickBundles is a flexible algorithm that requires only a distance metric and an adjacency threshold to perform clustering. There is a wide variety of metrics that could be used to cluster streamlines. The purpose of this tutorial is to show how to easily create new ``Feature`` and new ``Metric`` classes that can be used by QuickBundles. .. _clustering-framework: Clustering framework ==================== dipy_ provides a simple, flexible and fast framework to do clustering of sequential data (e.g. streamlines). A *sequential datum* in DIPY is represented as a numpy array of size :math:`(N \times D)`, where each row of the array represents a $D$ dimensional point of the sequence. A set of these sequences is represented as a list of numpy arrays of size :math:`(N_i \times D)` for :math:`i=1:M` where $M$ is the number of sequences in the set. This clustering framework is modular and divided in three parts: #. Feature extraction #. Distance computation #. Clustering algorithm The **feature extraction** part includes any preprocessing needed to be done on the data before computing distances between them (e.g. resampling the number of points of a streamline). To define a new way of extracting features, one has to subclass ``Feature`` (see below). The **distance computation** part includes any metric capable of evaluating a distance between two set of features previously extracted from the data. To define a new way of extracting features, one has to subclass ``Metric`` (see below). The **clustering algorithm** part represents the clustering algorithm itself (e.g. QuickBundles, K-means, Hierarchical Clustering). More precisely, it includes any algorithms taking as input a list of sequential data and outputting a ``ClusterMap`` object. Extending `Feature` =================== This section will guide you through the creation of a new feature extraction method that can be used in the context of this clustering framework. For a list of available features in DIPY see :ref:`example_segment_clustering_features`. Assuming a set of streamlines, the type of features we want to extract is the arc length (i.e. the sum of the length of each segment for a given streamline). Let's start by importing the necessary modules. """ from dipy.segment.metric import Feature from dipy.tracking.streamline import length """ We now define the class ``ArcLengthFeature`` that will perform the desired feature extraction. When subclassing ``Feature``, two methods have to be redefined: ``infer_shape`` and ``extract``. Also, an important property about feature extraction is whether or not its process is invariant to the order of the points within a streamline. This is needed as there is no way one can tell which extremity of a streamline is the beginning and which one is the end. """ class ArcLengthFeature(Feature): """ Computes the arc length of a streamline. """ def __init__(self): # The arc length stays the same even if the streamline is reversed. super(ArcLengthFeature, self).__init__(is_order_invariant=True) def infer_shape(self, streamline): """ Infers the shape of features extracted from `streamline`. 
""" # Arc length is a scalar return 1 def extract(self, streamline): """ Extracts features from `streamline`. """ # return np.sum(np.sqrt(np.sum((streamline[1:] - streamline[:-1]) ** 2))) # or use a DIPY's function that computes the arc length of a streamline. return length(streamline) """ The new feature extraction ``ArcLengthFeature`` is ready to be used. Let's use it to cluster a set of streamlines by their arc length. For educational purposes we will try to cluster a small streamline bundle known from neuroanatomy as the fornix. We start by loading the fornix streamlines. """ import numpy as np from nibabel import trackvis as tv from dipy.data import get_data from dipy.viz import fvtk fname = get_data('fornix') streams, hdr = tv.read(fname) streamlines = [i[0] for i in streams] """ Perform QuickBundles clustering using the metric ``SumPointwiseEuclideanMetric`` and our ``ArcLengthFeature``. """ from dipy.segment.clustering import QuickBundles from dipy.segment.metric import SumPointwiseEuclideanMetric metric = SumPointwiseEuclideanMetric(feature=ArcLengthFeature()) qb = QuickBundles(threshold=2., metric=metric) clusters = qb.cluster(streamlines) """ We will now visualize the clustering result. """ # Color each streamline according to the cluster they belong to. colormap = fvtk.create_colormap(np.ravel(clusters.centroids)) colormap_full = np.ones((len(streamlines), 3)) for cluster, color in zip(clusters, colormap): colormap_full[cluster.indices] = color ren = fvtk.ren() ren.SetBackground(1, 1, 1) fvtk.add(ren, fvtk.streamtube(streamlines, colormap_full)) fvtk.record(ren, n_frames=1, out_path='fornix_clusters_arclength.png', size=(600, 600)) """ .. figure:: fornix_clusters_arclength.png :align: center Showing the different clusters obtained by using the arc length. Extending `Metric` ================== This section will guide you through the creation of a new metric that can be used in the context of this clustering framework. For a list of available metrics in DIPY see :ref:`example_segment_clustering_metrics`. Assuming a set of streamlines, we want a metric that computes the cosine distance giving the vector between endpoints of each streamline (i.e. one minus the cosine of the angle between two vectors). For more information about this distance check ``_. Let's start by importing the necessary modules. """ from dipy.segment.metric import Metric from dipy.segment.metric import VectorOfEndpointsFeature """ We now define the class ``CosineMetric`` that will perform the desired distance computation. When subclassing ``Metric``, two methods have to be redefined: ``are_compatible`` and ``dist``. Moreover, when implementing the ``dist`` method, one needs to make sure the distance returned is symmetric (i.e. `dist(A, B) == dist(B, A)`). """ class CosineMetric(Metric): """ Computes the cosine distance between two streamlines. """ def __init__(self): # For simplicity, features will be the vector between endpoints of a streamline. super(CosineMetric, self).__init__(feature=VectorOfEndpointsFeature()) def are_compatible(self, shape1, shape2): """ Checks if two features are vectors of same dimension. Basically this method exists so we don't have to do this check inside the `dist` method (speedup). """ return shape1 == shape2 and shape1[0] == 1 def dist(self, v1, v2): """ Computes a the cosine distance between two vectors. """ norm = lambda x: np.sqrt(np.sum(x**2)) cos_theta = np.dot(v1, v2.T) / (norm(v1)*norm(v2)) # Make sure it's in [-1, 1], i.e. 
within domain of arccosine cos_theta = np.minimum(cos_theta, 1.) cos_theta = np.maximum(cos_theta, -1.) return np.arccos(cos_theta) / np.pi # Normalized cosine distance """ The new distance ``CosineMetric`` is ready to be used. Let's use it to cluster a set of streamlines according to the cosine distance of the vector between their endpoints. For educational purposes we will try to cluster a small streamline bundle known from neuroanatomy as the fornix. We start by loading the fornix streamlines. """ import numpy as np from nibabel import trackvis as tv from dipy.data import get_data from dipy.viz import fvtk fname = get_data('fornix') streams, hdr = tv.read(fname) streamlines = [i[0] for i in streams] """ Perform QuickBundles clustering using our metric ``CosineMetric``. """ from dipy.segment.clustering import QuickBundles metric = CosineMetric() qb = QuickBundles(threshold=0.1, metric=metric) clusters = qb.cluster(streamlines) """ We will now visualize the clustering result. """ # Color each streamline according to the cluster they belong to. colormap = fvtk.create_colormap(np.arange(len(clusters))) colormap_full = np.ones((len(streamlines), 3)) for cluster, color in zip(clusters, colormap): colormap_full[cluster.indices] = color ren = fvtk.ren() ren.SetBackground(1, 1, 1) fvtk.add(ren, fvtk.streamtube(streamlines, colormap_full)) fvtk.record(ren, n_frames=1, out_path='fornix_clusters_cosine.png', size=(600, 600)) """ .. figure:: fornix_clusters_cosine.png :align: center Showing the different clusters obtained by using the cosine metric. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/segment_quickbundles.py000066400000000000000000000114361317371701200217170ustar00rootroot00000000000000""" ========================================= Tractography Clustering with QuickBundles ========================================= This example explains how we can use QuickBundles [Garyfallidis12]_ to simplify/cluster streamlines. First import the necessary modules. """ import numpy as np from nibabel import trackvis as tv from dipy.segment.clustering import QuickBundles from dipy.io.pickles import save_pickle from dipy.data import get_data from dipy.viz import fvtk """ For educational purposes we will try to cluster a small streamline bundle known from neuroanatomy as the fornix. """ fname = get_data('fornix') """ Load fornix streamlines. """ streams, hdr = tv.read(fname) streamlines = [i[0] for i in streams] """ Perform QuickBundles clustering using the MDF metric and a 10mm distance threshold. Keep in mind that since the MDF metric requires streamlines to have the same number of points, the clustering algorithm will internally use a representation of streamlines that have been automatically downsampled/upsampled so they have only 12 points (To set manually the number of points, see :ref:`clustering-examples-ResampleFeature`). """ qb = QuickBundles(threshold=10.) clusters = qb.cluster(streamlines) """ `clusters` is a `ClusterMap` object which contains attributes that provide information about the clustering result. """ print("Nb. clusters:", len(clusters)) print("Cluster sizes:", map(len, clusters)) print("Small clusters:", clusters < 10) print("Streamlines indices of the first cluster:\n", clusters[0].indices) print("Centroid of the last cluster:\n", clusters[-1].centroid) """ :: Nb. 
clusters: 4 Cluster sizes: [64, 191, 47, 1] Small clusters: array([False, False, False, True], dtype=bool) Streamlines indices of the first cluster: [0, 7, 8, 10, 11, 12, 13, 14, 15, 18, 26, 30, 33, 35, 41, 65, 66, 85, 100, 101, 105, 115, 116, 119, 122, 123, 124, 125, 126, 128, 129, 135, 139, 142, 143, 144, 148, 151, 159, 167, 175, 180, 181, 185, 200, 208, 210, 224, 237, 246, 249, 251, 256, 267, 270, 280, 284, 293, 296, 297, 299] Centroid of the last cluster: array([[ 84.83773804, 117.92590332, 77.32278442], [ 86.10850525, 115.84362793, 81.91885376], [ 86.40357208, 112.25676727, 85.72930145], [ 86.48336792, 107.60327911, 88.13782501], [ 86.23897552, 102.5100708 , 89.29447174], [ 85.04563904, 97.46020508, 88.54240417], [ 82.60240173, 93.14851379, 86.84208679], [ 78.98937225, 89.57682037, 85.63652039], [ 74.72344208, 86.60827637, 84.9391861 ], [ 70.40846252, 85.15874481, 82.4484024 ], [ 66.74534607, 86.00262451, 78.82582092], [ 64.02451324, 88.43942261, 75.0697403 ]], dtype=float32) `clusters` has also attributes like `centroids` (cluster representatives), and methods like `add`, `remove`, and `clear` to modify the clustering result. Lets first show the initial dataset. """ ren = fvtk.ren() ren.SetBackground(1, 1, 1) fvtk.add(ren, fvtk.streamtube(streamlines, fvtk.colors.white)) fvtk.record(ren, n_frames=1, out_path='fornix_initial.png', size=(600, 600)) """ .. figure:: fornix_initial.png :align: center Initial Fornix dataset. Show the centroids of the fornix after clustering (with random colors): """ colormap = fvtk.create_colormap(np.arange(len(clusters))) fvtk.clear(ren) ren.SetBackground(1, 1, 1) fvtk.add(ren, fvtk.streamtube(streamlines, fvtk.colors.white, opacity=0.05)) fvtk.add(ren, fvtk.streamtube(clusters.centroids, colormap, linewidth=0.4)) fvtk.record(ren, n_frames=1, out_path='fornix_centroids.png', size=(600, 600)) """ .. figure:: fornix_centroids.png :align: center Showing the different QuickBundles centroids with random colors. Show the labeled fornix (colors from centroids). """ colormap_full = np.ones((len(streamlines), 3)) for cluster, color in zip(clusters, colormap): colormap_full[cluster.indices] = color fvtk.clear(ren) ren.SetBackground(1, 1, 1) fvtk.add(ren, fvtk.streamtube(streamlines, colormap_full)) fvtk.record(ren, n_frames=1, out_path='fornix_clusters.png', size=(600, 600)) """ .. figure:: fornix_clusters.png :align: center Showing the different clusters. It is also possible to save the complete `ClusterMap` object with pickling. """ save_pickle('QB.pkl', clusters) """ Finally, here is a video of QuickBundles applied on a larger dataset. .. raw:: html .. include:: ../links_names.inc References ---------- .. [Garyfallidis12] Garyfallidis E. et al., QuickBundles a method for tractography simplification, Frontiers in Neuroscience, vol 6, no 175, 2012. """ dipy-0.13.0/doc/examples/sfm_reconst.py000066400000000000000000000132751317371701200200310ustar00rootroot00000000000000""" .. _sfm-reconst: ============================================== Reconstruction with the Sparse Fascicle Model ============================================== In this example, we will use the Sparse Fascicle Model (SFM) [Rokem2015]_, to reconstruct the fiber Orientation Distribution Function (fODF) in every voxel. 
First, we import the modules we will use in this example: """ import dipy.reconst.sfm as sfm import dipy.data as dpd import dipy.direction.peaks as dpp from dipy.viz import fvtk """ For the purpose of this example, we will use the Stanford HARDI dataset (150 directions, single b-value of 2000 $s/mm^2$) that can be automatically downloaded. If you have not yet downloaded this data-set in one of the other examples, you will need to be connected to the internet the first time you run this example. The data will be stored for subsequent runs, and for use with other examples. """ from dipy.data import read_stanford_hardi img, gtab = read_stanford_hardi() data = img.get_data() """ Reconstruction of the fiber ODF in each voxel guides subsequent tracking steps. Here, the model is the Sparse Fascicle Model, described in [Rokem2015]_. This model reconstructs the diffusion signal as a combination of the signals from different fascicles. This model can be written as: .. math:: y = X\beta where $y$ is the signal and $\beta$ are weights on different points in the sphere. The columns of the design matrix, $X$, are the signals in each point in the measurement that would be predicted if there was a fascicle oriented in the direction represented by that column. Typically, the signal used for this kernel will be a prolate tensor with axial diffusivity 3-5 times higher than its radial diffusivity. The exact numbers can also be estimated from examining parts of the brain in which there is known to be only one fascicle (e.g. in corpus callosum). Sparsity constraints on the fiber ODF ($\beta$) are set through the Elastic Net algorithm [Zou2005]_. Elastic Net optimizes the following cost function: .. math:: \sum_{i=1}^{n}{(y_i - \hat{y}_i)^2} + \alpha (\lambda \sum_{j=1}^{m}{w_j} + (1-\lambda) \sum_{j=1}^{m}{w^2_j}) where $\hat{y}$ is the signal predicted for a particular setting of $\beta$, such that the left part of this expression is the squared loss function; $\alpha$ is a parameter that sets the balance between the squared loss on the data, and the regularization constraints. The regularization parameter $\lambda$ sets the `l1_ratio`, which controls the balance between L1-sparsity (low sum of weights), and low L2-sparsity (low sum-of-squares of the weights). Just like Constrained Spherical Deconvolution (see :ref:`reconst-csd`), the SFM requires the definition of a response function. We'll take advantage of the automated algorithm in the :mod:`csdeconv` module to find this response function: """ from dipy.reconst.csdeconv import auto_response response, ratio = auto_response(gtab, data, roi_radius=10, fa_thr=0.7) """ The ``response`` return value contains two entries. The first is an array with the eigenvalues of the response function and the second is the average S0 for this response. It is a very good practice to always validate the result of ``auto_response``. For this purpose, we can print it and have a look at its values. """ print(response) """ (array([ 0.0014, 0.00029, 0.00029]), 416.206) We initialize an SFM model object, using these values.
We will use the default sphere (362 vertices, symmetrically distributed on the surface of the sphere), as a set of putative fascicle directions that are considered in the model """ sphere = dpd.get_sphere() sf_model = sfm.SparseFascicleModel(gtab, sphere=sphere, l1_ratio=0.5, alpha=0.001, response=response[0]) """ For the purpose of the example, we will consider a small volume of data containing parts of the corpus callosum and of the centrum semiovale """ data_small = data[20:50, 55:85, 38:39] """ Fitting the model to this small volume of data, we calculate the ODF of this model on the sphere, and plot it. """ sf_fit = sf_model.fit(data_small) sf_odf = sf_fit.odf(sphere) fodf_spheres = fvtk.sphere_funcs(sf_odf, sphere, scale=1.3, norm=True) ren = fvtk.ren() fvtk.add(ren, fodf_spheres) print('Saving illustration as sf_odfs.png') fvtk.record(ren, out_path='sf_odfs.png', size=(1000, 1000)) """ We can extract the peaks from the ODF, and plot these as well """ sf_peaks = dpp.peaks_from_model(sf_model, data_small, sphere, relative_peak_threshold=.5, min_separation_angle=25, return_sh=False) fvtk.clear(ren) fodf_peaks = fvtk.peaks(sf_peaks.peak_dirs, sf_peaks.peak_values, scale=1.3) fvtk.add(ren, fodf_peaks) print('Saving illustration as sf_peaks.png') fvtk.record(ren, out_path='sf_peaks.png', size=(1000, 1000)) """ Finally, we plot both the peaks and the ODFs, overlayed: """ fodf_spheres.GetProperty().SetOpacity(0.4) fvtk.add(ren, fodf_spheres) print('Saving illustration as sf_both.png') fvtk.record(ren, out_path='sf_both.png', size=(1000, 1000)) """ .. figure:: sf_both.png :align: center SFM Peaks and ODFs. To see how to use this information in tracking, proceed to :ref:`sfm-track`. References ---------- .. [Rokem2015] Ariel Rokem, Jason D. Yeatman, Franco Pestilli, Kendrick N. Kay, Aviv Mezer, Stefan van der Walt, Brian A. Wandell (2015). Evaluating the accuracy of diffusion MRI models in white matter. PLoS ONE 10(4): e0123272. doi:10.1371/journal.pone.0123272 .. [Zou2005] Zou H, Hastie T (2005). Regularization and variable selection via the elastic net. J R Stat Soc B:301-320 """ dipy-0.13.0/doc/examples/sfm_tracking.py000066400000000000000000000115761317371701200201600ustar00rootroot00000000000000""" .. _sfm-track: ================================================== Tracking with the Sparse Fascicle Model ================================================== Tracking requires a per-voxel model. Here, the model is the Sparse Fascicle Model (SFM), described in [Rokem2015]_. This model reconstructs the diffusion signal as a combination of the signals from different fascicles (see also :ref:`sfm-reconst`). To begin, we read the Stanford HARDI data set into memory: """ from dipy.data import read_stanford_labels hardi_img, gtab, labels_img = read_stanford_labels() data = hardi_img.get_data() labels = labels_img.get_data() affine = hardi_img.affine """ This data set provides a label map (generated using `FreeSurfer `_), in which the white matter voxels are labeled as either 1 or 2: """ white_matter = (labels == 1) | (labels == 2) """ The first step in tracking is generating a model from which tracking directions can be extracted in every voxel. 
For the SFM, this requires first that we define a canonical response function that will be used to deconvolve the signal in every voxel """ from dipy.reconst.csdeconv import auto_response response, ratio = auto_response(gtab, data, roi_radius=10, fa_thr=0.7) """ We initialize an SFM model object, using this response function and using the default sphere (362 vertices, symmetrically distributed on the surface of the sphere): """ from dipy.data import get_sphere sphere = get_sphere() from dipy.reconst import sfm sf_model = sfm.SparseFascicleModel(gtab, sphere=sphere, l1_ratio=0.5, alpha=0.001, response=response[0]) """ We fit this model to the data in each voxel in the white-matter mask, so that we can use these directions in tracking: """ from dipy.direction.peaks import peaks_from_model pnm = peaks_from_model(sf_model, data, sphere, relative_peak_threshold=.5, min_separation_angle=25, mask=white_matter, parallel=True ) """ A ThresholdTissueClassifier object is used to segment the data to track only through areas in which the Generalized Fractional Anisotropy (GFA) is sufficiently high. """ from dipy.tracking.local import ThresholdTissueClassifier classifier = ThresholdTissueClassifier(pnm.gfa, .25) """ Tracking will be started from a set of seeds evenly distributed in the white matter: """ from dipy.tracking import utils seeds = utils.seeds_from_mask(white_matter, density=[2, 2, 2], affine=affine) """ For the sake of brevity, we will take only the first 1000 seeds, generating only 1000 streamlines. Remove this line to track from many more points in all of the white matter """ seeds = seeds[:1000] """ We now have the necessary components to construct a tracking pipeline and execute the tracking """ from dipy.tracking.local import LocalTracking streamlines = LocalTracking(pnm, classifier, seeds, affine, step_size=.5) streamlines = list(streamlines) """ Next, we will create a visualization of these streamlines, relative to this subject's T1-weighted anatomy: """ from dipy.viz import fvtk from dipy.viz.colormap import line_colors from dipy.data import read_stanford_t1 from dipy.tracking.utils import move_streamlines from numpy.linalg import inv t1 = read_stanford_t1() t1_data = t1.get_data() t1_aff = t1.affine color = line_colors(streamlines) """ To speed up visualization, we will select a random sub-set of streamlines to display. This is particularly important, if you track from seeds throughout the entire white matter, generating many streamlines. In this case, for demonstration purposes, we subselect 900 streamlines. """ from dipy.tracking.streamline import select_random_set_of_streamlines plot_streamlines = select_random_set_of_streamlines(streamlines, 900) streamlines_actor = fvtk.streamtube( list(move_streamlines(plot_streamlines, inv(t1_aff))), line_colors(streamlines), linewidth=0.1) vol_actor = fvtk.slicer(t1_data) vol_actor.display(40, None, None) vol_actor2 = vol_actor.copy() vol_actor2.display(None, None, 35) ren = fvtk.ren() fvtk.add(ren, streamlines_actor) fvtk.add(ren, vol_actor) fvtk.add(ren, vol_actor2) fvtk.record(ren, n_frames=1, out_path='sfm_streamlines.png', size=(800, 800)) """ .. figure:: sfm_streamlines.png :align: center **Sparse Fascicle Model tracks** Finally, we can save these streamlines to a 'trk' file, for use in other software, or for further analysis. """ from dipy.io.trackvis import save_trk save_trk("sfm_detr.trk", streamlines, affine, labels.shape) """ References ---------- .. [Rokem2015] Ariel Rokem, Jason D. Yeatman, Franco Pestilli, Kendrick N. 
Kay, Aviv Mezer, Stefan van der Walt, Brian A. Wandell (2015). Evaluating the accuracy of diffusion MRI models in white matter. PLoS ONE 10(4): e0123272. doi:10.1371/journal.pone.0123272 """ dipy-0.13.0/doc/examples/simulate_dki.py000066400000000000000000000120211317371701200201450ustar00rootroot00000000000000""" .. _simulate_dki: ========================== DKI MultiTensor Simulation ========================== In this example we show how to simulate the Diffusion Kurtosis Imaging (DKI) data of a single voxel. DKI captures information about the non-Gaussian properties of water diffusion which is a consequence of the existence of tissue barriers and compartments. In these simulations compartmental heterogeneity is taken into account by modeling different compartments for the intra- and extra-cellular media of two populations of fibers. These simulations are performed according to [RNH2015]_. We first import all relevant modules. """ import numpy as np import matplotlib.pyplot as plt from dipy.sims.voxel import (multi_tensor_dki, single_tensor) from dipy.data import get_data from dipy.io.gradients import read_bvals_bvecs from dipy.core.gradients import gradient_table from dipy.reconst.dti import (decompose_tensor, from_lower_triangular) """ For the simulation we will need a GradientTable with the b-values and b-vectors. Here we use the GradientTable of the sample dipy_ dataset ``small_64D``. """ fimg, fbvals, fbvecs = get_data('small_64D') bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs) """ DKI requires data from more than one non-zero b-value. Since the dataset ``small_64D`` was acquired with one non-zero bvalue we artificialy produce a second non-zero b-value. """ bvals = np.concatenate((bvals, bvals * 2), axis=0) bvecs = np.concatenate((bvecs, bvecs), axis=0) """ The b-values and gradient directions are then converted to DIPY's ``GradientTable`` format. """ gtab = gradient_table(bvals, bvecs) """ In ``mevals`` we save the eigenvalues of each tensor. To simulate crossing fibers with two different media (representing intra and extra-cellular media), a total of four components have to be taken in to account (i.e. the first two compartments correspond to the intra and extra cellular media for the first fiber population while the others correspond to the media of the second fiber population) """ mevals = np.array([[0.00099, 0, 0], [0.00226, 0.00087, 0.00087], [0.00099, 0, 0], [0.00226, 0.00087, 0.00087]]) """ In ``angles`` we save in polar coordinates (:math:`\theta, \phi`) the principal axis of each compartment tensor. To simulate crossing fibers at 70$^{\circ}$ the compartments of the first fiber are aligned to the X-axis while the compartments of the second fiber are aligned to the X-Z plane with an angular deviation of 70$^{\circ}$ from the first one. """ angles = [(90, 0), (90, 0), (20, 0), (20, 0)] """ In ``fractions`` we save the percentage of the contribution of each compartment, which is computed by multiplying the percentage of contribution of each fiber population and the water fraction of each different medium """ fie = 0.49 # intra axonal water fraction fractions = [fie*50, (1 - fie)*50, fie*50, (1 - fie)*50] """ Having defined the parameters for all tissue compartments, the elements of the diffusion tensor (DT), the elements of the kurtosis tensor (KT) and the DW signals simulated from the DKI model can be obtain using the function ``multi_tensor_dki``. 
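As an optional sanity check before simulating (a small illustrative snippet, not required by ``multi_tensor_dki``), we can verify that the compartment fractions sum to 100% and convert the polar angles to Cartesian unit vectors with ``dipy.core.geometry.sphere2cart``, confirming the 70$^{\circ}$ crossing described above. The names ``sticks_check`` and ``crossing`` are only used for this check.
"""

from dipy.core.geometry import sphere2cart

# The four compartment fractions should add up to 100 (percent).
assert np.isclose(np.sum(fractions), 100)

# Convert each compartment's (theta, phi), given in degrees, to a unit vector.
sticks_check = np.array([sphere2cart(1, np.deg2rad(theta), np.deg2rad(phi))
                         for theta, phi in angles])
crossing = np.rad2deg(np.arccos(np.dot(sticks_check[0], sticks_check[2])))
print("Crossing angle between the two fiber populations: %.1f degrees" % crossing)

"""
We now call ``multi_tensor_dki`` to generate the simulated DKI signal together with the diffusion tensor (dt) and kurtosis tensor (kt) elements.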
""" signal_dki, dt, kt = multi_tensor_dki(gtab, mevals, S0=200, angles=angles, fractions=fractions, snr=None) """ We can also add Rician noise with a specific SNR. """ signal_noisy, dt, kt = multi_tensor_dki(gtab, mevals, S0=200, angles=angles, fractions=fractions, snr=10) """ For comparison purposes, we also compute the DW signal if only the diffusion tensor components are taken into account. For this we use DIPY's function ``single_tensor`` which requires that dt is decomposed into its eigenvalues and eigenvectors. """ dt_evals, dt_evecs = decompose_tensor(from_lower_triangular(dt)) signal_dti = single_tensor(gtab, S0=200, evals=dt_evals, evecs=dt_evecs, snr=None) """ Finally, we can visualize the values of the different version of simulated signals for all assumed gradient directions and bvalues. """ plt.plot(signal_dti, label='noiseless dti') plt.plot(signal_dki, label='noiseless dki') plt.plot(signal_noisy, label='with noise') plt.legend() plt.show() plt.savefig('simulated_dki_signal.png') """ .. figure:: simulated_dki_signal.png :align: center Simulated signals obtain from the DTI and DKI models. Non-Gaussian diffusion properties in tissues are responsible to smaller signal attenuations for larger bvalues when compared to signal attenuations from free Gaussian water diffusion. This feature can be shown from the figure above, since signals simulated from the DKI models reveals larger DW signal intensities than the signals obtained only from the diffusion tensor components. References ---------- .. [RNH2015] R. Neto Henriques et al., "Exploring the 3D geometry of the diffusion kurtosis tensor - Impact on the development of robust tractography procedures and novel biomarkers", NeuroImage (2015) 111, 85-99. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/simulate_multi_tensor.py000066400000000000000000000050221317371701200221250ustar00rootroot00000000000000""" ====================== MultiTensor Simulation ====================== In this example we show how someone can simulate the signal and the ODF of a single voxel using a MultiTensor. """ import numpy as np from dipy.sims.voxel import (multi_tensor, multi_tensor_odf, single_tensor_odf, all_tensor_evecs) from dipy.data import get_sphere """ For the simulation we will need a GradientTable with the b-values and b-vectors Here we use the one we created in :ref:`example_gradients_spheres`. """ from gradients_spheres import gtab """ In ``mevals`` we save the eigenvalues of each tensor. """ mevals = np.array([[0.0015, 0.0003, 0.0003], [0.0015, 0.0003, 0.0003]]) """ In ``angles`` we save in polar coordinates (:math:`\theta, \phi`) the principal axis of each tensor. """ angles = [(0, 0), (60, 0)] """ In ``fractions`` we save the percentage of the contribution of each tensor. """ fractions = [50, 50] """ The function ``multi_tensor`` will return the simulated signal and an array with the principal axes of the tensors in cartesian coordinates. """ signal, sticks = multi_tensor(gtab, mevals, S0=100, angles=angles, fractions=fractions, snr=None) """ We can also add Rician noise with a specific SNR. """ signal_noisy, sticks = multi_tensor(gtab, mevals, S0=100, angles=angles, fractions=fractions, snr=20) import matplotlib.pyplot as plt plt.plot(signal, label='noiseless') plt.plot(signal_noisy, label='with noise') plt.legend() plt.show() plt.savefig('simulated_signal.png') """ .. figure:: simulated_signal.png :align: center **Simulated MultiTensor signal** """ """ For the ODF simulation we will need a sphere. 
Because we are interested in a simulation of only a single voxel, we can use a sphere with very high resolution. We generate that by subdividing the triangles of one of dipy_'s cached spheres, which we can read in the following way. """ sphere = get_sphere('symmetric724') sphere = sphere.subdivide(2) odf = multi_tensor_odf(sphere.vertices, mevals, angles, fractions) from dipy.viz import fvtk ren = fvtk.ren() odf_actor = fvtk.sphere_funcs(odf, sphere) odf_actor.RotateX(90) fvtk.add(ren, odf_actor) print('Saving illustration as multi_tensor_simulation') fvtk.record(ren, out_path='multi_tensor_simulation.png', size=(300, 300)) """ .. figure:: multi_tensor_simulation.png :align: center Simulating a MultiTensor ODF. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/snr_in_cc.py000066400000000000000000000147001317371701200174360ustar00rootroot00000000000000""" ============================================= SNR estimation for Diffusion-Weighted Images ============================================= Computing the Signal-to-Noise-Ratio (SNR) of DW images is still an open question, as SNR depends on the white matter structure of interest as well as the gradient direction corresponding to each DWI. In classical MRI, SNR can be defined as the ratio of the mean of the signal divided by the standard deviation of the underlying Gaussian noise, that is $SNR = mean(signal) / std(noise)$. The noise standard deviation can be computed from the background in any of the DW images. How do we compute the mean of the signal, and what signal? The strategy here is to compute a 'worst-case' SNR for DWI. Several white matter structures such as the corpus callosum (CC), corticospinal tract (CST), or the superior longitudinal fasciculus (SLF) can be easily identified from the colored-FA (CFA) map. In this example, we will use voxels from the CC, which have the characteristic of being highly red in the CFA map since they are mainly oriented in the left-right direction. We know that the DW image closest to the X-direction will be the one with the most attenuated diffusion signal. This is the strategy adopted in several recent papers (see [Descoteaux2011]_ and [Jones2013]_). It gives a good indication of the quality of the DWI data. First, we compute the tensor model in a brain mask (see the :ref:`reconst_dti` example for further explanations). """ from __future__ import division, print_function import nibabel as nib import numpy as np from dipy.data import fetch_stanford_hardi, read_stanford_hardi from dipy.segment.mask import median_otsu from dipy.reconst.dti import TensorModel fetch_stanford_hardi() img, gtab = read_stanford_hardi() data = img.get_data() affine = img.affine print('Computing brain mask...') b0_mask, mask = median_otsu(data) print('Computing tensors...') tenmodel = TensorModel(gtab) tensorfit = tenmodel.fit(data, mask=mask) """Next, we set our red-green-blue thresholds to (0.6, 1) in the x axis and (0, 0.1) in the y and z axes respectively. These values work well in practice to isolate the very RED voxels of the cfa map. Then, as assurance, we want just RED voxels in the CC (there could be noisy red voxels around the brain mask and we don't want those). Unless the brain acquisition was badly aligned, the CC is always close to the mid-sagittal slice. The following lines perform these two operations and then saves the computed mask. 
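To make the meaning of the threshold tuple concrete, the sketch below spells out the red-dominant test it encodes. This is purely illustrative: it operates on a small random array named ``cfa_demo`` instead of the real CFA map, and it is not the actual implementation used by ``segment_from_cfa``.
"""

# Illustrative only: a hypothetical CFA-like array with RGB values in [0, 1].
rng = np.random.RandomState(42)
cfa_demo = rng.rand(10, 10, 10, 3)
rmin, rmax, gmin, gmax, bmin, bmax = (0.6, 1, 0, 0.1, 0, 0.1)

# Keep voxels whose red channel is high while the green and blue channels
# stay low, i.e. voxels that are strongly left-right oriented.
mask_demo = ((cfa_demo[..., 0] >= rmin) & (cfa_demo[..., 0] <= rmax) &
             (cfa_demo[..., 1] >= gmin) & (cfa_demo[..., 1] <= gmax) &
             (cfa_demo[..., 2] >= bmin) & (cfa_demo[..., 2] <= bmax))
print("Voxels passing the red-dominant test:", int(mask_demo.sum()))

"""
The lines below perform the real segmentation with ``segment_from_cfa``, restricted to a box around the middle of the brain, and save the computed mask.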
""" print('Computing worst-case/best-case SNR using the corpus callosum...') from dipy.segment.mask import segment_from_cfa from dipy.segment.mask import bounding_box threshold = (0.6, 1, 0, 0.1, 0, 0.1) CC_box = np.zeros_like(data[..., 0]) mins, maxs = bounding_box(mask) mins = np.array(mins) maxs = np.array(maxs) diff = (maxs - mins) // 4 bounds_min = mins + diff bounds_max = maxs - diff CC_box[bounds_min[0]:bounds_max[0], bounds_min[1]:bounds_max[1], bounds_min[2]:bounds_max[2]] = 1 mask_cc_part, cfa = segment_from_cfa(tensorfit, CC_box, threshold, return_cfa=True) cfa_img = nib.Nifti1Image((cfa*255).astype(np.uint8), affine) mask_cc_part_img = nib.Nifti1Image(mask_cc_part.astype(np.uint8), affine) nib.save(mask_cc_part_img, 'mask_CC_part.nii.gz') import matplotlib.pyplot as plt region = 40 fig = plt.figure('Corpus callosum segmentation') plt.subplot(1, 2, 1) plt.title("Corpus callosum (CC)") plt.axis('off') red = cfa[..., 0] plt.imshow(np.rot90(red[region, ...])) plt.subplot(1, 2, 2) plt.title("CC mask used for SNR computation") plt.axis('off') plt.imshow(np.rot90(mask_cc_part[region, ...])) fig.savefig("CC_segmentation.png", bbox_inches='tight') """ .. figure:: CC_segmentation.png :align: center """ """Now that we are happy with our crude CC mask that selected voxels in the x-direction, we can use all the voxels to estimate the mean signal in this region. """ mean_signal = np.mean(data[mask_cc_part], axis=0) """Now, we need a good background estimation. We will re-use the brain mask computed before and invert it to catch the outside of the brain. This could also be determined manually with a ROI in the background. [Warning: Certain MR manufacturers mask out the outside of the brain with 0's. One thus has to be careful how the noise ROI is defined]. """ from scipy.ndimage.morphology import binary_dilation mask_noise = binary_dilation(mask, iterations=10) mask_noise[..., :mask_noise.shape[-1]//2] = 1 mask_noise = ~mask_noise mask_noise_img = nib.Nifti1Image(mask_noise.astype(np.uint8), affine) nib.save(mask_noise_img, 'mask_noise.nii.gz') noise_std = np.std(data[mask_noise, :]) print('Noise standard deviation sigma= ', noise_std) """We can now compute the SNR for each DWI. For example, report SNR for DW images with gradient direction that lies the closest to the X, Y and Z axes. """ # Exclude null bvecs from the search idx = np.sum(gtab.bvecs, axis=-1) == 0 gtab.bvecs[idx] = np.inf axis_X = np.argmin(np.sum((gtab.bvecs-np.array([1, 0, 0]))**2, axis=-1)) axis_Y = np.argmin(np.sum((gtab.bvecs-np.array([0, 1, 0]))**2, axis=-1)) axis_Z = np.argmin(np.sum((gtab.bvecs-np.array([0, 0, 1]))**2, axis=-1)) for direction in [0, axis_X, axis_Y, axis_Z]: SNR = mean_signal[direction]/noise_std if direction == 0 : print("SNR for the b=0 image is :", SNR) else : print("SNR for direction", direction, " ", gtab.bvecs[direction], "is :", SNR) """SNR for the b=0 image is : ''42.0695455758''""" """SNR for direction 58 [ 0.98875 0.1177 -0.09229] is : ''5.46995373635''""" """SNR for direction 57 [-0.05039 0.99871 0.0054406] is : ''23.9329492871''""" """SNR for direction 126 [-0.11825 -0.039925 0.99218 ] is : ''23.9965694823''""" """ Since the CC is aligned with the X axis, the lowest SNR is for that gradient direction. In comparison, the DW images in the perpendical Y and Z axes have a high SNR. The b0 still exhibits the highest SNR, since there is no signal attenuation. 
Hence, we can say the Stanford diffusion data has a 'worst-case' SNR of approximately 5, a 'best-case' SNR of approximately 24, and a SNR of 42 on the b0 image. """ """ References ---------- .. [Descoteaux2011] Descoteaux, M., Deriche, R., Le Bihan, D., Mangin, J.-F., and Poupon, C. Multiple q-shell diffusion propagator imaging. Medical Image Analysis, 15(4), 603, 2011. .. [Jones2013] Jones, D. K., Knosche, T. R., & Turner, R. White Matter Integrity, Fiber Count, and Other Fallacies: The Dos and Don'ts of Diffusion MRI. NeuroImage, 73, 239, 2013. """ dipy-0.13.0/doc/examples/streamline_formats.py000066400000000000000000000031571317371701200214030ustar00rootroot00000000000000""" =========================== Read/Write streamline files =========================== Overview ======== dipy_ can read and write many different file formats. In this example we give a short introduction on how to use it for loading or saving streamlines. Read :ref:`faq` """ import numpy as np from dipy.data import get_data from nibabel import trackvis """ 1. Read/write trackvis streamline files with nibabel. """ fname = get_data('fornix') print(fname) streams, hdr = trackvis.read(fname) streamlines = [s[0] for s in streams] """ Similarly you can use ``trackvis.write`` to save the streamlines. 2. Read/write streamlines with numpy. """ streamlines_np = np.array(streamlines, dtype=np.object) np.save('fornix.npy', streamlines_np) streamlines2 = list(np.load('fornix.npy')) """ 3. We also work on our HDF5 based file format which can read/write massive datasets (as big as the size of you free disk space). With `Dpy` we can support * direct indexing from the disk * memory usage always low * extensions to include different arrays in the same file Here is a simple example. """ from dipy.io.dpy import Dpy dpw = Dpy('fornix.dpy', 'w') """ Write many streamlines at once. """ dpw.write_tracks(streamlines2) """ Write one track """ dpw.write_track(streamlines2[0]) """ or one track each time. """ for t in streamlines: dpw.write_track(t) dpw.close() """ Read streamlines directly from the disk using their indices .. include:: ../links_names.inc """ dpr = Dpy('fornix.dpy', 'r') some_streamlines = dpr.read_tracksi([0, 10, 20, 30, 100]) dpr.close() print(len(streamlines)) print(len(some_streamlines)) dipy-0.13.0/doc/examples/streamline_length.py000066400000000000000000000136641317371701200212150ustar00rootroot00000000000000""" ===================================== Streamline length and size reduction ===================================== This example shows how to calculate the lengths of a set of streamlines and also how to compress the streamlines without considerably reducing their lengths or overall shape. A streamline in dipy_ is represented as a numpy array of size :math:`(N \times 3)` where each row of the array represent a 3D point of the streamline. A set of streamlines is represented with a list of numpy arrays of size :math:`(N_i \times 3)` for :math:`i=1:M` where $M$ is the number of streamlines in the set. """ import numpy as np from dipy.tracking.utils import length from dipy.tracking.metrics import downsample from dipy.tracking.distances import approx_polygon_track """ Let's first create a simple simulation of a bundle of streamlines using a cosine function. 
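Before doing that, here is a minimal illustration of the representation described above: a toy "set" of two streamlines is simply a list of two :math:`(N_i \times 3)` arrays, and ``length`` (imported above) computes the length of each one.
"""

# Two toy streamlines with a different number of points each.
toy_streamlines = [np.array([[0., 0., 0.], [1., 0., 0.], [2., 0., 0.]]),
                   np.array([[0., 1., 0.], [0., 2., 0.]])]
print(list(length(toy_streamlines)))  # prints [2.0, 1.0]

"""
We now create the simulated cosine bundle.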
""" def simulated_bundles(no_streamlines=50, n_pts=100): t = np.linspace(-10, 10, n_pts) bundle = [] for i in np.linspace(3, 5, no_streamlines): pts = np.vstack((np.cos(2 * t/np.pi), np.zeros(t.shape) + i, t )).T bundle.append(pts) start = np.random.randint(10, 30, no_streamlines) end = np.random.randint(60, 100, no_streamlines) bundle = [10 * streamline[start[i]:end[i]] for (i, streamline) in enumerate(bundle)] bundle = [np.ascontiguousarray(streamline) for streamline in bundle] return bundle bundle = simulated_bundles() print('This bundle has %d streamlines' % len(bundle)) """ This bundle has 50 streamlines. Using the ``length`` function we can retrieve the lengths of each streamline. Below we show the histogram of the lengths of the streamlines. """ lengths = list(length(bundle)) import matplotlib.pyplot as plt fig_hist, ax = plt.subplots(1) ax.hist(lengths, color='burlywood') ax.set_xlabel('Length') ax.set_ylabel('Count') plt.show() plt.legend() plt.savefig('length_histogram.png') """ .. figure:: length_histogram.png :align: center **Histogram of lengths of the streamlines** ``Length`` will return the length in the units of the coordinate system that streamlines are currently. So, if the streamlines are in world coordinates then the lengths will be in millimeters (mm). If the streamlines are for example in native image coordinates of voxel size 2mm isotropic then you will need to multiply the lengths by 2 if you want them to correspond to mm. In this example we process simulated data without units, however this information is good to have in mind when you calculate lengths with real data. Next, let's find the number of points that each streamline has. """ n_pts = [len(streamline) for streamline in bundle] """ Often, streamlines are represented with more points than what is actually necessary for specific applications. Also, sometimes every streamline has different number of points which could be of a trouble for some algorithms . The function ``downsample`` can be used to set the number of points of a streamline at a specific number and at the same time enforce that all the segments of the streamline will have equal length. """ bundle_downsampled = [downsample(s, 12) for s in bundle] n_pts_ds = [len(s) for s in bundle_downsampled] """ Alternatively, the function ``approx_polygon_track`` allows to reduce the number of points so that they are more points in curvy regions and less points in less curvy regions. In contrast with ``downsample`` it does not enforce that segments should be of equal size. """ bundle_downsampled2 = [approx_polygon_track(s, 0.25) for s in bundle] n_pts_ds2 = [len(streamline) for streamline in bundle_downsampled2] """ Both, ``downsample`` and ``approx_polygon_track`` can be thought as methods for lossy compression of streamlines. """ from dipy.viz import fvtk ren = fvtk.ren() ren.SetBackground(*fvtk.colors.white) bundle_actor = fvtk.streamtube(bundle, fvtk.colors.red, linewidth=0.3) fvtk.add(ren, bundle_actor) bundle_actor2 = fvtk.streamtube(bundle_downsampled, fvtk.colors.red, linewidth=0.3) bundle_actor2.SetPosition(0, 40, 0) bundle_actor3 = fvtk.streamtube(bundle_downsampled2, fvtk.colors.red, linewidth=0.3) bundle_actor3.SetPosition(0, 80, 0) fvtk.add(ren, bundle_actor2) fvtk.add(ren, bundle_actor3) fvtk.camera(ren, pos=(0, 0, 0), focal=(30, 0, 0)) fvtk.record(ren, out_path='simulated_cosine_bundle.png', size=(900, 900)) """ .. 
figure:: simulated_cosine_bundle.png :align: center Initial bundle (down), downsampled at 12 equidistant points (middle), downsampled not equidistantly (up). From the figure above we can see that all 3 bundles look quite similar. However, when we plot the histogram of the number of points used for each streamline, it becomes obvious that we have managed to reduce in a great amount the size of the initial dataset. """ import matplotlib.pyplot as plt fig_hist, ax = plt.subplots(1) ax.hist(n_pts, color='r', histtype='step', label='initial') ax.hist(n_pts_ds, color='g', histtype='step', label='downsample (12)') ax.hist(n_pts_ds2, color='b', histtype='step', label='approx_polygon_track (0.25)') ax.set_xlabel('Number of points') ax.set_ylabel('Count') plt.show() plt.legend() plt.savefig('n_pts_histogram.png') """ .. figure:: n_pts_histogram.png :align: center Histogram of the number of points of the streamlines. Finally, we can also show that the lengths of the streamlines haven't changed considerably after applying the two methods of downsampling. """ lengths_downsampled = list(length(bundle_downsampled)) lengths_downsampled2 = list(length(bundle_downsampled2)) fig, ax = plt.subplots(1) ax.plot(lengths, color='r', label='initial') ax.plot(lengths_downsampled, color='g', label='downsample (12)') ax.plot(lengths_downsampled2, color='b', label='approx_polygon_track (0.25)') ax.set_xlabel('Streamline ID') ax.set_ylabel('Length') plt.show() plt.legend() plt.savefig('lengths_plots.png') """ .. figure:: lengths_plots.png :align: center Lengths of each streamline for every one of the 3 bundles. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/streamline_tools.py000066400000000000000000000266471317371701200211010ustar00rootroot00000000000000""" .. _streamline_tools: ========================================================= Connectivity Matrices, ROI Intersections and Density Maps ========================================================= This example is meant to be an introduction to some of the streamline tools available in dipy_. Some of the functions covered in this example are ``target``, ``connectivity_matrix`` and ``density_map``. ``target`` allows one to filter streamlines that either pass through or do not pass through some region of the brain, ``connectivity_matrix`` groups and counts streamlines based on where in the brain they begin and end, and finally, density map counts the number of streamlines that pass though every voxel of some image. To get started we'll need to have a set of streamlines to work with. We'll use EuDX along with the CsaOdfModel to make some streamlines. Let's import the modules and download the data we'll be using. """ from dipy.tracking.eudx import EuDX from dipy.reconst import peaks, shm from dipy.tracking import utils from dipy.data import read_stanford_labels, fetch_stanford_t1, read_stanford_t1 hardi_img, gtab, labels_img = read_stanford_labels() data = hardi_img.get_data() labels = labels_img.get_data() fetch_stanford_t1() t1 = read_stanford_t1() t1_data = t1.get_data() """ We've loaded an image called ``labels_img`` which is a map of tissue types such that every integer value in the array ``labels`` represents an anatomical structure or tissue type [#]_. For this example, the image was created so that white matter voxels have values of either 1 or 2. We'll use ``peaks_from_model`` to apply the ``CsaOdfModel`` to each white matter voxel and estimate fiber orientations which we can use for tracking. 
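As a quick optional check of the label map (``labels`` was loaded above), we can list the label values it contains and count the voxels carrying the white-matter labels 1 and 2:
"""

import numpy as np

print("Label values:", np.unique(labels))
print("White matter voxels:", int(np.sum((labels == 1) | (labels == 2))))

"""
We now build the white-matter mask and estimate the fiber directions: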
""" white_matter = (labels == 1) | (labels == 2) csamodel = shm.CsaOdfModel(gtab, 6) csapeaks = peaks.peaks_from_model(model=csamodel, data=data, sphere=peaks.default_sphere, relative_peak_threshold=.8, min_separation_angle=45, mask=white_matter) """ Now we can use EuDX to track all of the white matter. To keep things reasonably fast we use ``density=2`` which will result in 8 seeds per voxel. We'll set ``a_low`` (the parameter which determines the threshold of FA/QA under which tracking stops) to be very low because we've already applied a white matter mask. """ seeds = utils.seeds_from_mask(white_matter, density=2) streamline_generator = EuDX(csapeaks.peak_values, csapeaks.peak_indices, odf_vertices=peaks.default_sphere.vertices, a_low=.05, step_sz=.5, seeds=seeds) affine = streamline_generator.affine streamlines = list(streamline_generator) """ The first of the tracking utilities we'll cover here is ``target``. This function takes a set of streamlines and a region of interest (ROI) and returns only those streamlines that pass though the ROI. The ROI should be an array such that the voxels that belong to the ROI are ``True`` and all other voxels are ``False`` (this type of binary array is sometimes called a mask). This function can also exclude all the streamlines that pass though an ROI by setting the ``include`` flag to ``False``. In this example we'll target the streamlines of the corpus callosum. Our ``labels`` array has a sagittal slice of the corpus callosum identified by the label value 2. We'll create an ROI mask from that label and create two sets of streamlines, those that intersect with the ROI and those that don't. """ cc_slice = labels == 2 cc_streamlines = utils.target(streamlines, cc_slice, affine=affine) cc_streamlines = list(cc_streamlines) other_streamlines = utils.target(streamlines, cc_slice, affine=affine, include=False) other_streamlines = list(other_streamlines) assert len(other_streamlines) + len(cc_streamlines) == len(streamlines) """ We can use some of dipy_'s visualization tools to display the ROI we targeted above and all the streamlines that pass though that ROI. The ROI is the yellow region near the center of the axial image. """ from dipy.viz import fvtk from dipy.viz.colormap import line_colors # Make display objects color = line_colors(cc_streamlines) cc_streamlines_actor = fvtk.line(cc_streamlines, line_colors(cc_streamlines)) cc_ROI_actor = fvtk.contour(cc_slice, levels=[1], colors=[(1., 1., 0.)], opacities=[1.]) vol_actor = fvtk.slicer(t1_data) vol_actor.display(40, None, None) vol_actor2 = vol_actor.copy() vol_actor2.display(None, None, 35) # Add display objects to canvas r = fvtk.ren() fvtk.add(r, vol_actor) fvtk.add(r, vol_actor2) fvtk.add(r, cc_streamlines_actor) fvtk.add(r, cc_ROI_actor) # Save figures fvtk.record(r, n_frames=1, out_path='corpuscallosum_axial.png', size=(800, 800)) fvtk.camera(r, [-1, 0, 0], [0, 0, 0], viewup=[0, 0, 1]) fvtk.record(r, n_frames=1, out_path='corpuscallosum_sagittal.png', size=(800, 800)) """ .. figure:: corpuscallosum_axial.png :align: center **Corpus Callosum Axial** .. include:: ../links_names.inc .. figure:: corpuscallosum_sagittal.png :align: center **Corpus Callosum Sagittal** """ """ Once we've targeted on the corpus callosum ROI, we might want to find out which regions of the brain are connected by these streamlines. To do this we can use the ``connectivity_matrix`` function. This function takes a set of streamlines and an array of labels as arguments. 
It returns the number of streamlines that start and end at each pair of labels and it can return the streamlines grouped by their endpoints. Notice that this function only considers the endpoints of each streamline. """ M, grouping = utils.connectivity_matrix(cc_streamlines, labels, affine=affine, return_mapping=True, mapping_as_streamlines=True) M[:3, :] = 0 M[:, :3] = 0 """ We've set ``return_mapping`` and ``mapping_as_streamlines`` to ``True`` so that ``connectivity_matrix`` returns all the streamlines in ``cc_streamlines`` grouped by their endpoint. Because we're typically only interested in connections between gray matter regions, and because the label 0 represents background and the labels 1 and 2 represent white matter, we discard the first three rows and columns of the connectivity matrix. We can now display this matrix using matplotlib, we display it using a log scale to make small values in the matrix easier to see. """ import numpy as np import matplotlib.pyplot as plt plt.imshow(np.log1p(M), interpolation='nearest') plt.savefig("connectivity.png") """ .. figure:: connectivity.png :align: center **Connectivity of Corpus Callosum** .. include:: ../links_names.inc """ """ In our example track there are more streamlines connecting regions 11 and 54 than any other pair of regions. These labels represent the left and right superior frontal gyrus respectively. These two regions are large, close together, have lots of corpus callosum fibers and are easy to track so this result should not be a surprise to anyone. However, the interpretation of streamline counts can be tricky. The relationship between the underlying biology and the streamline counts will depend on several factors, including how the tracking was done, and the correct way to interpret these kinds of connectivity matrices is still an open question in the diffusion imaging literature. The next function we'll demonstrate is ``density_map``. This function allows one to represent the spatial distribution of a track by counting the density of streamlines in each voxel. For example, let's take the track connecting the left and right superior frontal gyrus. """ lr_superiorfrontal_track = grouping[11, 54] shape = labels.shape dm = utils.density_map(lr_superiorfrontal_track, shape, affine=affine) """ Let's save this density map and the streamlines so that they can be visualized together. In order to save the streamlines in a ".trk" file we'll need to move them to "trackvis space", or the representation of streamlines specified by the trackvis Track File format. To do that, we will use tools available in `nibabel `_) """ import nibabel as nib # Save density map dm_img = nib.Nifti1Image(dm.astype("int16"), hardi_img.affine) dm_img.to_filename("lr-superiorfrontal-dm.nii.gz") # Make a trackvis header so we can save streamlines voxel_size = labels_img.header.get_zooms() trackvis_header = nib.trackvis.empty_header() trackvis_header['voxel_size'] = voxel_size trackvis_header['dim'] = shape trackvis_header['voxel_order'] = "RAS" # Move streamlines to "trackvis space" trackvis_point_space = utils.affine_for_trackvis(voxel_size) lr_sf_trk = utils.move_streamlines(lr_superiorfrontal_track, trackvis_point_space, input_space=affine) lr_sf_trk = list(lr_sf_trk) # Save streamlines for_save = [(sl, None, None) for sl in lr_sf_trk] nib.trackvis.write("lr-superiorfrontal.trk", for_save, trackvis_header) """ Let's take a moment here to consider the representation of streamlines used in DIPY. 
Streamlines are a path though the 3D space of an image represented by a set of points. For these points to have a meaningful interpretation, these points must be given in a known coordinate system. The ``affine`` attribute of the ``streamline_generator`` object specifies the coordinate system of the points with respect to the voxel indices of the input data. ``trackvis_point_space`` specifies the trackvis coordinate system with respect to the same indices. The ``move_streamlines`` function returns a new set of streamlines from an existing set of streamlines in the target space. The target space and the input space must be specified as affine transformations with respect to the same reference [#]_. If no input space is given, the input space will be the same as the current representation of the streamlines, in other words the input space is assumed to be ``np.eye(4)``, the 4-by-4 identity matrix. All of the functions above that allow streamlines to interact with volumes take an affine argument. This argument allows these functions to work with streamlines regardless of their coordinate system. For example even though we moved our streamlines to "trackvis space", we can still compute the density map as long as we specify the right coordinate system. """ dm_trackvis = utils.density_map(lr_sf_trk, shape, affine=trackvis_point_space) assert np.all(dm == dm_trackvis) """ This means that streamlines can interact with any image volume, for example a high resolution structural image, as long as one can register that image to the diffusion images and calculate the coordinate system with respect to that image. """ """ .. rubric:: Footnotes .. [#] The image `aparc-reduced.nii.gz`, which we load as ``labels_img``, is a modified version of label map `aparc+aseg.mgz` created by `FreeSurfer `_. The corpus callosum region is a combination of the FreeSurfer labels 251-255. The remaining FreeSurfer labels were re-mapped and reduced so that they lie between 0 and 88. To see the FreeSurfer region, label and name, represented by each value see `label_info.txt` in `~/.dipy/stanford_hardi`. .. [#] An affine transformation is a mapping between two coordinate systems that can represent scaling, rotation, sheer, translation and reflection. Affine transformations are often represented using a 4x4 matrix where the last row of the matrix is ``[0, 0, 0, 1]``. """ dipy-0.13.0/doc/examples/syn_registration_2d.py000066400000000000000000000156151317371701200214770ustar00rootroot00000000000000""" ========================================== Symmetric Diffeomorphic Registration in 2D ========================================== This example explains how to register 2D images using the Symmetric Normalization (SyN) algorithm proposed by Avants et al. 
[Avants09]_ (also implemented in the ANTS software [Avants11]_) We will perform the classic Circle-To-C experiment for diffeomorphic registration """ import numpy as np from dipy.data import get_data from dipy.align.imwarp import SymmetricDiffeomorphicRegistration from dipy.align.metrics import SSDMetric, CCMetric, EMMetric import dipy.align.imwarp as imwarp from dipy.viz import regtools fname_moving = get_data('reg_o') fname_static = get_data('reg_c') moving = np.load(fname_moving) static = np.load(fname_static) """ To visually check the overlap of the static image with the transformed moving image, we can plot them on top of each other with different channels to see where the differences are located """ regtools.overlay_images(static, moving, 'Static', 'Overlay', 'Moving', 'input_images.png') """ .. figure:: input_images.png :align: center Input images. """ """ We want to find an invertible map that transforms the moving image (circle) into the static image (the C letter). The first decision we need to make is what similarity metric is appropriate for our problem. In this example we are using two binary images, so the Sum of Squared Differences (SSD) is a good choice. """ dim = static.ndim metric = SSDMetric(dim) """ Now we define an instance of the registration class. The SyN algorithm uses a multi-resolution approach by building a Gaussian Pyramid. We instruct the registration instance to perform at most $[n_0, n_1, ..., n_k]$ iterations at each level of the pyramid. The 0-th level corresponds to the finest resolution. """ level_iters = [200, 100, 50, 25] sdr = SymmetricDiffeomorphicRegistration(metric, level_iters, inv_iter = 50) """ Now we execute the optimization, which returns a DiffeomorphicMap object, that can be used to register images back and forth between the static and moving domains """ mapping = sdr.optimize(static, moving) """ It is a good idea to visualize the resulting deformation map to make sure the result is reasonable (at least, visually) """ regtools.plot_2d_diffeomorphic_map(mapping, 10, 'diffeomorphic_map.png') """ .. figure:: diffeomorphic_map.png :align: center Deformed lattice under the resulting diffeomorphic map. """ """ Now let's warp the moving image and see if it gets similar to the static image """ warped_moving = mapping.transform(moving, 'linear') regtools.overlay_images(static, warped_moving, 'Static','Overlay','Warped moving', 'direct_warp_result.png') """ .. figure:: direct_warp_result.png :align: center Moving image transformed under the (direct) transformation in green on top of the static image (in red). """ """ And we can also apply the inverse mapping to verify that the warped static image is similar to the moving image """ warped_static = mapping.transform_inverse(static, 'linear') regtools.overlay_images(warped_static, moving,'Warped static','Overlay','Moving', 'inverse_warp_result.png') """ .. figure:: inverse_warp_result.png :align: center Static image transformed under the (inverse) transformation in red on top of the moving image (in green). """ """ Now let's register a couple of slices from a b0 image using the Cross Correlation metric. Also, let's inspect the evolution of the registration. To do this we will define a function that will be called by the registration object at each stage of the optimization process. We will draw the current warped images after finishing each resolution. 
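As a purely illustrative warm-up (not used below; ``print_stage_callback`` is a
hypothetical name), the simplest possible callback with this signature would only
report when a resolution level of the scale space has finished:
"""

# Uses the ``imwarp`` module imported at the top of this example.
def print_stage_callback(sdr, status):
    # React only once a resolution level of the Gaussian pyramid is finished.
    if status == imwarp.RegistrationStages.SCALE_END:
        print('Finished a resolution level of the scale space')

"""
The callback used in this example goes one step further and draws the current
warped images.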
""" def callback_CC(sdr, status): #Status indicates at which stage of the optimization we currently are #For now, we will only react at the end of each resolution of the scale #space if status == imwarp.RegistrationStages.SCALE_END: #get the current images from the metric wmoving = sdr.metric.moving_image wstatic = sdr.metric.static_image #draw the images on top of each other with different colors regtools.overlay_images(wmoving, wstatic, 'Warped moving', 'Overlay', 'Warped static') """ Now we are ready to configure and run the registration. First load the data """ from dipy.data.fetcher import fetch_syn_data, read_syn_data from dipy.segment.mask import median_otsu fetch_syn_data() t1, b0 = read_syn_data() data = np.array(b0.get_data(), dtype = np.float64) """ We first remove the skull from the b0 volume """ b0_mask, mask = median_otsu(data, 4, 4) """ And select two slices to try the 2D registration """ static = b0_mask[:, :, 40] moving = b0_mask[:, :, 38] """ After loading the data, we instantiate the Cross Correlation metric. The metric receives three parameters: the dimension of the input images, the standard deviation of the Gaussian Kernel to be used to regularize the gradient and the radius of the window to be used for evaluating the local normalized cross correlation. """ sigma_diff = 3.0 radius = 4 metric = CCMetric(2, sigma_diff, radius) """ Let's use a scale space of 3 levels """ level_iters = [100, 50, 25] sdr = SymmetricDiffeomorphicRegistration(metric, level_iters) sdr.callback = callback_CC """ And execute the optimization """ mapping = sdr.optimize(static, moving) warped = mapping.transform(moving) ''' We can see the effect of the warping by switching between the images before and after registration ''' regtools.overlay_images(static, moving, 'Static', 'Overlay', 'Moving', 't1_slices_input.png') """ .. figure:: t1_slices_input.png :align: center Input images. """ regtools.overlay_images(static, warped, 'Static', 'Overlay', 'Warped moving', 't1_slices_res.png') """ .. figure:: t1_slices_res.png :align: center Moving image transformed under the (direct) transformation in green on top of the static image (in red). """ ''' And we can apply the inverse warping too ''' inv_warped = mapping.transform_inverse(static) regtools.overlay_images(inv_warped, moving, 'Warped static', 'Overlay', 'moving', 't1_slices_res2.png') """ .. figure:: t1_slices_res2.png :align: center Static image transformed under the (inverse) transformation in red on top of the moving image (in green). """ ''' Finally, let's see the deformation ''' regtools.plot_2d_diffeomorphic_map(mapping, 5, 'diffeomorphic_map_b0s.png') """ .. figure:: diffeomorphic_map_b0s.png :align: center Deformed lattice under the resulting diffeomorphic map. References ---------- .. [Avants09] Avants, B. B., Epstein, C. L., Grossman, M., & Gee, J. C. (2009). Symmetric Diffeomorphic Image Registration with Cross- Correlation: Evaluating Automated Labeling of Elderly and Neurodegenerative Brain, 12(1), 26-41. .. [Avants11] Avants, B. B., Tustison, N., & Song, G. (2011). Advanced Normalization Tools ( ANTS ), 1-35. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/syn_registration_3d.py000066400000000000000000000117721317371701200215000ustar00rootroot00000000000000""" ========================================== Symmetric Diffeomorphic Registration in 3D ========================================== This example explains how to register 3D volumes using the Symmetric Normalization (SyN) algorithm proposed by Avants et al. 
[Avants09]_ (also implemented in the ANTS software [Avants11]_) We will register two 3D volumes from the same modality using SyN with the Cross Correlation (CC) metric. """ import numpy as np import nibabel as nib from dipy.align.imwarp import SymmetricDiffeomorphicRegistration from dipy.align.imwarp import DiffeomorphicMap from dipy.align.metrics import CCMetric import os.path from dipy.viz import regtools """ Let's fetch two b0 volumes, the first one will be the b0 from the Stanford HARDI dataset """ from dipy.data import fetch_stanford_hardi, read_stanford_hardi fetch_stanford_hardi() nib_stanford, gtab_stanford = read_stanford_hardi() stanford_b0 = np.squeeze(nib_stanford.get_data())[..., 0] """ The second one will be the same b0 we used for the 2D registration tutorial """ from dipy.data.fetcher import fetch_syn_data, read_syn_data fetch_syn_data() nib_syn_t1, nib_syn_b0 = read_syn_data() syn_b0 = np.array(nib_syn_b0.get_data()) """ We first remove the skull from the b0's """ from dipy.segment.mask import median_otsu stanford_b0_masked, stanford_b0_mask = median_otsu(stanford_b0, 4, 4) syn_b0_masked, syn_b0_mask = median_otsu(syn_b0, 4, 4) static = stanford_b0_masked static_affine = nib_stanford.affine moving = syn_b0_masked moving_affine = nib_syn_b0.affine """ Suppose we have already done a linear registration to roughly align the two images """ pre_align = np.array([[1.02783543e+00, -4.83019053e-02, -6.07735639e-02, -2.57654118e+00], [4.34051706e-03, 9.41918267e-01, -2.66525861e-01, 3.23579799e+01], [5.34288908e-02, 2.90262026e-01, 9.80820307e-01, -1.46216651e+01], [0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.00000000e+00]]) """ As we did in the 2D example, we would like to visualize (some slices of) the two volumes by overlapping them over two channels of a color image. To do that we need them to be sampled on the same grid, so let's first re-sample the moving image on the static grid. We create an AffineMap to transform the moving image towards the static image """ from dipy.align.imaffine import AffineMap affine_map = AffineMap(pre_align, static.shape, static_affine, moving.shape, moving_affine) resampled = affine_map.transform(moving) """ plot the overlapped middle slices of the volumes """ regtools.overlay_slices(static, resampled, None, 1, 'Static', 'Moving', 'input_3d.png') """ .. figure:: input_3d.png :align: center Static image in red on top of the pre-aligned moving image (in green). """ """ We want to find an invertible map that transforms the moving image into the static image. We will use the Cross Correlation metric """ metric = CCMetric(3) """ Now we define an instance of the registration class. The SyN algorithm uses a multi-resolution approach by building a Gaussian Pyramid. We instruct the registration object to perform at most $[n_0, n_1, ..., n_k]$ iterations at each level of the pyramid. The 0-th level corresponds to the finest resolution. """ level_iters = [10, 10, 5] sdr = SymmetricDiffeomorphicRegistration(metric, level_iters) """ Execute the optimization, which returns a DiffeomorphicMap object, that can be used to register images back and forth between the static and moving domains. 
We provide the pre-aligning matrix that brings the moving image closer to the static image """ mapping = sdr.optimize(static, moving, static_affine, moving_affine, pre_align) """ Now let's warp the moving image and see if it gets similar to the static image """ warped_moving = mapping.transform(moving) """ We plot the overlapped middle slices """ regtools.overlay_slices(static, warped_moving, None, 1, 'Static', 'Warped moving', 'warped_moving.png') """ .. figure:: warped_moving.png :align: center Moving image transformed under the (direct) transformation in green on top of the static image (in red). """ """ And we can also apply the inverse mapping to verify that the warped static image is similar to the moving image """ warped_static = mapping.transform_inverse(static) regtools.overlay_slices(warped_static, moving, None, 1, 'Warped static', 'Moving', 'warped_static.png') """ .. figure:: warped_static.png :align: center Static image transformed under the (inverse) transformation in red on top of the moving image (in green). Note that the moving image has lower resolution. References ---------- .. [Avants09] Avants, B. B., Epstein, C. L., Grossman, M., & Gee, J. C. (2009). Symmetric Diffeomorphic Image Registration with Cross- Correlation: Evaluating Automated Labeling of Elderly and Neurodegenerative Brain, 12(1), 26-41. .. [Avants11] Avants, B. B., Tustison, N., & Song, G. (2011). Advanced Normalization Tools ( ANTS ), 1-35. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/tissue_classification.py000066400000000000000000000107551317371701200220760ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ ======================================================= Tissue Classification of a T1-weighted Structural Image ======================================================= This example explains how to segment a T1-weighted structural image by using a Bayesian formulation. The observation model (likelihood term) is defined as a Gaussian distribution and a Markov Random Field (MRF) is used to model the a priori probability of the context-dependent patterns of the different tissue types of the brain. Expectation Maximization and Iterated Conditional Modes are used to find the optimal solution. Similar algorithms have been proposed by Zhang et al. [Zhang2001]_ and Avants et al. [Avants2011]_ available in FAST-FSL and ANTS-atropos, respectively. Here we will use a T1-weighted image, that has been previously skull-stripped and bias field corrected. """ import numpy as np import matplotlib.pyplot as plt from dipy.data import fetch_tissue_data, read_tissue_data from dipy.segment.tissue import TissueClassifierHMRF """ First we fetch the T1 volume from the Syn dataset and determine its shape. """ fetch_tissue_data() t1_img = read_tissue_data() t1 = t1_img.get_data() print('t1.shape (%d, %d, %d)' % t1.shape) """ We have fetched the T1 volume. Now we will look at the axial and the coronal slices of the image. """ fig = plt.figure() a = fig.add_subplot(1, 2, 1) img_ax = np.rot90(t1[..., 89]) imgplot = plt.imshow(img_ax, cmap="gray") a.axis('off') a.set_title('Axial') a = fig.add_subplot(1, 2, 2) img_cor = np.rot90(t1[:, 128, :]) imgplot = plt.imshow(img_cor, cmap="gray") a.axis('off') a.set_title('Coronal') plt.savefig('t1_image.png', bbox_inches='tight', pad_inches=0) """ .. figure:: t1_image.png :align: center T1-weighted image of healthy adult. Now we will define the other two parameters for the segmentation algorithm. 
We will segment three classes, namely corticospinal fluid (CSF), white matter (WM) and gray matter (GM). """ nclass = 3 """ Then, the smoothness factor of the segmentation. Good performance is achieved with values between 0 and 0.5. """ beta = 0.1 """ We could also set the number of iterations. By default this parameter is set to 100 iterations, but most of the times the the ICM (Iterated Conditional Modes) loop will converge before reaching the 100th iteration. After setting the necessary parameters we can now call an instance of the class "TissueClassifierHMRF" and its method called "classify" and input the parameters defined above to perform the segmentation task. """ import time t0 = time.time() hmrf = TissueClassifierHMRF() initial_segmentation, final_segmentation, PVE = hmrf.classify(t1, nclass, beta) t1 = time.time() total_time = t1-t0 print('Total time:' + str(total_time)) fig = plt.figure() a = fig.add_subplot(1, 2, 1) img_ax = np.rot90(final_segmentation[..., 89]) imgplot = plt.imshow(img_ax) a.axis('off') a.set_title('Axial') a = fig.add_subplot(1, 2, 2) img_cor = np.rot90(final_segmentation[:, 128, :]) imgplot = plt.imshow(img_cor) a.axis('off') a.set_title('Coronal') plt.savefig('final_seg.png', bbox_inches='tight', pad_inches=0) """ Now we plot the resulting segmentation. .. figure:: final_seg.png :align: center Each tissue class is color coded separately, red for the WM, yellow for the GM and light blue for the CSF. And we will also have a look at the probability maps for each tissue class. """ fig = plt.figure() a = fig.add_subplot(1, 3, 1) img_ax = np.rot90(PVE[..., 89, 0]) imgplot = plt.imshow(img_ax, cmap="gray") a.axis('off') a.set_title('CSF') a = fig.add_subplot(1, 3, 2) img_cor = np.rot90(PVE[:, :, 89, 1]) imgplot = plt.imshow(img_cor, cmap="gray") a.axis('off') a.set_title('Gray Matter') a = fig.add_subplot(1, 3, 3) img_cor = np.rot90(PVE[:, :, 89, 2]) imgplot = plt.imshow(img_cor, cmap="gray") a.axis('off') a.set_title('White Matter') plt.savefig('probabilities.png', bbox_inches='tight', pad_inches=0) plt.show() """ .. figure:: probabilities.png :align: center :scale: 120 These are the probability maps of each of the three tissue classes. .. [Zhang2001] Zhang, Y., Brady, M. and Smith, S. Segmentation of Brain MR Images Through a Hidden Markov Random Field Model and the Expectation-Maximization Algorithm IEEE Transactions on Medical Imaging, 20(1): 45-56, 2001 .. [Avants2011] Avants, B. B., Tustison, N. J., Wu, J., Cook, P. A. and Gee, J. C. An open source multivariate framework for n-tissue segmentation with evaluation on public data. Neuroinformatics, 9(4): 381–400, 2011. """ dipy-0.13.0/doc/examples/tracking_eudx_odf.py000066400000000000000000000061121317371701200211560ustar00rootroot00000000000000""" ============================================= Deterministic Tracking with EuDX on ODF Peaks ============================================= .. NOTE:: DIPY has updated tools for fiber tracking. Our new machinery for fiber tracking is featured in the example titled Introduction to Basic Tracking. The tools demonstrated in this example are no longer actively being maintained and will likely be deprecated at some point. In this example we do deterministic fiber tracking on fields of ODF peaks. EuDX [Garyfallidis12]_ will be used for this. This example requires importing example `reconst_csa.py` in order to run. EuDX was primarily made with cpu efficiency in mind. The main idea can be used with any model that is a child of OdfModel. 
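As an optional, purely illustrative check (assuming the standard module layout of
this release), we can verify that ``CsaOdfModel`` is indeed a child of
``OdfModel``, which is why its peaks can drive EuDX:
"""

from dipy.reconst.odf import OdfModel
from dipy.reconst.shm import CsaOdfModel

# Expected to print True: CsaOdfModel derives from OdfModel.
print(issubclass(CsaOdfModel, OdfModel))

"""
We now import the CSA peaks and the sphere computed in `reconst_csa.py`.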
""" from reconst_csa import csapeaks, sphere """ This time we will not use FA as input to EuDX but we will use GFA (generalized FA), which is more suited for ODF functions. Tracking will stop when GFA is less than 0.2. """ from dipy.tracking.eudx import EuDX eu = EuDX(csapeaks.gfa, csapeaks.peak_indices[..., 0], seeds=10000, odf_vertices=sphere.vertices, a_low=0.2) csa_streamlines = [streamline for streamline in eu] """ Now that we have our streamlines in memory we can save the results on the disk. For this purpose we can use the TrackVis format (``*.trk``). First, we need to create a header. """ import nibabel as nib hdr = nib.trackvis.empty_header() hdr['voxel_size'] = (2., 2., 2.) hdr['voxel_order'] = 'LAS' hdr['dim'] = csapeaks.gfa.shape[:3] """ Save the streamlines. """ csa_streamlines_trk = ((sl, None, None) for sl in csa_streamlines) csa_sl_fname = 'csa_streamline.trk' nib.trackvis.write(csa_sl_fname, csa_streamlines_trk, hdr, points_space='voxel') """ Visualize the streamlines with fvtk (python vtk is required). """ from dipy.viz import fvtk from dipy.viz.colormap import line_colors r = fvtk.ren() fvtk.add(r, fvtk.line(csa_streamlines, line_colors(csa_streamlines))) print('Saving illustration as tensor_tracks.png') fvtk.record(r, n_frames=1, out_path='csa_tracking.png', size=(600, 600)) """ .. figure:: csa_tracking.png :align: center Deterministic streamlines with EuDX on ODF peaks field modulated by GFA. It is also possible to use EuDX with multiple ODF peaks, which is very helpful when tracking in crossing areas. """ eu = EuDX(csapeaks.peak_values, csapeaks.peak_indices, seeds=10000, odf_vertices=sphere.vertices, ang_thr=20., a_low=0.6) csa_streamlines_mult_peaks = [streamline for streamline in eu] fvtk.clear(r) fvtk.add(r, fvtk.line(csa_streamlines_mult_peaks, line_colors(csa_streamlines_mult_peaks))) print('Saving illustration as csa_tracking_mpeaks.png') fvtk.record(r, n_frames=1, out_path='csa_tracking_mpeaks.png', size=(600, 600)) """ .. figure:: csa_tracking_mpeaks.png :align: center Deterministic streamlines with EuDX on multiple ODF peaks. .. [Garyfallidis12] Garyfallidis E., "Towards an accurate brain tractography", PhD thesis, University of Cambridge, 2012. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/tracking_eudx_tensor.py000066400000000000000000000075251317371701200217310ustar00rootroot00000000000000""" ================================================= Deterministic Tracking with EuDX on Tensor Fields ================================================= In this example we do deterministic fiber tracking on Tensor fields with EuDX [Garyfallidis12]_. This example requires to import example `reconst_dti.py` to run. EuDX was primarily made with cpu efficiency in mind. Therefore, it should be useful to give you a quick overview of your reconstruction results with the help of tracking. """ import os import sys import numpy as np import nibabel as nib if not os.path.exists('tensor_fa.nii.gz'): import reconst_dti """ EuDX will use the directions (eigen vectors) of the Tensors to propagate streamlines from voxel to voxel and fractional anisotropy to stop tracking. """ fa_img = nib.load('tensor_fa.nii.gz') FA = fa_img.get_data() evecs_img = nib.load('tensor_evecs.nii.gz') evecs = evecs_img.get_data() """ In the background of the image the fitting will not be accurate because there all measured signal is mostly noise and possibly we will find FA values with nans (not a number). We can easily remove these in the following way. 
""" FA[np.isnan(FA)] = 0 """ EuDX takes as input discretized voxel directions on a unit sphere. Therefore, it is necessary to discretize the eigen vectors before feeding them in EuDX. For the discretization procedure we use an evenly distributed sphere of 724 points which we can access using the get_sphere function. """ from dipy.data import get_sphere sphere = get_sphere('symmetric724') """ We use quantize_evecs (evecs here stands for eigen vectors) to apply the discretization. """ from dipy.reconst.dti import quantize_evecs peak_indices = quantize_evecs(evecs, sphere.vertices) """ EuDX is the fiber tracking algorithm that we use in this example. The most important parameters are the first one which represents the magnitude of the peak of a scalar anisotropic function, the second which represents the indices of the discretized directions of the peaks and odf_vertices are the vertices of the input sphere. """ from dipy.tracking.eudx import EuDX eu = EuDX(FA.astype('f8'), peak_indices, seeds=50000, odf_vertices = sphere.vertices, a_low=0.2) tensor_streamlines = [streamline for streamline in eu] """ We can now save the results in the disk. For this purpose we can use the TrackVis format (``*.trk``). First, we need to create a header. """ hdr = nib.trackvis.empty_header() hdr['voxel_size'] = fa_img.header.get_zooms()[:3] hdr['voxel_order'] = 'LAS' hdr['dim'] = FA.shape """ Then we need to input the streamlines in the way that Trackvis format expects them. """ tensor_streamlines_trk = ((sl, None, None) for sl in tensor_streamlines) ten_sl_fname = 'tensor_streamlines.trk' """ Save the streamlines. """ nib.trackvis.write(ten_sl_fname, tensor_streamlines_trk, hdr, points_space='voxel') """ If you don't want to use Trackvis to visualize the file you can use our lightweight `fvtk` module. """ try: from dipy.viz import fvtk except ImportError: raise ImportError('Python vtk module is not installed') sys.exit() """ Create a scene. """ ren = fvtk.ren() """ Every streamline will be coloured according to its orientation """ from dipy.viz.colormap import line_colors """ fvtk.line adds a streamline actor for streamline visualization and fvtk.add adds this actor in the scene """ fvtk.add(ren, fvtk.streamtube(tensor_streamlines, line_colors(tensor_streamlines))) print('Saving illustration as tensor_tracks.png') ren.SetBackground(1, 1, 1) fvtk.record(ren, n_frames=1, out_path='tensor_tracks.png', size=(600, 600)) """ .. figure:: tensor_tracks.png :align: center Deterministic streamlines with EuDX on a Tensor Field. References ---------- .. [Garyfallidis12] Garyfallidis E., "Towards an accurate brain tractography", PhD thesis, University of Cambridge, 2012. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/tracking_quick_start.py000066400000000000000000000136431317371701200217210ustar00rootroot00000000000000""" ==================== Tracking Quick Start ==================== This example shows how to perform fast fiber tracking using dipy_ [Garyfallidis12]_. We will use Constrained Spherical Deconvolution (CSD) [Tournier07]_ for local reconstruction and then generate deterministic streamlines using the fiber directions (peaks) from CSD and fractional anisotropic (FA) from DTI as a stopping criteria for the tracking. Let's load the necessary modules. 
""" import numpy as np from dipy.tracking.local import LocalTracking, ThresholdTissueClassifier from dipy.tracking.utils import random_seeds_from_mask from dipy.reconst.dti import TensorModel from dipy.reconst.csdeconv import (ConstrainedSphericalDeconvModel, auto_response) from dipy.direction import peaks_from_model from dipy.data import fetch_stanford_hardi, read_stanford_hardi, get_sphere from dipy.segment.mask import median_otsu from dipy.viz import actor, window from dipy.io.image import save_nifti from nibabel.streamlines import save as save_trk from nibabel.streamlines import Tractogram from dipy.tracking.streamline import Streamlines """ Enables/disables interactive visualization """ interactive = False """ Load one of the available datasets with 150 gradients on the sphere and 10 b0s """ fetch_stanford_hardi() img, gtab = read_stanford_hardi() data = img.get_data() """ Create a brain mask. This dataset is a bit difficult to segment with the default ``median_otsu`` parameters (see :ref:`example_brain_extraction_dwi`) therefore we use here more advanced options. """ maskdata, mask = median_otsu(data, 3, 1, False, vol_idx=range(10, 50), dilate=2) """ For the Constrained Spherical Deconvolution we need to estimate the response function (see :ref:`example_reconst_csd`) and create a model. """ response, ratio = auto_response(gtab, data, roi_radius=10, fa_thr=0.7) csd_model = ConstrainedSphericalDeconvModel(gtab, response) """ Next, we use ``peaks_from_model`` to fit the data and calculated the fiber directions in all voxels. """ sphere = get_sphere('symmetric724') csd_peaks = peaks_from_model(model=csd_model, data=data, sphere=sphere, mask=mask, relative_peak_threshold=.5, min_separation_angle=25, parallel=True) """ For the tracking part, we will use the fiber directions from the ``csd_model`` but stop tracking in areas where fractional anisotropy is low (< 0.1). To derive the FA, used here as a stopping criterion, we would need to fit a tensor model first. Here, we fit the tensor using weighted least squares (WLS). """ tensor_model = TensorModel(gtab, fit_method='WLS') tensor_fit = tensor_model.fit(data, mask) fa = tensor_fit.fa """ In this simple example we can use FA to stop tracking. Here we stop tracking when FA < 0.1. """ tissue_classifier = ThresholdTissueClassifier(fa, 0.1) """ Now, we need to set starting points for propagating each track. We call those seeds. Using ``random_seeds_from_mask`` we can select a specific number of seeds (``seeds_count``) in each voxel where the mask ``fa > 0.3`` is true. """ seeds = random_seeds_from_mask(fa > 0.3, seeds_count=1) """ For quality assurance we can also visualize a slice from the direction field which we will use as the basis to perform the tracking. """ ren = window.Renderer() ren.add(actor.peak_slicer(csd_peaks.peak_dirs, csd_peaks.peak_values, colors=None)) if interactive: window.show(ren, size=(900, 900)) else: window.record(ren, out_path='csd_direction_field.png', size=(900, 900)) """ .. figure:: csd_direction_field.png :align: center **Direction Field (peaks)** ``EuDX`` [Garyfallidis12]_ is a fast algorithm that we use here to generate streamlines. This algorithm is what is used here and the default option when providing the output of peaks directly in LocalTracking. """ streamline_generator = LocalTracking(csd_peaks, tissue_classifier, seeds, affine=np.eye(4), step_size=0.5) streamlines = Streamlines(streamline_generator) """ The total number of streamlines is shown below. 
""" print(len(streamlines)) """ To increase the number of streamlines you can change the parameter ``seeds_count`` in ``random_seeds_from_mask``. We can visualize the streamlines using ``actor.line`` or ``actor.streamtube``. """ ren.clear() ren.add(actor.line(streamlines)) if interactive: window.show(ren, size=(900, 900)) else: print('Saving illustration as det_streamlines.png') window.record(ren, out_path='det_streamlines.png', size=(900, 900)) """ .. figure:: det_streamlines.png :align: center **Deterministic streamlines using EuDX (new framework)** To learn more about this process you could start playing with the number of seed points or, even better, specify seeds to be in specific regions of interest in the brain. Save the resulting streamlines in a Trackvis (.trk) format and FA as Nifti1 (.nii.gz). """ save_trk(Tractogram(streamlines, affine_to_rasmm=img.affine), 'det_streamlines.trk') save_nifti('fa_map.nii.gz', fa, img.affine) """ In Windows if you get a runtime error about frozen executable please start your script by adding your code above in a ``main`` function and use: `` if __name__ == '__main__': import multiprocessing multiprocessing.freeze_support() main() `` References ---------- .. [Garyfallidis12] Garyfallidis E., "Towards an accurate brain tractography", PhD thesis, University of Cambridge, 2012. .. [Tournier07] J-D. Tournier, F. Calamante and A. Connelly, "Robust determination of the fibre orientation distribution in diffusion MRI: Non-negativity constrained super-resolved spherical deconvolution", Neuroimage, vol. 35, no. 4, pp. 1459-1472, 2007. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/tracking_tissue_classifier.py000066400000000000000000000264471317371701200231160ustar00rootroot00000000000000""" ================================================= Using Various Tissue Classifiers for Tractography ================================================= The tissue classifier determines if the tracking stops or continues at each tracking position. The tracking stops when it reaches an ending region (e.g. low FA, gray matter or corticospinal fluid regions) or exits the image boundaries. The tracking also stops if the direction getter has no direction to follow. Each tissue classifier determines if the stopping is 'valid' or 'invalid'. A streamline is 'valid' when the tissue classifier determines if the streamline stops in a position classified as 'ENDPOINT' or 'OUTSIDEIMAGE'. A streamline is 'invalid' when it stops in a position classified as 'TRACKPOINT' or 'INVALIDPOINT'. These conditions are described below. The 'LocalTracking' generator can be set to output all generated streamlines or only the 'valid' ones. This example is an extension of the :ref:`example_deterministic_fiber_tracking` example. We begin by loading the data, creating a seeding mask from white matter voxels of the corpus callosum, fitting a Constrained Spherical Deconvolution (CSD) reconstruction model and creating the maximum deterministic direction getter. 
""" import numpy as np from dipy.data import (read_stanford_labels, default_sphere, read_stanford_pve_maps) from dipy.direction import DeterministicMaximumDirectionGetter from dipy.io.trackvis import save_trk from dipy.reconst.csdeconv import (ConstrainedSphericalDeconvModel, auto_response) from dipy.tracking.local import LocalTracking from dipy.tracking import utils from dipy.viz import fvtk from dipy.viz.colormap import line_colors ren = fvtk.ren() hardi_img, gtab, labels_img = read_stanford_labels() _, _, img_pve_wm = read_stanford_pve_maps() data = hardi_img.get_data() labels = labels_img.get_data() affine = hardi_img.affine white_matter = img_pve_wm.get_data() seed_mask = np.logical_and(labels == 2, white_matter == 1) seeds = utils.seeds_from_mask(seed_mask, density=2, affine=affine) response, ratio = auto_response(gtab, data, roi_radius=10, fa_thr=0.7) csd_model = ConstrainedSphericalDeconvModel(gtab, response) csd_fit = csd_model.fit(data, mask=white_matter) dg = DeterministicMaximumDirectionGetter.from_shcoeff(csd_fit.shm_coeff, max_angle=30., sphere=default_sphere) """ Threshold Tissue Classifier --------------------------- A scalar map can be used to define where the tracking stops. The threshold tissue classifier uses a scalar map to stop the tracking whenever the interpolated scalar value is lower than a fixed threshold. Here, we show an example using the fractional anisotropy (FA) map of the DTI model. The threshold tissue classifier uses a trilinear interpolation at the tracking position. **Parameters** - metric_map: numpy array [:, :, :] - threshold: float **Stopping criterion** - 'ENDPOINT': metric_map < threshold, - 'OUTSIDEIMAGE': tracking point outside of metric_map, - 'TRACKPOINT': stop because no direction is available, - 'INVALIDPOINT': N/A. """ import matplotlib.pyplot as plt import dipy.reconst.dti as dti from dipy.reconst.dti import fractional_anisotropy from dipy.tracking.local import ThresholdTissueClassifier tensor_model = dti.TensorModel(gtab) tenfit = tensor_model.fit(data, mask=labels > 0) FA = fractional_anisotropy(tenfit.evals) threshold_classifier = ThresholdTissueClassifier(FA, .2) fig = plt.figure() mask_fa = FA.copy() mask_fa[mask_fa < 0.2] = 0 plt.xticks([]) plt.yticks([]) plt.imshow(mask_fa[:, :, data.shape[2] // 2].T, cmap='gray', origin='lower', interpolation='nearest') fig.tight_layout() fig.savefig('threshold_fa.png') """ .. figure:: threshold_fa.png :align: center **Thresholded fractional anisotropy map.** """ all_streamlines_threshold_classifier = LocalTracking(dg, threshold_classifier, seeds, affine, step_size=.5, return_all=True) save_trk("deterministic_threshold_classifier_all.trk", all_streamlines_threshold_classifier, affine, labels.shape) streamlines = [sl for sl in all_streamlines_threshold_classifier] fvtk.clear(ren) fvtk.add(ren, fvtk.line(streamlines, line_colors(streamlines))) fvtk.record(ren, out_path='all_streamlines_threshold_classifier.png', size=(600, 600)) """ .. figure:: all_streamlines_threshold_classifier.png :align: center **Deterministic tractography using a thresholded fractional anisotropy.** """ """ Binary Tissue Classifier ------------------------ A binary mask can be used to define where the tracking stops. The binary tissue classifier stops the tracking whenever the tracking position is outside the mask. Here, we show how to obtain the binary tissue classifier from the white matter mask defined above. The binary tissue classifier uses a nearest-neighborhood interpolation at the tracking position. 
**Parameters** - mask: numpy array [:, :, :] **Stopping criterion** - 'ENDPOINT': mask = 0 - 'OUTSIDEIMAGE': tracking point outside of mask - 'TRACKPOINT': no direction is available - 'INVALIDPOINT': N/A """ from dipy.tracking.local import BinaryTissueClassifier binary_classifier = BinaryTissueClassifier(white_matter == 1) fig = plt.figure() plt.xticks([]) plt.yticks([]) fig.tight_layout() plt.imshow(white_matter[:, :, data.shape[2] // 2].T, cmap='gray', origin='lower', interpolation='nearest') fig.savefig('white_matter_mask.png') """ .. figure:: white_matter_mask.png :align: center **White matter binary mask.** """ all_streamlines_binary_classifier = LocalTracking(dg, binary_classifier, seeds, affine, step_size=.5, return_all=True) save_trk("deterministic_binary_classifier_all.trk", all_streamlines_binary_classifier, affine, labels.shape) streamlines = [sl for sl in all_streamlines_binary_classifier] fvtk.clear(ren) fvtk.add(ren, fvtk.line(streamlines, line_colors(streamlines))) fvtk.record(ren, out_path='all_streamlines_binary_classifier.png', size=(600, 600)) """ .. figure:: all_streamlines_binary_classifier.png :align: center **Deterministic tractography using a binary white matter mask.** """ """ ACT Tissue Classifier --------------------- Anatomically-constrained tractography (ACT) [Smith2012]_ uses information from anatomical images to determine when the tractography stops. The ``include_map`` defines when the streamline reached a 'valid' stopping region (e.g. gray matter partial volume estimation (PVE) map) and the ``exclude_map`` defines when the streamline reached an 'invalid' stopping region (e.g. corticospinal fluid PVE map). The background of the anatomical image should be added to the ``include_map`` to keep streamlines exiting the brain (e.g. through the brain stem). The ACT tissue classifier uses a trilinear interpolation at the tracking position. **Parameters** - ``include_map``: numpy array ``[:, :, :]``, - ``exclude_map``: numpy array ``[:, :, :]``, **Stopping criterion** - 'ENDPOINT': ``include_map`` > 0.5, - 'OUTSIDEIMAGE': tracking point outside of ``include_map`` or ``exclude_map``, - 'TRACKPOINT': no direction is available, - 'INVALIDPOINT': ``exclude_map`` > 0.5. """ from dipy.tracking.local import ActTissueClassifier img_pve_csf, img_pve_gm, img_pve_wm = read_stanford_pve_maps() background = np.ones(img_pve_gm.shape) background[(img_pve_gm.get_data() + img_pve_wm.get_data() + img_pve_csf.get_data()) > 0] = 0 include_map = img_pve_gm.get_data() include_map[background > 0] = 1 exclude_map = img_pve_csf.get_data() act_classifier = ActTissueClassifier(include_map, exclude_map) fig = plt.figure() plt.subplot(121) plt.xticks([]) plt.yticks([]) plt.imshow(include_map[:, :, data.shape[2] // 2].T, cmap='gray', origin='lower', interpolation='nearest') plt.subplot(122) plt.xticks([]) plt.yticks([]) plt.imshow(exclude_map[:, :, data.shape[2] // 2].T, cmap='gray', origin='lower', interpolation='nearest') fig.tight_layout() fig.savefig('act_maps.png') """ .. 
figure:: act_maps.png :align: center **Include (left) and exclude (right) maps for ACT.** """ all_streamlines_act_classifier = LocalTracking(dg, act_classifier, seeds, affine, step_size=.5, return_all=True) save_trk("deterministic_act_classifier_all.trk", all_streamlines_act_classifier, affine, labels.shape) streamlines = [sl for sl in all_streamlines_act_classifier] fvtk.clear(ren) fvtk.add(ren, fvtk.line(streamlines, line_colors(streamlines))) fvtk.record(ren, out_path='all_streamlines_act_classifier.png', size=(600, 600)) """ .. figure:: all_streamlines_act_classifier.png :align: center **Deterministic tractography using ACT stopping criterion.** """ valid_streamlines_act_classifier = LocalTracking(dg, act_classifier, seeds, affine, step_size=.5, return_all=False) save_trk("deterministic_act_classifier_valid.trk", valid_streamlines_act_classifier, affine, labels.shape) streamlines = [sl for sl in valid_streamlines_act_classifier] fvtk.clear(ren) fvtk.add(ren, fvtk.line(streamlines, line_colors(streamlines))) fvtk.record(ren, out_path='valid_streamlines_act_classifier.png', size=(600, 600)) """ .. figure:: valid_streamlines_act_classifier.png :align: center **Deterministic tractography using an anatomically-constrained tractography stopping criterion. Streamlines ending in gray matter regions only.** """ """ The threshold and binary tissue classifiers use a scalar map and a binary mask, respectively, to stop the tracking. The ACT tissue classifier uses partial volume estimation (PVE) maps from an anatomical image to stop the tracking. Additionally, the ACT tissue classifier determines if the tracking stopped in expected regions (e.g. gray matter) and allows the user to get only streamlines stopping in those regions. Notes ------ Currently in ACT the proposed method that cuts streamlines going through subcortical gray matter regions is not implemented. The backtracking technique for streamlines reaching INVALIDPOINT is not implemented either. References ---------- .. [Smith2012] Smith, R. E., Tournier, J.-D., Calamante, F., & Connelly, A. Anatomically-constrained tractography: Improved diffusion MRI streamlines tractography through effective use of anatomical information. NeuroImage, 63(3), 1924-1938, 2012. 
""" dipy-0.13.0/doc/examples/valid_examples.txt000066400000000000000000000024311317371701200206630ustar00rootroot00000000000000 quick_start.py tracking_quick_start.py brain_extraction_dwi.py reconst_csa_parallel.py reconst_csa.py reconst_csd_parallel.py reconst_csd.py reconst_dki.py reconst_dsi_metrics.py reconst_dsi.py reconst_dti.py reconst_fwdti.py reconst_gqi.py reconst_dsid.py reconst_ivim.py reconst_mapmri.py kfold_xval.py reslice_datasets.py segment_quickbundles.py segment_extending_clustering_framework.py segment_clustering_features.py segment_clustering_metrics.py snr_in_cc.py streamline_formats.py # tracking_eudx_odf.py # tracking_eudx_tensor.py sfm_tracking.py sfm_reconst.py gradients_spheres.py simulate_multi_tensor.py simulate_dki.py restore_dti.py streamline_length.py reconst_shore.py reconst_shore_metrics.py streamline_tools.py linear_fascicle_evaluation.py denoise_nlmeans.py denoise_localpca.py fiber_to_bundle_coherence.py # denoise_ascm.py introduction_to_basic_tracking.py probabilistic_fiber_tracking.py deterministic_fiber_tracking.py affine_registration_3d.py syn_registration_2d.py syn_registration_3d.py tissue_classification.py bundle_registration.py tracking_tissue_classifier.py # piesno.py viz_advanced.py viz_slice.py viz_bundles.py contextual_enhancement.py workflow_creation.py combined_workflow_creation.py viz_surfaces.py viz_ui.py dipy-0.13.0/doc/examples/viz_advanced.py000066400000000000000000000227731317371701200201470ustar00rootroot00000000000000""" ================================== Advanced interactive visualization ================================== In dipy_ we created a thin interface to access many of the capabilities available in the Visualization Toolkit framework (VTK) but tailored to the needs of structural and diffusion imaging. Initially the 3D visualization module was named ``fvtk``, meaning functions using vtk. This is still available for backwards compatibility but now there is a more comprehensive way to access the main functions using the following modules. """ import numpy as np from dipy.viz import actor, window, ui """ In ``window`` we have all the objects that connect what needs to be rendered to the display or the disk e.g., for saving screenshots. So, there you will find key objects and functions like the ``Renderer`` class which holds and provides access to all the actors and the ``show`` function which displays what is in the renderer on a window. Also, this module provides access to functions for opening/saving dialogs and printing screenshots (see ``snapshot``). In the ``actor`` module we can find all the different primitives e.g., streamtubes, lines, image slices, etc. In the ``ui`` module we have some other objects which allow to add buttons and sliders and these interact both with windows and actors. Because of this they need input from the operating system so they can process events. Let's get started. In this tutorial, we will visualize some bundles together with FA or T1. We will be able to change the slices using a ``LineSlider2D`` widget. First we need to fetch and load some datasets. """ from dipy.data.fetcher import fetch_bundles_2_subjects, read_bundles_2_subjects fetch_bundles_2_subjects() """ The following function outputs a dictionary with the required bundles e.g. ``af left`` (left arcuate fasciculus) and maps, e.g. FA for a specific subject. 
""" res = read_bundles_2_subjects('subj_1', ['t1', 'fa'], ['af.left', 'cst.right', 'cc_1']) """ We will use 3 bundles, FA and the affine transformation that brings the voxel coordinates to world coordinates (RAS 1mm). """ streamlines = res['af.left'] + res['cst.right'] + res['cc_1'] data = res['fa'] shape = data.shape affine = res['affine'] """ With our current design it is easy to decide in which space you want the streamlines and slices to appear. The default we have here is to appear in world coordinates (RAS 1mm). """ world_coords = True """ If we want to see the objects in native space we need to make sure that all objects which are currently in world coordinates are transformed back to native space using the inverse of the affine. """ if not world_coords: from dipy.tracking.streamline import transform_streamlines streamlines = transform_streamlines(streamlines, np.linalg.inv(affine)) """ Now we create, a ``Renderer`` object and add the streamlines using the ``line`` function and an image plane using the ``slice`` function. """ ren = window.Renderer() stream_actor = actor.line(streamlines) if not world_coords: image_actor_z = actor.slicer(data, affine=np.eye(4)) else: image_actor_z = actor.slicer(data, affine) """ We can also change also the opacity of the slicer. """ slicer_opacity = 0.6 image_actor_z.opacity(slicer_opacity) """ We can add additonal slicers by copying the original and adjusting the ``display_extent``. """ image_actor_x = image_actor_z.copy() image_actor_x.opacity(slicer_opacity) x_midpoint = int(np.round(shape[0] / 2)) image_actor_x.display_extent(x_midpoint, x_midpoint, 0, shape[1] - 1, 0, shape[2] - 1) image_actor_y = image_actor_z.copy() image_actor_y.opacity(slicer_opacity) y_midpoint = int(np.round(shape[1] / 2)) image_actor_y.display_extent(0, shape[0] - 1, y_midpoint, y_midpoint, 0, shape[2] - 1) """ Connect the actors with the Renderer. """ ren.add(stream_actor) ren.add(image_actor_z) ren.add(image_actor_x) ren.add(image_actor_y) """ Now we would like to change the position of each ``image_actor`` using a slider. The sliders are widgets which require access to different areas of the visualization pipeline and therefore we don't recommend using them with ``show``. The more appropriate way is to use them with the ``ShowManager`` object which allows accessing the pipeline in different areas. Here is how: """ show_m = window.ShowManager(ren, size=(1200, 900)) show_m.initialize() """ After we have initialized the ``ShowManager`` we can go ahead and create sliders to move the slices and change their opacity. """ line_slider_z = ui.LineSlider2D(min_value=0, max_value=shape[2] - 1, initial_value=shape[2] / 2, text_template="{value:.0f}", length=140) line_slider_x = ui.LineSlider2D(min_value=0, max_value=shape[0] - 1, initial_value=shape[0] / 2, text_template="{value:.0f}", length=140) line_slider_y = ui.LineSlider2D(min_value=0, max_value=shape[1] - 1, initial_value=shape[1] / 2, text_template="{value:.0f}", length=140) opacity_slider = ui.LineSlider2D(min_value=0.0, max_value=1.0, initial_value=slicer_opacity, length=140) """ Now we will write callbacks for the sliders and register them. 
""" def change_slice_z(i_ren, obj, slider): z = int(np.round(slider.value)) image_actor_z.display_extent(0, shape[0] - 1, 0, shape[1] - 1, z, z) def change_slice_x(i_ren, obj, slider): x = int(np.round(slider.value)) image_actor_x.display_extent(x, x, 0, shape[1] - 1, 0, shape[2] - 1) def change_slice_y(i_ren, obj, slider): y = int(np.round(slider.value)) image_actor_y.display_extent(0, shape[0] - 1, y, y, 0, shape[2] - 1) def change_opacity(i_ren, obj, slider): slicer_opacity = slider.value image_actor_z.opacity(slicer_opacity) image_actor_x.opacity(slicer_opacity) image_actor_y.opacity(slicer_opacity) line_slider_z.add_callback(line_slider_z.slider_disk, "MouseMoveEvent", change_slice_z) line_slider_x.add_callback(line_slider_x.slider_disk, "MouseMoveEvent", change_slice_x) line_slider_y.add_callback(line_slider_y.slider_disk, "MouseMoveEvent", change_slice_y) opacity_slider.add_callback(opacity_slider.slider_disk, "MouseMoveEvent", change_opacity) """ We'll also create text labels to identify the sliders. """ def build_label(text): label = ui.TextBlock2D() label.message = text label.font_size = 18 label.font_family = 'Arial' label.justification = 'left' label.bold = False label.italic = False label.shadow = False label.actor.GetTextProperty().SetBackgroundColor(0, 0, 0) label.actor.GetTextProperty().SetBackgroundOpacity(0.0) label.color = (1, 1, 1) return label line_slider_label_z = build_label(text="Z Slice") line_slider_label_x = build_label(text="X Slice") line_slider_label_y = build_label(text="Y Slice") opacity_slider_label = build_label(text="Opacity") """ Now we will create a ``panel`` to contain the sliders and labels. """ panel = ui.Panel2D(center=(1030, 120), size=(300, 200), color=(1, 1, 1), opacity=0.1, align="right") panel.add_element(line_slider_label_x, 'relative', (0.1, 0.75)) panel.add_element(line_slider_x, 'relative', (0.65, 0.8)) panel.add_element(line_slider_label_y, 'relative', (0.1, 0.55)) panel.add_element(line_slider_y, 'relative', (0.65, 0.6)) panel.add_element(line_slider_label_z, 'relative', (0.1, 0.35)) panel.add_element(line_slider_z, 'relative', (0.65, 0.4)) panel.add_element(opacity_slider_label, 'relative', (0.1, 0.15)) panel.add_element(opacity_slider, 'relative', (0.65, 0.2)) show_m.ren.add(panel) """ Then, we can render all the widgets and everything else in the screen and start the interaction using ``show_m.start()``. However, if you change the window size, the panel will not update its position properly. The solution to this issue is to update the position of the panel using its ``re_align`` method every time the window size changes. """ global size size = ren.GetSize() def win_callback(obj, event): global size if size != obj.GetSize(): size_old = size size = obj.GetSize() size_change = [size[0] - size_old[0], 0] panel.re_align(size_change) show_m.initialize() """ Finally, please set the following variable to ``True`` to interact with the datasets in 3D. """ interactive = False ren.zoom(1.5) ren.reset_clipping_range() if interactive: show_m.add_window_callback(win_callback) show_m.render() show_m.start() else: window.record(ren, out_path='bundles_and_3_slices.png', size=(1200, 900), reset_camera=False) """ .. figure:: bundles_and_3_slices.png :align: center A few bundles with interactive slicing. """ del show_m """ .. 
include:: ../links_names.inc """dipy-0.13.0/doc/examples/viz_bundles.py000066400000000000000000000133711317371701200200300ustar00rootroot00000000000000""" ======================================== Visualize bundles and metrics on bundles ======================================== First, let's download some available datasets. Here we are using a dataset which provides metrics and bundles. """ import numpy as np from dipy.viz import window, actor from dipy.data import fetch_bundles_2_subjects, read_bundles_2_subjects from dipy.tracking.streamline import transform_streamlines fetch_bundles_2_subjects() dix = read_bundles_2_subjects(subj_id='subj_1', metrics=['fa'], bundles=['cg.left', 'cst.right']) """ Store fractional anisotropy. """ fa = dix['fa'] """ Store grid to world transformation matrix. """ affine = dix['affine'] """ Store the cingulum bundle. A bundle is a list of streamlines. """ bundle = dix['cg.left'] """ It happened that this bundle is in world coordinates and therefore we need to transform it into native image coordinates so that it is in the same coordinate space as the ``fa`` image. """ bundle_native = transform_streamlines(bundle, np.linalg.inv(affine)) """ Show every streamline with an orientation color =============================================== This is the default option when you are using ``line`` or ``streamtube``. """ renderer = window.Renderer() stream_actor = actor.line(bundle_native) renderer.set_camera(position=(-176.42, 118.52, 128.20), focal_point=(113.30, 128.31, 76.56), view_up=(0.18, 0.00, 0.98)) renderer.add(stream_actor) # Uncomment the line below to show to display the window # window.show(renderer, size=(600, 600), reset_camera=False) window.record(renderer, out_path='bundle1.png', size=(600, 600)) """ .. figure:: bundle1.png :align: center One orientation color for every streamline. You may wonder how we knew how to set the camera. This is very easy. You just need to run ``window.show`` once see how you want to see the object and then close the window and call the ``camera_info`` method which prints the position, focal point and view up vectors of the camera. """ renderer.camera_info() """ Show every point with a value from a volume with default colormap ================================================================= Here we will need to input the ``fa`` map in ``streamtube`` or ``line``. """ renderer.clear() stream_actor2 = actor.line(bundle_native, fa, linewidth=0.1) """ We can also show the scalar bar. """ bar = actor.scalar_bar() renderer.add(stream_actor2) renderer.add(bar) # window.show(renderer, size=(600, 600), reset_camera=False) window.record(renderer, out_path='bundle2.png', size=(600, 600)) """ .. figure:: bundle2.png :align: center Every point with a color from FA. Show every point with a value from a volume with your colormap ============================================================== Here we will need to input the ``fa`` map in ``streamtube`` """ renderer.clear() hue = [0.0, 0.0] # red only saturation = [0.0, 1.0] # white to red lut_cmap = actor.colormap_lookup_table(hue_range=hue, saturation_range=saturation) stream_actor3 = actor.line(bundle_native, fa, linewidth=0.1, lookup_colormap=lut_cmap) bar2 = actor.scalar_bar(lut_cmap) renderer.add(stream_actor3) renderer.add(bar2) # window.show(renderer, size=(600, 600), reset_camera=False) window.record(renderer, out_path='bundle3.png', size=(600, 600)) """ .. figure:: bundle3.png :align: center Every point with a color from FA using a non default colormap. 
Show every bundle with a specific color
========================================

You can have a bundle with a specific color. In this example, we are choosing
orange.
"""

renderer.clear()
stream_actor4 = actor.line(bundle_native, (1., 0.5, 0), linewidth=0.1)

renderer.add(stream_actor4)

# window.show(renderer, size=(600, 600), reset_camera=False)
window.record(renderer, out_path='bundle4.png', size=(600, 600))

"""
.. figure:: bundle4.png
   :align: center

   Entire bundle with a specific color.

Show every streamline of a bundle with a different color
=========================================================

Let's make a colormap where every streamline of the bundle is colored by its
length.
"""

renderer.clear()

from dipy.tracking.streamline import length

lengths = length(bundle_native)

hue = [0.5, 0.5]  # cyan only
saturation = [0.0, 1.0]  # white to cyan

lut_cmap = actor.colormap_lookup_table(
    scale_range=(lengths.min(), lengths.max()),
    hue_range=hue,
    saturation_range=saturation)

stream_actor5 = actor.line(bundle_native, lengths, linewidth=0.1,
                           lookup_colormap=lut_cmap)

renderer.add(stream_actor5)
bar3 = actor.scalar_bar(lut_cmap)

renderer.add(bar3)

# window.show(renderer, size=(600, 600), reset_camera=False)
window.record(renderer, out_path='bundle5.png', size=(600, 600))

"""
.. figure:: bundle5.png
   :align: center

   **Color every streamline by the length of the streamline**

Show every point of every streamline with a different color
============================================================

In this case, in which we want a color per point and per streamline, we can
create a list of colors that corresponds to the list of streamlines (bundles).
Here in ``colors`` we will insert some random RGB colors.
"""

renderer.clear()

colors = [np.random.rand(*streamline.shape) for streamline in bundle_native]

stream_actor6 = actor.line(bundle_native, colors, linewidth=0.2)

renderer.add(stream_actor6)

# window.show(renderer, size=(600, 600), reset_camera=False)
window.record(renderer, out_path='bundle6.png', size=(600, 600))

"""
.. figure:: bundle6.png
   :align: center

   Random colors per point per streamline.

In summary, we showed that there are many useful ways for visualizing maps on
bundles.
"""
dipy-0.13.0/doc/examples/viz_slice.py000066400000000000000000000174571317371701200175020ustar00rootroot00000000000000
"""
=====================
Simple volume slicing
=====================

Here we present an example for visualizing slices from 3D images.
"""

from __future__ import division

import os
import nibabel as nib
from dipy.data import fetch_bundles_2_subjects
from dipy.viz import window, actor, ui

"""
Let's download and load a T1.
"""

fetch_bundles_2_subjects()

fname_t1 = os.path.join(os.path.expanduser('~'), '.dipy',
                        'exp_bundles_and_maps', 'bundles_2_subjects',
                        'subj_1', 't1_warped.nii.gz')

img = nib.load(fname_t1)
data = img.get_data()
affine = img.affine

"""
Create a Renderer object which holds all the actors that we want to visualize.
"""

renderer = window.Renderer()
renderer.background((0.5, 0.5, 0.5))

"""
Render slices from T1 with a specific value range
=================================================

The T1 usually has a higher range of values than what can be visualized in an
image. We can set the range that we would like to see.
"""

mean, std = data[data > 0].mean(), data[data > 0].std()
value_range = (mean - 0.5 * std, mean + 1.5 * std)

"""
The ``slicer`` function will read data and resample the data using an affine
transformation matrix.
The default behavior of this function is to show the middle slice of the last dimension of the resampled data. """ slice_actor = actor.slicer(data, affine, value_range) """ The ``slice_actor`` contains an axial slice. """ renderer.add(slice_actor) """ The same actor can show any different slice from the given data using its ``display`` function. However, if we want to show multiple slices we need to copy the actor first. """ slice_actor2 = slice_actor.copy() """ Now we have a new ``slice_actor`` which displays the middle slice of sagittal plane. """ slice_actor2.display(slice_actor2.shape[0]//2, None, None) renderer.add(slice_actor2) renderer.reset_camera() renderer.zoom(1.4) """ In order to interact with the data you will need to uncomment the line below. """ # window.show(renderer, size=(600, 600), reset_camera=False) """ Otherwise, you can save a screenshot using the following command. """ window.record(renderer, out_path='slices.png', size=(600, 600), reset_camera=False) """ .. figure:: slices.png :align: center Simple slice viewer. Render slices from FA with your colormap ======================================== It is also possible to set the colormap of your preference. Here we are loading an FA image and showing it in a non-standard way using an HSV colormap. """ fname_fa = os.path.join(os.path.expanduser('~'), '.dipy', 'exp_bundles_and_maps', 'bundles_2_subjects', 'subj_1', 'fa_1x1x1.nii.gz') img = nib.load(fname_fa) fa = img.get_data() """ Notice here how the scale range is (0, 255) and not (0, 1) which is the usual range of FA values. """ lut = actor.colormap_lookup_table(scale_range=(0, 255), hue_range=(0.4, 1.), saturation_range=(1, 1.), value_range=(0., 1.)) """ This is because the lookup table is applied in the slice after interpolating to (0, 255). """ fa_actor = actor.slicer(fa, affine, lookup_colormap=lut) renderer.clear() renderer.add(fa_actor) renderer.reset_camera() renderer.zoom(1.4) # window.show(renderer, size=(600, 600), reset_camera=False) window.record(renderer, out_path='slices_lut.png', size=(600, 600), reset_camera=False) """ .. figure:: slices_lut.png :align: center **Simple slice viewer with an HSV colormap**. """ """ Now we would like to add the ability to click on a voxel and show its value on a panel in the window. The panel is a UI element which requires access to different areas of the visualization pipeline and therefore we don't recommend using it with ``window.show``. The more appropriate way is to use the ``ShowManager`` object, which allows accessing the pipeline in different areas. """ show_m = window.ShowManager(renderer, size=(1200, 900)) show_m.initialize() """ We'll start by creating the panel and adding it to the ``ShowManager`` """ label_position = ui.TextBlock2D(text='Position:') label_value = ui.TextBlock2D(text='Value:') result_position = ui.TextBlock2D(text='') result_value = ui.TextBlock2D(text='') panel_picking = ui.Panel2D(center=(200, 120), size=(250, 125), color=(0, 0, 0), opacity=0.75, align="left") panel_picking.add_element(label_position, 'relative', (0.1, 0.55)) panel_picking.add_element(label_value, 'relative', (0.1, 0.25)) panel_picking.add_element(result_position, 'relative', (0.45, 0.55)) panel_picking.add_element(result_value, 'relative', (0.45, 0.25)) show_m.ren.add(panel_picking) """ Add a left-click callback to the slicer. Also disable interpolation so you can see what you are picking. 
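The callback below reports the picked voxel index ``(i, j, k)`` and the value
stored there. If you also want that position in world (scanner) coordinates,
you can push the index through the image ``affine`` loaded earlier. A small
sketch; the helper name is ours and used only for illustration:
"""

import numpy as np


def ijk_to_world(ijk, affine):
    """Illustrative helper (not part of DIPY): voxel index -> world coords."""
    i, j, k = ijk
    return np.dot(affine, np.array([i, j, k, 1.0]))[:3]


# For instance, the world position of the central voxel of ``data``.
center_ijk = tuple(d // 2 for d in data.shape[:3])
print(ijk_to_world(center_ijk, affine))

"""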
""" def left_click_callback(obj, ev): """Get the value of the clicked voxel and show it in the panel.""" event_pos = show_m.iren.GetEventPosition() obj.picker.Pick(event_pos[0], event_pos[1], 0, show_m.ren) i, j, k = obj.picker.GetPointIJK() result_position.message = '({}, {}, {})'.format(str(i), str(j), str(k)) result_value.message = '%.8f' % data[i, j, k] fa_actor.SetInterpolate(False) fa_actor.AddObserver('LeftButtonPressEvent', left_click_callback, 1.0) # show_m.start() """ Create a mosaic ================ By using the ``copy`` and ``display`` method of the ``slice_actor`` becomes easy and efficient to create a mosaic of all the slices. So, let's clear the renderer and change the projection from perspective to parallel. We'll also need a new show manager and an associated callback. """ renderer.clear() renderer.projection('parallel') result_position.message = '' result_value.message = '' show_m_mosaic = window.ShowManager(renderer, size=(1200, 900)) show_m_mosaic.initialize() def left_click_callback_mosaic(obj, ev): """Get the value of the clicked voxel and show it in the panel.""" event_pos = show_m_mosaic.iren.GetEventPosition() obj.picker.Pick(event_pos[0], event_pos[1], 0, show_m_mosaic.ren) i, j, k = obj.picker.GetPointIJK() result_position.message = '({}, {}, {})'.format(str(i), str(j), str(k)) result_value.message = '%.8f' % data[i, j, k] """ Now we need to create two nested for loops which will set the positions of the grid of the mosaic and add the new actors to the renderer. We are going to use 15 columns and 10 rows but you can adjust those with your datasets. """ cnt = 0 X, Y, Z = slice_actor.shape[:3] rows = 10 cols = 15 border = 10 for j in range(rows): for i in range(cols): slice_mosaic = slice_actor.copy() slice_mosaic.display(None, None, cnt) slice_mosaic.SetPosition((X + border) * i, 0.5 * cols * (Y + border) - (Y + border) * j, 0) slice_mosaic.SetInterpolate(False) slice_mosaic.AddObserver('LeftButtonPressEvent', left_click_callback_mosaic, 1.0) renderer.add(slice_mosaic) cnt += 1 if cnt > Z: break if cnt > Z: break renderer.reset_camera() renderer.zoom(1.6) # show_m_mosaic.ren.add(panel_picking) # show_m_mosaic.start() """ If you uncomment the two lines above, you will be able to move the mosaic up/down and left/right using the middle mouse button drag, zoom in/out using the scroll wheel, and pick voxels with left click. """ window.record(renderer, out_path='mosaic.png', size=(900, 600), reset_camera=False) """ .. figure:: mosaic.png :align: center A mosaic of all the slices in the T1 volume. """ dipy-0.13.0/doc/examples/viz_surfaces.py000066400000000000000000000056621317371701200202130ustar00rootroot00000000000000""" ================== Visualize surfaces ================== Here is a simple tutorial that shows how to visualize surfaces using dipy_. It also shows how to load/save, get/set and update ``vtkPolyData`` and show surfaces. ``vtkPolyData`` is a structure used by VTK to represent surfaces and other data structures. Here we show how to visualize a simple cube but the same idea should apply for any surface. 
""" import numpy as np """ Import useful functions from ``dipy.viz.utils`` """ import dipy.io.vtk as io_vtk import dipy.viz.utils as ut_vtk from dipy.viz import window # Conditional import machinery for vtk # Allow import, but disable doctests if we don't have vtk from dipy.utils.optpkg import optional_package vtk, have_vtk, setup_module = optional_package('vtk') """ Create an empty ``vtkPolyData`` """ my_polydata = vtk.vtkPolyData() """ Create a cube with vertices and triangles as numpy arrays """ my_vertices = np.array([[0.0, 0.0, 0.0], [0.0, 0.0, 1.0], [0.0, 1.0, 0.0], [0.0, 1.0, 1.0], [1.0, 0.0, 0.0], [1.0, 0.0, 1.0], [1.0, 1.0, 0.0], [1.0, 1.0, 1.0]]) # the data type for vtk is needed to mention here, numpy.int64 my_triangles = np.array([[0, 6, 4], [0, 2, 6], [0, 3, 2], [0, 1, 3], [2, 7, 6], [2, 3, 7], [4, 6, 7], [4, 7, 5], [0, 4, 5], [0, 5, 1], [1, 5, 7], [1, 7, 3]],dtype='i8') """ Set vertices and triangles in the ``vtkPolyData`` """ ut_vtk.set_polydata_vertices(my_polydata, my_vertices) ut_vtk.set_polydata_triangles(my_polydata, my_triangles) """ Save the ``vtkPolyData`` """ file_name = "my_cube.vtk" io_vtk.save_polydata(my_polydata, file_name) print("Surface saved in " + file_name) """ Load the ``vtkPolyData`` """ cube_polydata = io_vtk.load_polydata(file_name) """ add color based on vertices position """ cube_vertices = ut_vtk.get_polydata_vertices(cube_polydata) colors = cube_vertices * 255 ut_vtk.set_polydata_colors(cube_polydata, colors) print("new surface colors") print(ut_vtk.get_polydata_colors(cube_polydata)) """ Visualize surfaces """ # get vtkActor cube_actor = ut_vtk.get_actor_from_polydata(cube_polydata) # renderer and scene renderer = window.Renderer() renderer.add(cube_actor) renderer.set_camera(position=(10, 5, 7), focal_point=(0.5, 0.5, 0.5)) renderer.zoom(3) # display # window.show(renderer, size=(600, 600), reset_camera=False) window.record(renderer, out_path='cube.png', size=(600, 600)) """ .. figure:: cube.png :align: center An example of a simple surface visualized with DIPY. .. include:: ../links_names.inc """ dipy-0.13.0/doc/examples/viz_ui.py000066400000000000000000000114621317371701200170100ustar00rootroot00000000000000""" =============== User Interfaces =============== This example shows how to use the UI API. Currently includes button, textbox, panel, and line slider. First, a bunch of imports. """ import os from dipy.data import read_viz_icons, fetch_viz_icons from dipy.viz import ui, window """ 3D Elements =========== Let's have some cubes in 3D. """ def cube_maker(color=None, size=(0.2, 0.2, 0.2), center=None): cube = window.vtk.vtkCubeSource() cube.SetXLength(size[0]) cube.SetYLength(size[1]) cube.SetZLength(size[2]) if center is not None: cube.SetCenter(*center) cube_mapper = window.vtk.vtkPolyDataMapper() cube_mapper.SetInputConnection(cube.GetOutputPort()) cube_actor = window.vtk.vtkActor() cube_actor.SetMapper(cube_mapper) if color is not None: cube_actor.GetProperty().SetColor(color) return cube_actor cube_actor_1 = cube_maker((1, 0, 0), (50, 50, 50), center=(0, 0, 0)) cube_actor_2 = cube_maker((0, 1, 0), (10, 10, 10), center=(100, 0, 0)) """ Buttons ======= We first fetch the icons required for making the buttons. """ fetch_viz_icons() """ Add the icon filenames to a dict. 
""" icon_files = dict() icon_files['stop'] = read_viz_icons(fname='stop2.png') icon_files['play'] = read_viz_icons(fname='play3.png') icon_files['plus'] = read_viz_icons(fname='plus.png') icon_files['cross'] = read_viz_icons(fname='cross.png') """ Create a button through our API. """ button_example = ui.Button2D(icon_fnames=icon_files) """ We now add some click listeners. """ def left_mouse_button_click(i_ren, obj, button): print("Left Button Clicked") def left_mouse_button_drag(i_ren, obj, button): print ("Left Button Dragged") button_example.on_left_mouse_button_drag = left_mouse_button_drag button_example.on_left_mouse_button_pressed = left_mouse_button_click def right_mouse_button_drag(i_ren, obj, button): print("Right Button Dragged") def right_mouse_button_click(i_ren, obj, button): print ("Right Button Clicked") button_example.on_right_mouse_button_drag = right_mouse_button_drag button_example.on_right_mouse_button_pressed = right_mouse_button_click """ Let's have another button. """ second_button_example = ui.Button2D(icon_fnames=icon_files) """ This time, we will call the built in `next_icon` method via a callback that is triggered on left click. """ def modify_button_callback(i_ren, obj, button): button.next_icon() i_ren.force_render() second_button_example.on_left_mouse_button_pressed = modify_button_callback """ Panels ====== Simply create a panel and add elements to it. """ panel = ui.Panel2D(center=(440, 90), size=(300, 150), color=(1, 1, 1), align="right") panel.add_element(button_example, 'relative', (0.2, 0.2)) panel.add_element(second_button_example, 'absolute', (480, 100)) """ TextBox ======= """ text = ui.TextBox2D(height=3, width=10) """ 2D Line Slider ============== """ def translate_green_cube(i_ren, obj, slider): value = slider.value cube_actor_2.SetPosition(value, 0, 0) line_slider = ui.LineSlider2D(initial_value=-2, min_value=-5, max_value=5) line_slider.add_callback(line_slider.slider_disk, "MouseMoveEvent", translate_green_cube) """ 2D Disk Slider ============== """ def rotate_red_cube(i_ren, obj, slider): angle = slider.value cube_actor_1.RotateY(0.005 * angle) disk_slider = ui.DiskSlider2D() disk_slider.set_center((200, 200)) disk_slider.add_callback(disk_slider.handle, "MouseMoveEvent", rotate_red_cube) """ 2D File Select Menu ============== """ file_select_menu = ui.FileSelectMenu2D(size=(500, 500), position=(300, 300), font_size=16, extensions=["py", "png"], directory_path=os.getcwd(), parent=None) """ Adding Elements to the ShowManager ================================== Once all elements have been initialised, they have to be added to the show manager in the following manner. """ current_size = (600, 600) show_manager = window.ShowManager(size=current_size, title="DIPY UI Example") show_manager.ren.add(cube_actor_1) show_manager.ren.add(cube_actor_2) show_manager.ren.add(panel) show_manager.ren.add(text) show_manager.ren.add(line_slider) show_manager.ren.add(disk_slider) show_manager.ren.add(file_select_menu) show_manager.ren.reset_camera() show_manager.ren.reset_clipping_range() show_manager.ren.azimuth(30) # Uncomment this to start the visualisation # show_manager.start() window.record(show_manager.ren, size=current_size, out_path="viz_ui.png") """ .. figure:: viz_ui.png :align: center **User interface example**. """ dipy-0.13.0/doc/examples/workflow_creation.py000066400000000000000000000072511317371701200212420ustar00rootroot00000000000000""" ============================================================ Creating a new workflow. 
============================================================

A workflow is a series of dipy_ operations with fixed inputs and outputs that
is callable via command line or another interface.

For example, after installing dipy_, you can call the following anywhere from
your command line::

    $ dipy_nlmeans t1.nii.gz t1_denoised.nii.gz
"""

"""
First create your workflow. Usually this would be in its own Python file in
the ``<../dipy/workflows>`` directory.
"""
import shutil

"""
``shutil`` will be used for sample file manipulation.
"""
from dipy.workflows.workflow import Workflow

"""
``Workflow`` is the base class that will be extended to create our workflow.
"""


class AppendTextFlow(Workflow):

    def run(self, input_files, text_to_append='dipy', out_dir='',
            out_file='append.txt'):
        """
        Parameters
        ----------
        input_files : string
            Path to the input files. This path may contain wildcards to
            process multiple inputs at once.

        text_to_append : string, optional
            Text that will be appended to the file. (default 'dipy')

        out_dir : string, optional
            Where the resulting file will be saved. (default '')

        out_file : string, optional
            Name of the result file to be saved. (default 'append.txt')
        """

        """
        ``AppendTextFlow`` is the name of our workflow. Note that it needs
        to extend Workflow for everything to work properly. It will append
        text to a file.

        It is mandatory to have out_dir as a parameter. It is also mandatory
        to put 'out_' in front of every parameter that is going to be an
        output. Lastly, all out_ params need to be at the end of the params
        list.

        The ``run`` docstring is very important: you need to document every
        parameter, as they will be used with inspection to build the command
        line argument parser.
        """

        io_it = self.get_io_iterator()

        for in_file, out_file in io_it:

            shutil.copy(in_file, out_file)

            with open(out_file, 'a') as myfile:

                myfile.write(text_to_append)

        """
        Use self.get_io_iterator() in every workflow you create. This creates
        an ``IOIterator`` object that creates output file names and a
        directory structure based on the inputs and some other advanced
        output strategy parameters.

        By iterating over the ``IOIterator`` object created previously, you
        conveniently get all input and output paths for every input file
        found when globbing the input parameters.

        The code in the loop is the actual workflow processing code. It can
        be anything. For the example, it just appends text to an input file.
        """

"""
This is it for the workflow! Now, to be able to call it easily via the command
line, you need to add this bit of code. Usually this is in a separate
executable file located in ``bin``.
"""

from dipy.workflows.flow_runner import run_flow

"""
This is the method that will wrap everything that is needed to make a flow
command line ready and then run it.
"""

if __name__ == "__main__":
    run_flow(AppendTextFlow())

"""
This is the only thing needed to make your workflow available through the
command line.

Now just call the script you just made with ``-h`` to see the argparser help
text::

    python workflow_creation.py --help

You should see all your parameters available along with some extra common
ones like logging file and force overwrite.

Also all the documentation you wrote about each parameter is there.

Now call it for real with a text file::

    python workflow_creation.py ./text_file.txt
.. include:: ../links_names.inc
"""
dipy-0.13.0/doc/examples_built/000077500000000000000000000000001317371701200163245ustar00rootroot00000000000000dipy-0.13.0/doc/examples_built/.gitignore000066400000000000000000000001451317371701200203140ustar00rootroot00000000000000# Ignore everything in this directory apart from gitignore and the README file
*
!.gitignore
!README
dipy-0.13.0/doc/examples_built/README000066400000000000000000000012301317371701200172010ustar00rootroot00000000000000Examples built README
---------------------

This directory is a build-time container for the examples. The
/tools/make_examples.py script takes the examples in /doc/examples, compiles
the examples to rst files, builds all the graphics, and puts the result into
this directory, along with copies of the .py files. The contents of this
directory then get included because the /doc/examples_index.rst file points
to this directory and the built .rst files.

Please don't put anything in this directory other than the .gitignore (ignore
everything) and this README. If you need something in here, please copy it
from the make_examples.py script.
dipy-0.13.0/doc/examples_built/_static/000077500000000000000000000000001317371701200177525ustar00rootroot00000000000000dipy-0.13.0/doc/examples_built/_static/fbc_illustration.png000066400000000000000000002310431317371701200240260ustar00rootroot00000000000000[binary PNG image data omitted]
eR, [ZaT| _r~+0Y.Eq ҋL`)RH:!<[DȔRxOV}6m͛ .]LNN200//w )%0|P }B9!6!HW"[4kI _+—$V{:z/uI"E)^kOS(,Pu!#4Bk=z#HT-IR%?qԾxK)RHVooK_y{>Ì_%UM.* clt Pk&}͹C¿u`h] uOH"E7wb__#|={kJb`adF Ds4ԭlaXHL|Ykڥ#R""E)R$<#Wݻ,.~G}5$!qP8@ u\%u%>\'IP'PVc3d׊'d~)RH&GG[$+_k 1E q`IN Њ $V"O/"V #VJ|fqX-)RH"ד X"R脣_5a:pW@XMi\,ADXFN< 1%)RH"ŝ X*W(MF Hm`Z|c3 @08v(nrKkV/Ѣph&x*A-$dJSH"E;*&@GGP `1c50&]YF^ỏ?(jZ#3 Kٿ4aM=b CͤSFo'!=)RH"ŝiQ^crip_Tu&&~"}r{3gΰjR61 /~^~m)ON"E)4]y4`+-ɟj"K&Fc΍64Jm]@-*V`Zf0iy[J&/IE6)RH"ŝC5qtE"[He71LVV"A ,-e@qEf)UKfzUIDawO&-&DӾ7J`)RHqG@ !V4kM[TD0Lf'L!FxHpܘdiN8xJ%[)9{,""'ixʝvNF,Y) L"E)nsHOv=KL(K+xsV/#h"aD|ޑR-322N9zMu, kaqXݳ$-N/w=$9ǰ9o0E)RHqAhp"^"tP_ §į9], l&}0"a"}FG%4ࠁJcfڣ\ qbi OP%h:1LU)RηzO@X+ v7} 3Iq@q:цEfil4~ 50YO.g#C2Jl[Q Ke$#Wik/lY F9}.^VjD.#33cc._)\O#Ш {ǤO0E*`%sέjڣȇ`z P)/)D`EH؄X-ia6dG+/LEFe pdQPz1? pff_D)o\<˓?8D>2m,|m<]]x^s?qW'ٲen&ؾⓤ%No)R n>z).!J[Fn6(n0 vp:⎂[*JT[*ț@Y[s89l$RK8LLCB?6fᓟ| 6۬];@XdtKv観n|XGdTQJו];RxE7$Ⴥ17bY"E7 ߂p0v Ͱ~.z0:=)(HqG#Uk%ø/ D/''C -#lS'N{!]b~VpႠlT`xk@F._p$"Bvm{&"$^&`yJE ntE>}h(:?9K3< PefJqOG )>sxJq*:;'8|yMTM4 "E[:DOK]zu~C? ލlں j`klh;,VTE+5ٲu#/p˲Xv s"B)RDLBOF ޘ3ۅzv6!E_zS:;JkZ\&&F8t$lɺ "Oگ&,S״6#TV5 RϚ*쟷(c?T*5\L"eNb]vnY(_8"E ѽP\D+# k6" T8o;"} [ݬFR9DGO=}Mz·&@  JD1&`@(<4ZA y|UCۂBg:Q|Ж6Dc@hۧ˖'OY:;S"EzED*tsm5Tg#nyIq! C,Z>Z Zհ##;$p^1keDʟ˷wm[?LLM<70/A%QeV07k] vNMW.ѓ5n꧛Upo3KJVè) L"}KWK/;oҷ {Mg)n!>}??ŋM1vtMcqPH}G-OslSg(/煃?fdBFTQUjT ӳ =.S,Wz AT82 S!gCelz9 Q3!~n/ZӺ7IH4G\A߱~\yD)R1[bsNA_ړ?u"%^,(|GdGg]/"JQ{ok.Hue&'A*J9غx**a zN7L ",)T4G&zd ,v 4vIE^+GD ? &6ogΜ!-I"{d9f{/%T*>qƤ2Tf10S`sk5N$Cdhv >&-whz`HaEg 1;Hښ8> ́M&/K[qk94X@r={կ|ڃ,nQ(XjΝcjj"HAe=zѷ}ֶ廠;z(bϟ_e ")>%/=u6_QV^/>Jzut`>\hʕkm_|qp3Nǔ5ql۶}p_^)^{L_髈!Sr+Cy=v ѳ&=)P)_Wtwws6lٳgߟ<|<ۄ+HB$vVcdx7-+$}}9;fk|JRf+G#F.aWkG溬_nHV]j.slY<j^G?>VTd244@PؼCbCCÜ@P?tt|`K"^bD%ݎ!doڗlT!`uŒcp>u3lw-#W(#O-&"Q 'c,+pq$,B!i '&>dȹe^ h2YNv?J "OrEx{ߎht-˲شi# )Rz7u .#w:/7 a#V܃> @KOhwЇ9.&''|';wܻp^lai'nKY\ zjߗyJEb:|k Ljw^( P%CYgb'X7o,ZvAA8"AoHgR!hA۵sqdr<=l۶=*FIΖJ%8<]]]Q8E)^\^&۹܇Bt'clz^SxxGRRTMOI|iH qTmFCy=C. 
1>|C(Q[7.U)ΐIzdf(Ru'NLkS Bf<+JҒ)jTboM5}<ʡΙ}'?Zb||3gΰu"EW>Blmbp#z2‹'6)  P֍UCW5QѷS`so24>7ţ"<htt\[|}4x~b򧰘qAΈ\l+S\/sw9\rUe),FeПˁ2A-2*HhERFWW-#vI"Za`۶Yz5\z5HafFO!6`4zlznS,a:X y)-N 7c灨a^L=e>"ť>.3U%k5:9:[ CL֦Ryld(iys:r9 YrzD`㕬$QDS6ꢯ3g[H"ōCPꁞw.!W/!i7>2"@N60K{^j 2ǘ=ǽw#Ǖ9"\B2GK pxΨ)&Km8sNgjd><˸.Q8>0뱡g˲Э`Eg;w2XqX:+6f6Lwh9rH+K" jw~ m݋s▘hYX:VhFtP `3k V$0h"qlC?T"2)_DDB@0CtsQ,礵3UUp^2.snx^ы:eɶjTR/ev4>Y BJ:9> ,+WJk)R>*SF˵YdapBz_0=5}}}- '&&x"Z !J) k׮%066V߶m\СCeV\ڵk_ZXuR00ہ.]rYl\gxqP%"7UZ&쫑L6΋!.X+8)pbn^WB311Mo 1(~Y&.UpELƵVB6`FobDTŵ"ePr#* ccc\p)RHn~Hѽ?ѽ=y}qb;v;Ϳ __Kw.$o_җ(H)jlڴdŊ_7AOwJ+~\}_?ϙ3gm\.˻n~gunnHa9Qt[&PbR葦[M\5_ ڄ~ˆI6cts^ s5Km^Cv }xA7'h2 gϜ䁕@6kjLPJ:rͽ9,g+r821"k͚U<">@hsV\.LDEqWؠ|[V4/6l߾=KHb1.o03bBOBt "X8tttœ/~9s??׿<5YUHjkR2l۪G p|"_vk;8p<羻V ]}ц |C&⸵=6P2hi34N.|RO6~ᵠa5rnqHCDj@RF/h ج Fÿɋx>|1zzz;A)@e[nvo X3ehL;Y(G!QU)Ķm,ĉ|[BJ׿u.\~M!DG/1oVyq_+ d8u$__|nH*?5}뇌sM芀EJB3M'm]<Z-vIZA6[ftyr*l;hk_橔Df]@-"&@mGL^%.~_n8<\)DaHd2`aȵ#z׼vs9L-C_>hO Bpd;>Oy=NPqx AC=cjjRZ5F U\Br R_D|?ޝ]y`ٙ ]"´w¾i&1oXx*ɹ"ġBJ2a8)ɬ`xJ٣ 08!ŏ*/y /_CmNBf@k BT Ců Wf޽9seHq;A)|cq,uG?iޔH,ێ>8\9 lܸu]j+W$1>>?HXdbb|3#Ze}5>e `Th稵O.$cB{:A(˗ovV &DQ(#ơG˓ 3O$_T,\ YDِS'ˢ|f&'Z}-_app3gGTJ)H8Á?S,KDzPZUc97'^&=ѣ]+ Ӗ^wjR>;vhYRÐ/}/_WyG[/^dÆ ZPPQ $HMíEbb4- (+yy/yCi>0B[.êv #˗KEX!>y[xBt7–O;y&ֶMh5ʱc{пF) )P!GH6 //y$>/_gww1#j^BIEf r:u*K|2ssG>M6G}!(nYk-óqKjj+&E"`YA`r|r`YFFF,4YM(\\BX[yY0z<~ܵc%0?dgY=iNE0RxhaqZͳ^Ow+,u%HeO0l^#aD X gy<9]n:M(DE ENBWSO{x7m9;aVK{uRvZI)Xy|ӟӃ` ޵w JF07Sk6Esϱo>yyfffWTpjcǎ~A裏 G?Q/s?s<WB ?B2DW1<_u\x\.swk׮d8h+Бx lΟ;v, *̩)sY:2Y7jX!^:qDD$#>3_c=+VAA8Gƺ+AX!6, !'SMҮQ59H( S/6oyx衍s!`+D_D-k+^QN:ƍ;G;DaM68O>$?%|~}pFOAX+PX ~}b(LjjFFFP*Dk֚[ƿ۶Ŵ3??MVC)E6e۶mwvvۿOO=uVz;ȸ7Q>!"V[dTбT"LM(0\[2;sFG&! Q:b97JW(tu#Oi-Zez aC(2"r Gx6$ղ`_59LBYb Μ9 mmis; b0 QJi&0駟~PZ?xs,Il_5B.61Fhkkc{|ΝK1!Xf"sk~+(GDKSǂf&Ԫey2 +79FO> P5`DÈ0kw򴼟2; .bZLGGFEkC,yKNMH Sp]PaR).XSJzaͮ^u D^@A{9*5>))ZsvltɿI'^;<<4gΜa۶m$)^ \•+WZ:꿑=N8a&xJf˖-(xgO<Mw D_m50uɨߞ+^K%ɼ{5u"`xo?™I#}&%晪 IVl4XjӜ9Jm^~ǖgt(gv,mɸE<_Ed]EKM5 +F T\a)LEH {K|򓏙u\IҪG2ʕطo/_f1+S!_-|߿F1W >,R|x󜨙QXu8m)ǟ3л*=&)^K m1FЭ&-_7{CE$J[]K|? q1;޾6;SF)ys0\k6~m/ W A[8ULM#T>0@ h.w}N=$D(tٽFȼEgO!nS xY={+˖ F0&'$[%@^_Ep]]]riS ###;w!7l\ZT*" Clڴ !< _ J0Ds:PG`eRUBD|*vYRMSy:Aa>G𢂏2ku2_Vl f{(Ϭns+J4÷Μ*BÚB AhlAʫj Bf CC O I>1+3煮um44pW__ϳy* a|{'¿4)X LLLp2śf&Bvņ ܞW!ؼy3B~i?>GYҋF-3cudo0T%iVE9Co:z>1kȟ plʍmd 0mUX#XQ]GFvpDž3ԼiG+WWݛKCCE@G~>{*e vvƍ:kY!t+Oׯc)!'N:EWB-¶m7hQ` j득~O8$\9Û WL/1-{g%t$냘CHc2 `K˗>Ev/T!=i,_fX ~lZSJˢZgll/J4H@Hr/܁@iKd3+Uro%f3*u ^/3Oۨu]]]J-&{Ǘ?ǏNSbaH׭B6mT/ ԧ>w5!(v#O/З0z }b/bo^)^Y3`iD)!ozf%0U7^/V9_k]aa,_ƀ1 "ēy~TMcЯF!,|뤘ÖW (]IJ2ju-J==Sc8Cbè[&|4r0`GD}3+HrY-~|_ߪaT*Μ90S0 ټy3A_EN|QG'3c:Ky-1|z/!Y #WO tMW[Kmg7o7LիWsʕz) $pu{5] `y~IkKAPSщȰyS+L)Uzj e4RqJB!T8w3g{ Q$kZZt6Z'Ro ?J@? 
I_N@.V0 ;t9MgN~ `bxxNN88SiHRwm ԧ>7}89p}U:p#{Ӄ5t4H | `27iV/*"~5_HhM4H >4\C z` @h6㔰JVRTP; k3WXN*(  :4^D|-¤@EaVO4+͡[lhhN:x֒"u ={pqx~~Ufjcz_KXm胻*So}+x;رcZ9TAn xIRnVS_s{شiarXZμ1A?ϑ#Gعs'WTok^C40d/O~ aӤ+*h bE$ʋTHjaMp9/9`*Ȅ3 :12 ĆuPz93sdz|lϋ(%6ؑ;0Ryh1EҨ|q7#_Q@&lɰos跹Y &I୭lڴ{.k֤S\mdnz'qo.{n;Ν;|[oߋ+ȷ JzPD}?߃\qkzLR,=aB@PXb%r_' C?$zI0IS$3ʟO4?* u2T;=,Pt1<6.Dz6yX @N@EGCsml\ghx5[4|>,Zfzޢhoog?~v_t79뢔bll/t#h!md2md2 /cرcGtۓ?G<Eze<ԎX6/VD%[@EՈyn Y)i2"l(q_Mf焥q*93]u:ɺVUqV.XCfύG)ЖdEqnVAY&P>/¹ S%xV:;;տzZ`ʕxGرcb^k:ko {֜4h "=zHBYv-A`Yj^xGr=''l۶HND9;vt`}#usCٕ/?&bڇj U_B 1 ɉ)v>αcShO5}Ľ~'Я4c<W.&'C x8!BXw1FD/ E`S*]186b #&2\;jZEmI|ǖ!d.2znU%C<:]~=r^x]vf"Iz^ Wk|kI [Zsk(Bv},]RJ@(@5W Eɿ+hLq9w4sXwd.fs5(l_&50]B+b+Q" oI^d#/nkqmUf8<De%,QN ]T\H&[2N ZV.Mہ%j8n{{_dl۶ѹR o@ :JU oX xw#ג`i77YuSfy8y<?qm|3aq2P~ݴ`'JA ,!;l 6aKp x%MlkϹs3wtP,c뷹ȣϭ5/Dpuټy/GeK3k!qDIZXBP6]k8&OܛVa6Asq1vE}a}JnF ߇={oyKzLR9^^* KJnUGn;޻;Y@gzX{ zBìn Ɉcs-ÿD2#n77@%lz:4p=& #@bρ9t-ovA䘚򙜨u]o"yv /ޭ:&[\F[[}9v 6lXvGDZH\G%[*F_ 7k(}[ҫ\KIKa;?ϦMnG@s7b]U :;oAWj=׹h[/V@׊® _lE8dN`T| d6(&oĶ)dL6@4UJ%õ"DJm#AkR[8q-tQr̺f|̳p}Xaʁs\8 ,[SjײGJ$S-Hnz{n1Ek/x<6nZPȍRΖ A $CrүWfbwb(R %j,)$"z5vz!>/"aL߭ށs9ܛט:@5J<7&r|N-\Յsgs|2Ǐ{aK */&f!`5ݵ*c_lq.95?z OR$Pp%ɝn4;Wnر|3lٲyܷ>X}oYQgrGGQMyVW[?I䫭N*9!~Z6Hek /pU!{;w}m\U0b7x pȌl;a6( "0125Bpq;DR* ֕Fx~B(aA7wz9ڻѾeL!\83萆Kgﯕ߭DjAR)}8j8A:m+BF$|o"eE߭<^\L^jj_ZA3Tdo wZw߼_8;GV#cu~slw.J]AIzi`~Ƥq, n*6oK_<׾u{73;:ƙSeo߱ftM8t,=<\䨈~(СqlJ˗k)P…'8O4L,2KV'_$BІoND2<,6mo|3"1ioԖ5GI M_OŻ^G>bXaXcnB.\!h:v咾3(\̕+T>B == v!kjRIu(bgQ222KP,06^ EJIM FG ә;ad.<",'=01<N"WĀ6ky %Q_'hS[YB%YB1j{Zj<_!ZpdPEHlLb`98{٬ Q0Hl4d*`2UBP,q+1TM}]\,_kII2h/Ą@RBQkr9η p({_<\E!+LLdlzKd2ZC[P̲S?7֪Χr,}tunP̐Dz#5X]w(}YL&q6nbkv,Z%`zbb={066iTAaQ,_ºd) |W<g(to[qs'oaœ\|ݾ@ѨɋoÉ[$|Bj  zO^__=LQ+WGNҿ~V\~H 4cZk8NW(-Yu/)8z}Ҩ{"g(CY eIA\Ґ mU{79*DtFqcԼ=ZGX4'*Qa5r!u;5N2;; d29Z軟?K/ra~یR%sLLLo@Zcaz۱\1:2Ņ 8qG ,J,V~ cG3YΜ=ͷ=m]AFmӂ@122ѣؽ 33MJ_Cpo#q] 27_?CwwOtۉ_AHz`8wĖ|it fFX d)gq۵@ur `nSosyeRͣP|Xx8ikvlPd^,tu*efYqNV@K.ǟaa"mM4=Hտatt$"XǤ9 \ěgm?aR̀U}dJ\w=L6#d:zs@C+jc[i/pK{r :IH^(QQAL<.H>=B.y@J %A6̗-ї"GB1o<;Z}9VӴ3m٦9~E5$Pd4رKqb`s!G6JGNX!dߜG|˓\8cǟ|ౝ n.UYW+ m۳=;ω8lXq`ky400{edt'8p{veǎaM=(…Q&'Fyѻ(0HųmIg-MHf{zz ÇOe`yDj4A0i֬颧)ON ;zA޺!hvfr_ǷCzX? LCU/ˌJ+mНِi˱훸|2_ VܵN$c8"VLQ/U$:^GQm aO^Ժ.ngPB&[V[fgv֏Re.\iNRغum=@϶VyO&ӀGͥVr2+W󩓗9{2Ss_\.nFıI҉q.vjW7;7ҵ/s 8GsyRg6dķfu_.\!J F4D3w O RLi25Ew9mR+i2,MFY:2<3;Vuk)OܿȀJ7@6D^;r jDF$a]<(O$ > ?nv7ⷨs Gܷk=_w_}{2ܹ|OP,`*~dNa4Ѝ bm)τjS<{B&Dv]o=zy j9Z -zϲ|wuKl`t 21^flL SenmULuwmqLLxqyBe:nFxx)u!S_ϰUxe3ҩI2ERwK/b^ "bPLOP"r.-oY /ټky}J.mC1+),s#ĺ9%XŪnC4BZ}W0kZ".h < )VF*x a/_"*:WeEʣ4aaĭ h;r-0QfBFT|ܔG ]ep'm]iۯR$BKq-@Sj235:sdoV ^ΓU3'G4$2)9@0"Ř=wP`Qn ~ɶXͪ ١k5{=MaF "=E>/!D6|P$x6K&ukZM҈i?$3 Y(DׯOQ*/&!T@Aה4#>qf\Ȼ$H2d3 "=fj TM^h:-hDce|?)mEj:lDŽgcbnZơgS!^MR,j-e8l\q\,ŶM { !¡dᛳ*4ssɀ iWdn5n:x mAݡؾ6HkCEk"5ѫ5۶(ٔJ]vLЅ!Tʊ|Hy>RVTUJ%uu-2YI6/p 2Yal0d'5j5RaH*lKV !Raۋv *Z~yUWRVTkmCޖu9tB>,૆V(usR`rS=2Vic^ <9dࣚsb (Fy~y4N2yڍ]E_aqR~mE9xb5-E״B8@5/jN?Z7n݆t#Lz@ OI!:h/3)KeʾbzF1i*RϘ`K Tj_EB,CZ$^ȩbR+y[g5{!@)+M5JMSy,), rAkPkthB+ccI͒fd]L͒f-m-ޛ;!0K}*!2 i u5RH, u )vs7!L-~yE!yE4&X"KO@*)n$mC6g~=!4a`CRi52)WCFGB<߈Z-mo+cฦaC%LڶeHe$\HE ݀&+n'dhh~nffROA Z# u:(hGc0334=y:;J@ ". Bڴ| #=&YUK$BT7eTBuQ8ZJ]þE6}.s,_ƏV&k|(_Il"{b U%{ 7QKvsh+61&6hS/MZ@HShzSaYm݈fgs9ܜfjRIdh~:GrA6k~+,Qe4~?RDuf Y0P|YSB  d21^vA (˞ aFhUMZ 3S-4^*%mA?K,::Z8s/izk8[A6l~R: $FhTH#TС}gYK60 0֨*eYױF$*նJ ;$ a2Tyܜ̏//ض VPD+Lִ?0\,kɫeilP6(cx۶Xh&fB&D bA>dD(o,j0 VkXd2izţl+7m[e[xGxGtF)u!Q(,A%) A a.P@-0\QqJD_'!)Ntx;WpHjrSsY:;;""la Qm{|RRk]A=Ϩ֍N+>xO!R,VQei}(BGrTsB o`$VxH_`=H#\@\jDR0KR ME-jV@xx8hej O9dOкH46(W@Q 9Qe2*%]#ԆŚԊQ,m&3@ "Fbkp58 P #e.چ[WjU `䱩00'D4Ԭ'< /T #⧴!`aY˭5hƤ! 
R,DŽH)*RJ8C.6x% ̍=,mrLD]Hط _9tla$ |}C(UւZ;!A`r^qJD}6keF%ńjŐP5Wv,L.>V9JOF0R, s0"ٌN2w4Mses#[GpHCGD9WAn ˺2"JQ rm)uz͈ȸfY!e5+e ̱뚼C˺0lG7ׂWyɚuIbQ:P̓Lm1w1 50ހDZs._R̮5Wd]]%Nկd_u5 f2IT(.]2XjBahFg&' 2(ą|ʋS=4yC5CXQ ye&f'dsfcuq k qX42WzM+̄L~mj, !Ţٞd2f<$Pcyѱj UN7a6dse A/,C-i_zE& Fm#|}EZc 5e:hƱ0PCRk5_O6:=8KVxIŸnU!,sn56/t:S# KHbWck9E*iQbBt2MSaH9!q&-ZG RFJ#GM+8 4N'M0 ! #:-rhYP.@жQ6y|d.|ĥU:XnydrDBY] `ɁyP{yO:4ٻߢ>㱰7ZsV1joLt%ZLi eg\B_{@۔R!EfE ͘fJ3ڙs{j`n(ʏ!t ),` U)M! 2#MrLbL(GV,4z1墂+*}!B@ooTO+3[`ˮp; $3R&e K2K**["*g;#mwEl=تp!ÒEEI$b"ALy3i佉Dyܛw8{ﳾ!Wܣr(\1&n)li&` `}V)i0嬯g`Hg0#Rn6FNpďkf$:zF0bU]],Ia˨NiYH&ZDgPT؈sϦPikrő#PYDaZy6Ǎ_tҝZ6{'wFzi& MGWRψyG6+Z4|_\c4 $ @@pQםNo ׄkLiݍ$1 kt#T")mP':{SsPuOu)\ iYkR!Gܐ#~׬0V:h#u d {5h,]74qu7+Q4rZUu< ydJog]g JlFr 8'2:3NY ˭+Qu XkÇo9WJu긲 ~(1ecmv)J%$8u3@A-w9΅SdK v +CL,@;v&juuՕFá{h5vi*4#K{S|ڑev,խ5MutQ +F6c"Pmo0?qDs_tOePYE.0 x>r࢑'I95z,9 fhȑ?+8uRF Vݢ"}Ş:;ꪋGq>q8FF(My \iҁs{݋ Tnc+گqΜ@A&+vNgQ̃ci0)ŵb~,¹R8\`<>dr,OqڹJF>8I8W}FqYT\=67_5oJ8 cVɲ|(YNk4j_Vp<Bΰϝ6y}jC.m^f{z rcf}8d50zs˜tc㦑DIW>ҐVȑ̤|i#l&U)բhˍBN5j J%A*Aye)$-,22q[]{E4H}9 u̙݁ ]ǜ3ݮgP<wƼ ~44MٙmF0bMF\})r BU8V#s1M-?9wiE#8Ζ fp{r}m`x=8zߤ/;ŶI pb}x.DO doOc&l|gy?IW(l't? l .=Bwԍdy'GGRU,pkx04&* @ L2Κ߷Jlc 2Is,kGqK 3/}]8$Zt_1 [ų[v~ʕ f[54AH5Yd:ahZTZG Q’1U6s6=浳9 #ƣIbP6s:[7iC!ZuYz p.s_"3>0Y~4]E$cR;bv+xWaLI*++w1Y$4q@,Te˵C7H-yJiCESsq6pGc8ăG7ׯms]e wqNzC(F6L$/ HfVZ/%.$&TUr.o|pffxy=hZĘCkdHKe{> ӾOqi("{5>kn@6 ;;%!vL_w<v -cYbAYPqAm3#=mMWH>ށԍ*E)!Q7y/_exES-Fk|i*lj;ѩuQKr5TfFobx`O0APj Z?#ZO2W=kͩ;>ɡqV9p9\bSa4:;;y8| i: Y_5NA@ὥi4^0_a>sg,w!]rKSNd EU!c9]/<×ᅬa2UhYјG-͏$zo\c#Đ â윿*Ӫ`XP0wábe%>& Wd8'T>wd,tI _Ɋb4Ɋ8T#) ѸLùGg*tNec_؇IkpA8"cJUj-8pmUbJV&]t` -&fA3!a߾uWۨ6QoS{F(˜)^I0*K|޽@Jwgpjd~=8u=* }kꨮU P,WUUQeD]`>,F 0I6,$&1MSQa\18"`|dIw_=XlV4Ggg? M#X h#I8 !isN&΃(lI>nYd:0Ƣ(ڱX\ܒ$!˲@"#ƱIa7سK:e+̣DYUjʢi n<%Ijc֍9O cwsdOY!;)yr'y9~ ǏS7yۂ֮o+WOrOqC4ہ͖(SڹCYY9vE:O^Sώ:+|W`1a>33a0`eb1i/>?̉cn7$~xS iá0J10 ϥ?O<㑡1?0a3Ks*ܰ%zenlU|#NHyJxq7јWguuBhIJ6Xә&J|PUԵk]JgdYh4j7'ƜeX.KEhqh]F4Bgi , Ҭ31$I{ ƛ1|ů]W"eאuji,֡$6Gu@U:.e@6SngG@U]as%]xpr4&+6Xlߘpnʷb;ShjѠ-hD;"㱀̓Tah9pi:rlň?Xlq= ~8zK~,Y^.Zj#J_ۅwy1u>H |khّ5tMw)X]>3<|ӟ;|1~M4C&Hjuzq]pX[n:,6\a6BE# >`AjDk&s EM\r#t~NH.op_vJJqFFTY<Ĥf2%wI66BAyýn`F fGx\/<ʩ!" 
qg t:?G'O%>MSb%TFڠtB欮eC!y.NGܳ5aړr!J EGe*ł뱻PT,F]Ct{mm"pcrX@#7 Z#:bm1:}ɏ451+,41Qq#oD:֑͝T)z0Fc8$#sMr;s[֬ L_c PՋb  ȋhIѭϳ&]_wl=~W|W@-p $Ew6zPx,m!5U4uImKmp4 (>`ihr}c~鿹aF3[w8FTQkO;ЍtZ/#ԅdZaҢ2tN@c0FTsK#e;3 2ƴ"~auR}(kYzHh@JmɈ =YÒQMICUy^aI\Yl̶jl)U{“ZFZex9a-9y- !~G⃲l*E MkܠQZJ5(Y:b0JUϋ^7FǓ&40z<o щ*R N^pPxq7{o$Jeɶh ʏpͦ!M`N|)6i6)_Q*K9ujDg] e+lS/0*-uiGU8}43 G'k #fb`VF{ (tg Q_4NX+#T!zD/mH bgdA],$c{G|Y[" Xlb-ʮ#KEQb0cxVㅽHաA+`8 F#ǣ@MN>]{(P^xnqFSKFC$Ѣ N @hcv7B*=z|^'^ꅂQ.`4oJ1kT}}M bxVRYU#^֊NM-qnJU q㑐&NEnLֱ-u[V31/ߋ^P9{UU~sF-h4z|{%_%MSXԔrKi#PC)O|9~|Q͂8\^@Q;5WUXH=`@Fip5%;jmHR֎?a8HPZ$j}U H_}+W^ѿ6iJE%vxr$kMbsx}`U+4aD54f 5/Sע׹vkf^pŕ CBt Tkưhj㺯JTоyXUl:̵\~*uCTP]i e:=Ǎ)L!WW 9kb|Qqo dYNdK  zwhwt8.3w4PQ6T774hȮ_-V]C,2\(Nyq#%VmG5}X1,%r::<gey!q_mDql-K=^w}2l Ñj@qb3Uee"%vKm1yu,l{!(|tΓdE\hz ,elmG-) c{4kUjU0td>-v4a`62_]m/䢫 d3t`u/vjli\4cȢ-yUxJi'( ^lTԎ41|۵.[Oo?!s_xҗĹs{o~Œ|W7g7s3lmcFf/IĩK?^|2DT4&n׺n43 lnw;O eA>YYs_O;s(Q蠡 x73^z^f5y>^S_[S@\"@䓌=z?˗WX]]{qH255g^#Ksw]a*8Z㽦qo5AY\Sk=k>^J ߌ.0FDsr.ڣ4<}p asjB_Z DƁyN|(Gj97`6ae wޡXP XbzY?6"/ZmdBmM_yQܴ$P s݈i ōQ zфBhn֛ AR/֖%!t9Ȇ;[I&y&Qd,ݶ6p< `,ZU 0R,=eD}H ;sQ]͏KѨiy<.i`k fstRQfөg{ǷJk?gÇsx |_wſݿc=_D$-@z ֚'Or(, 8 z' RsZ Y Ag0QGVMuXPU5nbg4N0pؘG9c)1+pk:_w5>k_]Q%lj*3v$IFd绗#I ۣk"[^wŢ1ZxE`6jv8#I5y2IIRh4$i?G0J8u:_y;x-^9{j G R>|| *UiZ l2fUjk(So0>~]._y1l5+8;,oPWidi=݇Wq]#'C7{_& wqm9ӧ`^aĊBQ :x|UjRr[枭9\ޱ.JuEǹͦcUbI PC8rT`oe"^l& u;vtס lxQt@4ePrPvKF@Vu 19'*l"4jj7{</ /rwo|__'?I~~__k_g~:w?yOOx?`$nim իWŘXO0L&kFy1t6MIU4uIUAWfV$._ '< I#)_…M/=##~5[.U ہwޠ1s%lIҽ0;-ftcccc?Tx_hݦR$ڿ.43 ڱEEm5T nIKMڤ->0>of7nxMBxS?I(s,yx+ o{'9dII2$MW"M' u|$58W(?'M*W1/9<(e}gRl9c*Pg kwqgVRyȍP Vb8> AζQ8`#.%֪u{Gq: b4}nhC+)(,NV 0P,d$>$b6]γh:6Dq&H7>1wq|~?׿u^|E}㩧bkk .s?d0xoC\w"Y[خ:%uaEQX̩76" +;L3&9+9kfzZrח`m}ӧ/ȅ[4uͥK;3 AB/Y? 4C`1/ciHic:lrLSq,mA9p/HN,/ƗpnX:_!&A* EUȗTQDQBU+ck]4Ue#c9.Q }N29vre3ƹ)'{m&hKzFR+idQj mDՐ kX,(2e7!IWHi6 KDGE/` D2YqJuNc)9GiIX)͢2Є,wLSP$"M:TcHR [ HL9JCt}/m,NlJrܸPxOiJ4oE^bǏj4y`(\N%cADCml(m)amg\E&+>`PmfR;{M-]QLh\'4ٍ\Y&} ^$K;W f8$t{jہ4lvVV`2*я<;wiG?& vc[Kuo|iN>׹9Ō|t:e69ž*X Œ8c44$i%v΅T'?(OlpիV<ݜ<5a>o  6'$3,ty6-hMtU6Y^yuXIPڴNgNEd+ ko+L3ct7> 4at' I8֊:~rSHkğx%6nQ/~mAq8 x{pUu%Tf}[Xr;:?1]VgcQh8;Q'Xbk{j7 p(5X*QT455B =7H[.,Z S{NdÜ!' w8⯳7Yez 4&|Z_,RZJ BAzl*γS J\.Kur/$ #pV("F{rEɄGu"H}˞7Q o^)FEL&Dr; уRнǫIME|s5!n:6G5RT h9i;Q[[tfsL@x,Ѩk]nr/,3q|^QǛprӤ3I\&VV%Zg<NGQ@kzHc{{/ /9'xkW?&ƾ,ʅҴReY5J,KIYt49~<2|%)wZlb00 I*GUؠK} m#qY3OOqX;16 $v,Kzn#O1b&I" N]- Mcp.upVaGU; Su-KIB֦|}o\i bsdmUso/Fkmu:NaCv7CBű+?~+V649T`mI0Hvd6!:L*3vH,ZkP_*|o'zjh ƣCF}+s$8B1=1 3s!?č|B*+)PaSc6׳}%KqX땪SBE)졮P~<2Dɤw% =?2l3b*  w'8Yb8aTB(JəwQ eBByqKP 4/͐$IIc2M 7čPt}f=h݅ual#cJa. fd/CQJ@qHXKpz_\yUy>}[<2;PftwIE1"[aԡȐ02#QZTjlк~ϯoYc(ĠVC&wmtӄuUApŷuƇ{FCU"Bj^`:/2ZyvPiVFu5LR9ӠK.O5KӮ'M t=fvI꽋j7f.\O<6__s˗.~&F"@o:iMjjU7cK2*tSl̕GwD0od. 
SjpSkaWU++Egb4Pd#`HG 3TLxW AҒ'xLiFUtaMюBCJUyi8]>NyݓyD~| _`kkhGgϞG_?_c'N:yyG{ٟY8q|#o~ 3.`&(Њ))ŜŢ`QUeo 4 OBW8Od4ueqXkQ8uV>GD'OiΟwdsk:};{"u7S7ގL\YEZ8l9@1~}qߘ-\%0Q#OkL{F;(DfLYaTd Ͼk`S8gP.u%c!aH+HEjPNIF{j4W| ͜O(IYȨ2Sؠd¢RMhv ey<۟jwb``,H)lY&3`iC(yy|C萆%Ǐ\IB"<1C0 b`M,| Q7v }}qYRUMkDJ^Qҿ_ ii2L8}4ֿMGQ2&8{#P86N-IuKX0-zoј׷a<fdu1Nُ!ib~^&z2J I}IE+aO#C:HW]KNSɸ1"dԎi-@!p(eCdDXtmo tV߈Z@vȨ4XҨ,+z3@\!yLM"sH]5[@`(xU6va:~Gl4u\ dB:_3ƫU4NJ6/^dggSNY Ֆ5c:b<l;+$\YV>:;o954uEY.U)fD0F#au%i6KD Ћ%I0F`]_\d7;O'9~^}%ϾG9vh`zYo.8Z.r#Ə>㿵y@<'xyn9.NJT$_x9 O1wT Oǖ[We(g2(rYqR#@d\ X|B唤T>9J$| Ն Q.}EJF,ȩ}З^ؽKCaCKH{<ܩ` uN>' H@=PH[yyMጘ@,`ZpR"+JI.6N>^ڈŏ߾"^FJ"s~{36"blX8׫fYbXrb:p0a,(|=[ׁ„lH_;&ͤ" gG;FF+mth#K mU蠶N9u=vr*BqniAQh0}{n &|0lt:<6G;{̺mo6~u-\ѽWa cbbQؚMJY}cro]ce¥n !R/pO}c?v`G a;{-!J _첣h+rWe\WtڋŢ(- MFU|2% Á4P1 YnI2&Nt FA*T{\1^btW(FlCJ9/gJȖ QjiS `:m9.MRj,A+8RUch0օ`mVϮ{4b4u *sz;3#F!`OaQz '_Fϡ%Zi&tJT*>4h?t@0FC7*x7q !EY䘕Bʢ93s{E[P,Q`qCqNF&bQΆ(id b/,kkTpQtm/AƔឪJr=4.4Lm2CdIR^. ~F#ZeVw^w~$)E_diy[ܿ *T-(9yRU%eQCv;FiT# qlZݲU)& Mg|xyFQ+d<嗯2/,Z7ZvfiduDv96/}R?,2|ѥ~Rvx+JދR`ITUIXaT'UL y.F1oȽ ! 5Pn XLH֚aչQWYxZ ir{t;Uw.bQB x˨npE*ctO_|Oы MOn=rTֳb*:3 *I$$!1u| OBWHeeYl/pSH-,nj<Q5ZwQ(pln-.rߊ+Og$2~ dJK:fxL"SŋkZ%g0:x.oJwD}LjZ G(ѣy%,^82 6F#cT9sٶ1-MuAĢkgB>9Ȓw t+%5t9`3ILjMgv䷓=y]msG&Q-cQ^=bl7Kb4 FHc m'ZČ5͘{]QŢEC][f:bƦoCY޳X,HGޖZpQtiqtdJGѝLtȴlKӝ;[^DQV&.z#3 Ƿ,黽%Ţm+rOEM D̉YZO\IT:d#_6:E)LRTг P&$Qm0 $?xk iޖ̻TŅ _xs/:Tj4s A  'ky}3.T,)ik=Y&ߧi!T*h¹/ia+ {BDȮHckP*vq?vb4[|@lbuQޅ7R9OJ:Mti8Y][am}2/ˊX0_)El*Q1KXR#(0h\hY֣cq^HLaRO6dJI6$haXiSh9 ȋ[yX9Wk4N{ח޽"сy014ʵ#S^SW2ԂC"c(k""lO2 +X1*gʞzIgps㸁UhN1 a}=ݗUBhf*˲\7V?s/#?O8tm/]9gv5]U H2vj-y~GVCȊbuIқ#9\XgsLL{e*X2!Du Ub;HZ 8G,XLիϤÂGoI< i""bUd%0yz FU"@~7 $ Jt|+%Kc-zBMUNNCݍFRBX&h>ZY&nʍF@N}ƅ7~Ll~Qѻ-@]U)mַ5b"JbqyG6WeCOG]ctiQD_l.hX[^ \[S$@TR$IJ@c4uƤ$1Z*dnڴHctrz)Yqbmbِ=MV"6Td|pI8  ZE뱶 V&c;;S?‹/>0X_/z;#'&QXMaLul[OE{13UU2DH9R}blTReMI .~gPbhdLpTYC4 } ax'dDbN }{-SQAOF~O1_Rz͆7L:GF߾qJ`-6NoIQ]HI<Hr_tIR QpbfV]'iL.rֆ5bDsu-3~K@wyoZ4=V8REhyoQ̌~`?O(CEu[ GJޟs.ĕ Vr"Nż{V61m0yˤ3p5v ҎA}]K嫲``0 9 ݒGw$0jq,8|=qs=RM#_!UBo)njZ`R"6L6Z| ͯ]}6Әr %f$&7zdݱ6:RE0 ȿvVa1ĨզEխ 0&vH*\L]r צ>rNDT\M1ZGw(\Ų.x.]kkpo 3&Q!tG1p i*:3k$I"ڶ` dԈ8uMQdYV-~ E8ZR{0"@Y,Y]_gxeiN2$F:T?f8T -+E!9Hg狖~M\@]b^r2[#\^A8Ʒi { h8_=đ Yf(ʆ[ ~ku~8?w>X`VTZ4U Y}H!XEQ4l$ؑS'7X[] FE|k66<`襨gg^/aˡCz8sr#$ήZI-ig0`>[(kk$Ʉ.s=z=rn8DpyхKYqUdB0$h- 3$&5OAp"g= ~=4a<^N0vEOf qM=GoNYfB |@U3oT|^Ʒ3W)tM}l66I݋ \]{l(Yʢ(j9EQPU5]Uم6VJ"[=w{Q*~ޞc4 +UfPW^㟸|NfEÍ˗v|ysg9{n̄,qB%E\O|LF/nSs*wӧ1f (S}9|yqIN@%\:/ʧ?0znbFQꡌ3wl7YfM;3_ w}-^򽗮s_9ƣ˼'yN>S6C-@=`R`3Oğs>ƙ'8*]O4J? >P{KݻƖ?8ލA#8V;vzoZv?G0-IZb Y@nY}Ph9I)zH2Z׹Ά笭1 'ūs9c9D ` #VUt`C.q,3^5E.^S?+{$J)jȩ;1ϴ0 ._CJ{b|\|A6̙ӽp#GO˗y;?;ϕ+$LnƕS&+̇Wiq '5.^FuP90VM^go©Sx{H;-<.TƩSgYW'Y[?{ zI{/{wق<[;udc,ߛo_qpwMX/({}x} &Ο;SO@]7?]w3W 3g]_<ݳ GָQsI8͒V+~ GULb(ˊkW7yu68 Ys}Ga 79Nd3'{7A666z׺g6 y;|%Oԅ甀!ر<8uY6XXϾëş9qpǏ1>v2ӸlQK̿և={Cs@k|=^>uX׮r&рNb6k:{y$n\Ex\z Y 2>}388! .{/ ;Gnh~7T]Feָq5"Hdfз AoXn#u;>ߦ߯Zv^zn\NH<~qp+zǃn G}p/^=G-77 ]fhzn/^.wpqF2 =;_/V9myYex?o,mPUޥ{<`88x/)sro GُV>vc|wp9V1>IENDB`dipy-0.13.0/doc/examples_built/_static/stochastic_process.jpg000066400000000000000000002675711317371701200244000ustar00rootroot00000000000000ExifMM*t|(1$2>?̂%i$PStochastic interpretation of the contour enhancement kernels.A. Accumulation of 300 sample paths drawn from the underlying stochastic process of the contour enhancement PDE in ℝ2 ⋊ S1, projected on the xy-plane. B. The contour enhancement kernel arises from the accumulation of infinitely many sample paths. The gray-scale contours indicate the marginal of the kernel, obtained by integration over S1, the red glyphs are polar graphs representing the kernel at each grid point. C. 
The contour enhancement kernel oriented in the positive z-direction in ℝ3 ⋊ S2 can be visualized on a grid with glyphs that in this case are spherical graphs.-'-'Adobe Photoshop CC 2015 (Macintosh)2016:01:25 10:34:34P T9X TzL&ff\(Creative Commons Attribution License(1HH Adobe_CMAdobed            " ?   3!1AQa"q2B#$Rb34rC%Scs5&DTdE£t6UeuF'Vfv7GWgw5!1AQaq"2B#R3$brCScs4%&5DTdEU6teuFVfv'7GWgw ?Tzʱ3kp _)oE_JLu1fNEV yǵ겳phJ2׏nfk~MX۫uc8VZ_{z/}+}M$UO6ʯ嬢۫ȩ ?gk=̲}WWf~ecmF~߱E?(S֙*gͺ[6_UW}v+F2-ssͱ v5?ĔcoT=5olkOzI1 ))tLHO )Չ/ wW#KyWjI2Rzxo[蹍 ǮG%ߥfl?R{+:z0n ?5zU}c5S@;6ɏ7'y9`7Lh?䤧7uLم\_7m}qeu,.G^?[(sdcN]9o:cDP.7.#ihG[}`$LJOŷ}u`"WYfc[5ϳp}b8[+a05ƭo*Uli)ȯ'Mc2^ں_Mgf[Ɇݙ__Z_Wwٽ,5p1en\ѳk}IVŅK/7яԱFF\ б֛2Ge̪Z?Sǽu<8=.k`,o$%+_!G+u]cF VYlwŻe6A;Z䐛p@I~CEAm%$sDQL#$05{xp |?"JAՈ _|b:R{ u5`j`tK})bDuH6Z"Ѝ(?"~CFyk~_Ψߔju.->m.q-i`-=L-k4d t>E֖e3YfsZ\꟏uP59ik\;'ٿGkqqݎ4h>ק~}'Ȑ;n)l "dIr6 @`Lr>^NH$Zݮwʻf`8ɬIkߤ7+erN{R&>QË"95T5OG0lpcvpSܨt39ۜ_-ṋ֟^ ;h& c^T,_4vAo;\etnݻ#]dM9`эj亞EYxo~ X0%Q4vo椧2:nĵ3p5߶mچMOl3זbfza۶?;gY+5atݵK_{SߵjsGNo1FޥN>/mlU˭ʨ۞@c0jT &l]/[?c\8K;n{j椧(;, KD?8oޮW{KAxA}./v@{Q7?ooobߝmbKl}?}籉)]a1?,Ř7XF%ƹg֣>LcIt;o)t굌sf^K {[3+?Q̯bko-ֲkskQ9;麆کrqq:vFFc,z&<j3ql=]KP9omc>˒v55WGFP/sN˽ץoeWڿI>8OѼ Mh5wTz?L=/ h%\m/o~oH]EIӹ힯ߣ)3D`e!EȃJd\KD 0 =yβ-m,sn}:͵Ŧք_K*q]uI Wa Pyhѵ?\ݔ^\N{`n$?:Mϙ6',ԙ'p0jh#mmϏ]FOo@vێth?q{')Ռ\]x;P47t{WѶvLf[LhO]Xmy66hZ~=}45.fkG>nIO.C-h 56OӇ5bWAªawC} cC=[K\Iv85@& gU98to!p\FYc]RA-kwњL{rJhec6 KXuaTzv٤ i\b5,$"DpRY]m.Iq@B>kU X\6赾?p]a5ͧuwx4kdc9 :tm?}uztM,O'+*6_E֖EuoՎce^z7au1}d|26-*N4{?@}?#UĖL6}NZ_JQf;h0Z,=oQ34){!#R ,~' zke|'Zvծ_Xe ko w_癷YXFSwy9BY-^?V+wc߉NF0.cmM׏Qg%[t3^>Kݡcje~ԳԱ4~̠b=\8~Y~']"9c\ ~Ü;c!U|Ө4b`s,wkg{K#)9Lub?auN&Nkή>ޥ_ezg\|S$)9H%05ܨVXfec\cAZwX )s \8xy#l`p=$:/`\=pGu1tp\یWN֯>1ZsZp4A )Rk-4UX촵kus v;U~{=dfEeoSS\}&kjmsgϭ_+ @׵k^KZY؝c-o_CC]EQY6d`1w]j[mr_?uNmL1l?XnWD/c2w_gum~ا#8pa)KҶ@pNsy"N`ÛNּݏsG^Ϊy]{at179T.xmqǵγ}{~[י`2XuO5͟glVzlK^87Fw Nqː9Ie#9|w*]v·e_&mlێXLX o[\mc  6^UE{ ha X=CbؗTۛlI:jRR@.0{ݧܓւ5$P6qG^Խ_?$W7̃[[Ԁ cD)r'}Ɔ>ZvoRSh9 o#Hd*G$~kD$ :jL 0~⦒}[_|b t VW"{*5cMncm,'oj7cb N;c[Vd]7ٽuz=Q<5ֻeM& c?=N{ {!͐%m/fgzӲz7$6Ⱦ4fd:7V߆܏*[:^GZ2qqO@!4;k}X>k-DԮϷYVNEḙ-Ǿ[6͖,A_zV]%h "ĺ78 ^Fّt`r T!A/{700v+10_`Iw+:}S#se8`ӘX[nΫ'_gz[^EZ#d6Cqr?i'hJ_VuԿ+ZnqKĽqkK0;{YmRfY26iaI|r۽V_I#δ?/unn-4fl}Yq\-p?9je);OIzq?I:J/tGٴ~zDoos_r_SI\KϹ/t̚0'k*xLc[oDͲ \~#w&dF9KpqΛ')Gޅ V?a{-% 5=VάIFnc}= S;%8qKLxm.u1KZ=.{}A5H$x pv>G~I{6'^f:Kdx?ꬬj][[UӸ0CWc>o_c\{o3?~~_=[_|bf sVfFE=ײRC[{5Vtº8Zk?SCMo% eogM? 
~I"CoNֶ$I&.>-X7e41V kګkZL?ּ0Wn+z'#f7nqvTcrkȮG_UOf33rexAŮ[ֻ>,rTD7,<וcHy_5hm,%}TSuFf-es.qs{?7lo csYk{\f֚1s׶}jNy-c֗ 'n-j.;qK`% k7uea>6ǣ!R^x-֝kNoz%7=0yv#ՏHMnkX>}kl}U\,KsT~$y[\[-l}=vkz{\Klp] Zq~o'WSZ\aNN@?j5 \]CXd ۘ;ץWKKZ5=GG% cCfd7ۿ#ƶk {/eDWI%j@$rӤ84I׎yI=v>oCjc[:f˝BrY[{5̝I%tjS@όG+eoJbHH `DJnZw4#kx 9 |EK9ki "zw{ ikvS-~h\|  noVqUa;s]KIX\-w%P8980ypnwzuЭD ; 6`oIL i Б#cEu1h5)(i=ݩ?zGHǙ_Pn( lhsocQtTSRKTSRKTSRKTSRKTSUwWB:ײ2ݓ_lU:kȶg]4pI%?TcI|W$ywk鯕IOJ-؉m.q-i`-=L-k4d t>E֖e3YfsZ\꟏uP59ik\;'ٿGkqqݎ4h>ק~}'Ȑ;n)l "dIr6 @`Lr>^NH$Zݮwʻf`8ɬIkߤ7+erN{R&>QË"95T5OG0lpcvpSܨt39ۜ_-ṋ֟^ ;h& c^T,_4vAo;\etnݻ#]dM9`эj亞EYxo~ X0%Q4vo椧2:nĵ3p5߶mچMOl3זbfza۶?;gY+5atݵK_{SߵjsGNo1FޥN>/mlU˭ʨ۞@c0jT &l]/[?c\8K;n{j椧(;, KD?8oޮW{KAxA}./v@{Q7?ooobߝmbKl}?}籉)]a1?,Ř7XF%ƹg֣>LcIt;o)t굌sf^K {[3+?Q̯bko-ֲkskQ9;麆کrqq:vFFc,z&<j3ql=]KP9omc>˒v55WGFP/sN˽ץoeWڿI>8OѼ Mh5wTz?L=/ h%\m/o~oH]EIӹ힯ߣ)3D`e!EȃJd\KD 0 =yβ-m,sn}:͵Ŧք_K*q]uI Wa Pyhѵ?\ݔ^\N{`n$?:Mϙ6',ԙ'p0jh#mmϏ]FOo@vێth?q{')Ռ\]x;P47t{WѶvLf[LhO]Xmy66hZ~=}45.fkG>nIO.C-h 56OӇ5bWAªawC} cC=[K\Iv85@& gU98to!p\FYc]RA-kwњL{rJhec6 KXuaTzv٤ i\b5,$"DpRY]m.Iq@B>kU X\6赾?p]a5ͧuwx4kdc9 :tm?}uztM,O'+*6_E֖EuoՎce^z7au1}d|26-*N4{?@}?#UĖL6}NZ_JQf;h0Z,=oQ34){!#R ,~' zke|'Zvծ_Xe ko w_癷YXFSwy9BY-^?V+wc߉NF0.cmM׏Qg%[t3^>Kݡcje~ԳԱ4~̠b=\8~Y~']"9c\ ~Ü;c!U|Ө4b`s,wkg{K#)9Lub?auN&Nkή>ޥ_ezg\|S$)9H%05ܨVXfec\cAZwX )s \8xy#l`p=$:/`\=pGu1tp\یWN֯>1ZsZp4A )Rk-4UX촵kus v;U~{=dfEeoSS\}&kjmsgϭ_+ @׵k^KZY؝c-o_CC]EQY6d`1w]j[mr_?uNmL1l?XnWD/c2w_gum~ا#8pa)KҶ@pNsy"N`ÛNּݏsG^Ϊy]{at179T.xmqǵγ}{~[י`2XuO5͟glVzlK^87Fw Nqː9Ie#9|w*]v·e_&mlێXLX o[\mc  6^UE{ ha X=CbؗTۛlI:jRR@.0{ݧܓւ5$P6qG^Խ_?$W7̃[[Ԁ cD)r'}Ɔ>ZvoRSh9 o#Hd*G$~kD$ :jL 0~⦒}[_|b t VW"{*5cMncm,'oj7cb N;c[Vd]7ٽuz=Q<5ֻeM& c?=N{ {!͐%m/fgzӲz7$6Ⱦ4fd:7V߆܏*[:^GZ2qqO@!4;k}X>k-DԮϷYVNEḙ-Ǿ[6͖,A_zV]%h "ĺ78 ^Fّt`r T!A/{700v+10_`Iw+:}S#se8`ӘX[nΫ'_gz[^EZ#d6Cqr?i'hJ_VuԿ+ZnqKĽqkK0;{YmRfY26iaI|r۽V_I#δ?/unn-4fl}Yq\-p?9je);OIzq?I:J/tGٴ~zDoos_r_SI\KϹ/t̚0'k*xLc[oDͲ \~#w&dF9KpqΛ')Gޅ V?a{-% 5=VάIFnc}= S;%8qKLxm.u1KZ=.{}A5H$x pv>G~I{6'^f:Kdx?ꬬj][[UӸ0CWc>o_c\{o3?~~_=[_|bf sVfFE=ײRC[{5Vtº8Zk?SCMo% eogM? ~I"CoNֶ$I&.>-X7e41V kګkZL?ּ0Wn+z'#f7nqvTcrkȮG_UOf33rexAŮ[ֻ>,rTD7,<וcHy_5hm,%}TSuFf-es.qs{?7lo csYk{\f֚1s׶}jNy-c֗ 'n-j.;qK`% k7uea>6ǣ!R^x-֝kNoz%7=0yv#ՏHMnkX>}kl}U\,KsT~$y[\[-l}=vkz{\Klp] Zq~o'WSZ\aNN@?j5 \]CXd ۘ;ץWKKZ5=GG% cCfd7ۿ#ƶk {/eDWI%j@$rӤ84I׎yI=v>oCjc[:f˝BrY[{5̝I%tjS@όG+eoJbHH `DJnZw4#kx 9 |EK9ki "zw{ ikvS-~h\|  noVqUa;s]KIX\-w%P8980ypnwzuЭD ; 6`oIL i Б#cEu1h5)(i=ݩ?zGHǙ_Pn( lhsocQtTSRKTSRKTSRKTSRKTSUwWB:ײ2ݓ_lU:kȶg]4pI%?TcI|W$ywk鯕IOJ-؉ 2015-10-14 Stochastic interpretation of the contour enhancement kernels.A. Accumulation of 300 sample paths drawn from the underlying stochastic process of the contour enhancement PDE in ℝ2 ⋊ S1, projected on the xy-plane. B. The contour enhancement kernel arises from the accumulation of infinitely many sample paths. The gray-scale contours indicate the marginal of the kernel, obtained by integration over S1, the red glyphs are polar graphs representing the kernel at each grid point. C. The contour enhancement kernel oriented in the positive z-direction in ℝ3 ⋊ S2 can be visualized on a grid with glyphs that in this case are spherical graphs. 
Public Library of Science Fig 2 Creative Commons Attribution License Adobed         P  s!1AQa"q2B#R3b$r%C4Scs5D'6Tdt& EFVU(eufv7GWgw8HXhx)9IYiy*:JZjzm!1AQa"q2#BRbr3$4CS%cs5DT &6E'dtU7()󄔤euFVfvGWgw8HXhx9IYiy*:JZjz ?N*UثWb]v*UثWb]v*}}gag5oinIUE$3/ʗ?qe^>bPUNE>*qWb]v*U ׼ T!hm%3(v⸪v*UثWb]v*UثWbXN~qkCܻihw0pY]iTbv*UثWb]v*UثWb]v*UثWb]v*UثWb]v*UثN*UثWb]v*UثWb]Kzy榵EP0n&ёOb%N23*5s󧖯4 wwc-pAyf2Q*/>qlU4-8PѢr`74^v*UثWb]v*UثWb^{Wb]v*UثWb]c>qZk+xMmgqs6`<(H4ogHȑ5Xcۆ#Wm[j\G'Ŋ,^gkP2CT,Y#bX ]ƽh|ϩy'˺ug ΫbA*1sqY9zi/eu?$hgsl_q(UBP2vd8WbTm--`Fy` 3QUT 8gEuZO/ś OL$\ſ?Bj>qE 7h~وC`[yF<}c<fA:ƿ~n59T#7GF5ORoY?ozn*UثWb]v*UثN*UثWb]v*UثWb][w{?C=+WڸOY^k4 L(Ӯ+Z,7? U6Gm;ygGn,.fY X駸qpjzһyS|%KӬ_Z|Hmc_F4QU|OCH| _2X¿XOլ$Ώ״u$n?|U1j-{jm7z2$ƳX!%丝]xC*7ˌ~M6^Tl .Zo I Gz gs՝bF$I4rN*=~SyRy̱ioz‰-.5UBDTκ?$biyC5 KP R}?Qwn/>ӣ"pY?//6MjIa\_Cm R7iddJYNo[Cv/VD!I$MKۮ*]v*UثWb]v*UثW>5?*|UXWb]v*UثWbX_9?JcZG4[̏ig s$|U*T]N6Q. @u* 9|<>LY'NѵPRHJǏUp?f6|U 5QW:=γ}l;;iWbM*3OT\CGgzƥf>a5eeq8y!OVg%#*UثWb]v*UثWN*UثWb]v*UثWb]Oz_y極}ȋX,k9M'"ܒ)œd\U=o16tǫdu-\nZL('J]K/?WU}j bYUrǒ}U3nk9a[֝LHܘ2ڧ*|o3cA]JQҭaHH,1+$- n?\$\?QG򦬞`8[Ca caoSIpSNn6]v*UثWb]v*UثWb^{Wb]v*UثWb]a4[;=}}vKu3}>nl'kgu\,|~x1d&W_Z *o@oETr(>a_o~?n*iQӦ*UثէK*,UثWb]v*UثWb{?PTyOk_T R x輁V?*?$ysS}OERYӹpԯ1$0务}XjF_d.3$j4vax/]Aum- 9n880XX@VV(DHQUTl6.%OD[ޕE)?^,T|83bWKIo=ֳoz&B7eEOGQ^t+u+tW=)u[建1W]8&*qWb]v*UثWb]N*UثWb]v*UثWb]v*UثWb]v*UثWb]v*UثWb]v*Uثt>*,Ug'?)[{oLU*w=W|^ث?lU*w1W|^8?*7*?p b7ኸou]Fou*7~U>8⮡8c]1]Q]0۩[zVWu_k2 v+K-`;r"MXQ{{kTWuUR]׿gVKwg^EZ,U{{kTWuUR]׿gVKwg^EZ,U{{kTWuUR]׿gVKwg^EZ,U/C\-О+B^M 5bS\R/?58yS֓{dd{y֏ lXdjW_C?:L*b7o4ށb+C Q>bνS=XXu'Z2kwOG n)4F2⬿S- ,(X_0/kO,&RU-[tYmC?/>N[3CK őӟS\PnKQk>Tv*UثWb\H`:Up\UUثWb]P2En_wW>5?*|UXKxx\GmXáuXuv*K]v*UثWb]v*UثWb]Q㊻]Q㊻UK1Wr_UK1UÏQ~UvZ ؟_7Qbv*UثWb3*f!Tu'a2ky^cև4wr@}N&F& քOi`fNMA-Ҹ˻eҐFe1 >UV?͏kKYc.q_nL͊g_/@{}T~akS5fJ.,^D@hG语UNmvze񶱂;h8P"QXWb]q IN*9\0ccUWb]v*@&bFy.[{B>⨘X֋Uv*UgOwz*tO[]v*Uk*=[y]bY&pS7ޘDIW]|1Wq/* ኡӠd`cG;SVL-U嶎HwDbD1B`$)NZE mp0`EdCM|UȪu|1Wq_xbq_ :Kh[ګ g-R8TSҪ£ҴѫJ+0+t\Wb<1U+[HD9d!TTTs eIUEZ`8߯nN:|*7Qbv*UثT.Yiv2"v>ث0_/y.ZYFŶR HӵwSD"yۢOtF'-!薞Id<qAu$6p~g|R4O([֙}SVfZY!0!~[h>zHl5mBi9HE/CQu˿61Ma`\ 6Y# MNJ'B ;j)}m$uK*_⼓ *~XCiW*n. -@kr<ٚ;~UL@j3Nbڎ?1%J_?I17? &,czꤗP@bm^ت_,iw_ZY*#eə%DKtyWb][$ą`8<2ޞU]v*UثLꢧ8)AHf>LU|2sf) GR=LUWv*UgOwz*tN**=SOo33fx»*PPhѿk r)["&MNh.LD(dcibVYğH}ޯH K:j"Whe=(6@#nYlD7#G][Zn.H rTr'cf U"67][,QJ9Qd0 7pJjfj7^Cv6Eç[,1inS9f-D__,ɐJ)ثg)?.S`NzuǒogX+xxʟIz)-5y.ٝᶂ4y\# |NRfYN#8J[M*+qKtʼn Ʊ-4_)DDE"v*Uԯ\Uuj)y-n-+vEKHv*Uت坵ksD$QR(e;цJ31 Wȥ_7Qbv*Uث? 0Uѩ{vT/-|l$>l3*`9~5ה5nnmH~cʾPu{[i&[4[j2TBodo"a%8_NqG ){/KLܐ#OR|0^/-) +;'}УRÎ(KyFfGFu۟Ն-̟ޖ*O4ȫ "FEqJeo5 Ayw튢˧ED#`4b4' G'|UQ!!&튨[IV! Pӵ6'MNPt ˸ۮ%,Vi%I6 hDv[[ewm}=JCs @ËqTæ*Uتe],D>6=? 
1U0'߻?h1WHE'e6*k^OWb]v*5@BБ**rK+VS\U#AQܜUl19#U1Wb]L=aQSBZNqVWb]ixWb]v*Uت]ծt}&, 7'"@itgUc sXqJlM_!ɪ1s[ ]Uʏ4l߻oVk'D h СIcXվO|U6]v*UثWb]v*UkGWb7W7Qbv*UW|S9={CrZ"sW儤_hyȖK9oB62\$dK4q?l{䋏1̺p{$G-_)on?ung' 6$2a@}X幇SѬ5տ`p$p>>.rbZ?Lno|ڒ*#3 {dsIXw2ڛ"[z$3@~4RbF>/a76N:I.m'9>vYux"1˓Ê/˚D'5@c`QȿSKۛKErQ@4:&&G Ӆz9b3E\k6_2yKsJrw_GqeBxFb$"_@`G ,q+A40_1\$SXF3w U)u1P⬃v*=jLa?ڦ튵t4⪱dv?I]#O* X*OӊWVZw[]Z^Ԛ*w ,;c b$~hzzbd[asOOӊUS|O*U(TWx߄uTUWy]z^gov*UثVVWb]v*UثWbR bV6zka)Ӣ_(ԈD^roOWb]v*UثWb]i}#oig犿7Qbv*U 8*o7wī5zktkRxeY+s'n X=oZBcW3H'?%yQfK2w]nmQ-23|p_86mY[Zdr "SbpUfb9"~*8|q'G=ɾh_Kl"ERܱW+f&i0qėZUѥݏo*Y[C?*GW^KW*[$>RkY>~?*XyǠ U&OB#JUd8w*4dlU,#cҿ?8++(kFt8W>5?*|UXKqVWb]i犷v*UثWb]v*:YHDT$ʞirNmv*UثWb]v*UثMoi|W7Qbv*Uث/ʏ.ZG!~_<%%8#IgukTuCPO<늇~sju冓MmnP?e}cց/,!N$Zӧ[H?o4ξc_bAt=VRA[oWeeS/Pro1NdXySB~(ͱW>Z%5G%IR!LȞ28┋Ơv$E 1~+?qRy'>'r"r{xߘ-:V:|:~YۯA@S$XWbXי'=Uv*UgOwz*]v*u4jra)\ǿ4?u ت,f̾GQ)e L^j-Տ=60^4oG^Cq oS2vK(רMuwV+*]̠*FOSJ37tC~C038w gI--IW RvuKf6A=.A OOf`v*jkwH.ݖ\3-%I@ɇ9K<S>\ L wW~e5YG×y'FfPEw-67]5M{H簂`S($|9v,/>$iKG[TZgdS9j_?\>߮98?C)ˉ~_s1+;lCumcEUeK gjoͮ1&/>Mxe#2ҽ+{ȫ\ēG^dP¿Ac.3 qV# hq_c͒ZW犿7QbNMe%f 44]{ϟ8,j:{j6z().d41b8>TY]C) ***0y|xR17׎ғ|_g#ʞXӣmSY)}aXȝJJ f4`/Å(%2iz}Ƥl՗%OE)4GT/t9!ek# hL D 7Ô?b?&H_OQoexai1U +_OFV.n*?hUs,v֮$_8yU] ,.6s^ܰfhw=M]eYT~*UثWbgϘsĞRJWLMzq$O+*wR/+6pU=%[X0_*f7DF6OH*fg_S㊢1U9'E@qUYV203с>C6aݓ>.>UX֥i]˱?5SH9 y-`rܥ#QOfZ|W=P!#sp0|UA0G|e'c?U0Te78 BqDJ*B>X+Ya)W8'D,fWaVU (]L=aQSBZxx_h|I,o- qry ERRo+y^˖wvr;w{s~C{ OR~>Yk#cJZmoݢ1]@nWv8%U(c1gLlܩ?/,j,wOe%~]Hh0`ÉS>84,rxdNcRXi(&$ Zwz"OMJ;Q8YD8cߩj^[cIQEyŁ?I(FQl>%K>UӼP,ё-j9+.83K@"q*}"M&DBݭ1d..S"/ʖUŎm#O e iffj9vS,3X<  }_&/[\\J4Bʧ؏Wz2GExEW)d,JEq3IC&)SB<0=?>IbNWkIu+XfkM~|=2fc(Hv:扬+ Y%@ReN$> "y?U yAӮ4<ʕ;^:7,Kp'Ԧ$z_mXKʽxusNJBW[R?ɬ}d DAЂaZR&(c>Ws"9!X@?a,`̆?+^jb,ӣA>ZTq)USL(=s8f*]@z&iϧj1m1Z2嘲jAZCYwZ^[QMfڃ*beeu1pLɬ}OTӵ9-/쮯IJS~/ a 'b|6I&7QbBze֐][LAIYXjS*5ː\:ͅqzinEdY{UE8ƍ,|X1q[:Ns,imwGrz喽nAk涷XsReI̚1<$! j E$S = IVk C[5f>qߵ[{8+Y.|go<`d6Ѡ?%(cb6pZ\JH*O/mHF(C[(^cM=&;=J(X^1<w?*W_L\FCg~_F*敦UY]=%@S8cv*Uث]?WK\TCɸSXÂ=!&_ /CD,3?ՠ%4I*4|Q0ߏO|U(yN㢯Q,-բ<'o*+P8!6`z޸˫¦=Nتk)IYHdN Uמ~cM SUT@=}⪇xķG)~vV$ ڽ?dxW\R#%^N*\G# r!{k聹h'*K(#USM"5hG~x\9v٩QQQ,lh?UDf<GUqWb]L=aQSBZ^b)i$gc )Tx\QZnivM-oD+Fb/bHV[o5BOIj~ \V9Ȍ/n#xDNe]'TNWTN ;Pҍ4WA,gT'߷[u5 W]eg:Mr74WE`68Ѷ]kA>)>4xmbQcTP>X'v*UثV)"F?[\h W_7 +k;@:ۯ*,U"zFwZ1Vb[VpG҄Dr+\EN6j8عٰQ# N?h7.*ճ۫[]>7,jqT6!24! ,} GEU59QOOW b ׮S=ݛޞdK+]0BxՏW.辥3? }U n XfR0?iYiVwH$7\O?ڙHV7w`" P6*4d2ޣuUm,#>-:E)~,U3=qULUتBU#T%%5#pKQثt>*,U[]v*UxWb]v*UثWbF=4mTLtLXF(xҸov*UثWb]v*UثM1VVWryF-w]CpPTtE8b6Pǧ}G~l}'労d[,>oQľjoU3%9lM8\g 7!x?& S/N[TKyjJ, *T^ۓO*<N_nU*v'ӊ<Β,O2!㊷/<,7ԋs7yK^&E*{1UVO3y!YjJr0-پx/0xJ+/NkbeƷƏw&+}:id)Bq7.MpDI&P^1Ia7#b-$5}tڷ ,rU,Up77@H[~C G\UrxۨO⪑F#brg}?⨬UgOwz*o**UثWb7o[]v*UثWb]cޡ}*lAq$M*/¿Viݞ;(Zs&QCQ3OU^t[Un*@Dbk'@&Ԛv_lU#7Y$籒Vab}I?ؾ*kZށzIsj_g޲9e_Yxf.(p>6S?}TK_W<+Wb.DWv}D ,dӗr@!:FR-?:Ű[.,d+;NK4V|%Oͳg`9%. 7'ub_@y{\״;fķ5RxC0+Ň/l5zi`,r˅ cWb]ݾcoh2A=i44꒿_UyKKַ,BUV8[[3=)U !^\r),MQ"x-8cDBO2MZH+Td?RZܲ=HDeS T=A/d$I'9>&RmYx'Ŋ|Ʋ-n16Ufoy3~*I~aMQ{w5uNC8|1;bk1khX914uؼ9/΍[Vս'3@Tw@"~:I£4oK[ȮEoFʁv@}URM{Jng7~rڏXŸN8ukimfA4p%`7xPw^qR3]8M[AQ#b/'eT2#*ZiNkgxT~)1U$S2Mfr? U1"c紒½}Ixxgk5olde7$bɊKkLKyO[𵥜m̏<#\Uq5 ˪J+qΪX*N֯neJWfVQqTJZOZEeINE_@ ^-xy"Ya{';o9bW^\K=fNaJ.ncx|KzK*s30j~Pf&xdՆߒY&T%~,U3nmg{4@սfPx^_T }'XmhSTJ4b*3'ƻEO i~|**)RM|qWbcȾzl:Ei%UE#1 #?!+hxP5&%u2H?6MzZ (I"ŹӚQwdhq ')|~RT_2C}b,. K#,3FY Y1(Y4G]"[ }%MtP J❾*YDq $7 Xo0C ud%1,!aR۴<Xs|_P/G#Pou$ON,U0 Q+Uy}d"(2ɽ>K!\LV:.?3?'w-ZGik%m_8O&HY:Ǵ'?OIoP.8|^(kAsA.x˓(iA_㧔5o/hKwm}7LcvcoLHjF0?ӥa ciu[^YP/_v^p'_Œ?@߼dļ{͚.m}u-S 2F gK><<|?wVd@=៟^^? 
[`ٔy*J"4\{70lU.*/o˽B/lm3G{kAm6lmwXPLe]ڃg#ck/&kZ7<;Mf,[$%jV_WsUtرVxٶ4Iﯹ歛T]f]iзXzԔ`xMK:}&L\8PdSB53iup&6qCY4*?qe'8_B&Xk6u_YD;EZ75_22vc<";zcdž ~^q)N_~yK^o<Ѡ o{eq6e^<sG y.o?x^VN]yO 宙++Fj-原64jޱ cS^o?xgv>,3AŚS_ u4@sMƠkVO[[}bOD+6bԹWɊpW~?aFB\9xG6YoFS&*zƱLI;~*gom=tX-QB'GAkD@4D15Vp$B+}U /ICjUj b\H`-og`nazVVEX̓o_Szwo&v~Mk!c#S}8SVs$ZȤٰZ% ~x%nK]iWD>"B6YJ!cR:xƞ?:A21Kו7NjqQ8U63 O\7+SƝO,U}hGȞmE4cGq' E "1I! T|b/j~ bmEXpb+B,dV!RDX5^)EN*co-VL[g,,D*&>&WD}W- POIdV1h#bps߾*ݧ5{y$K#Ƿ>#=OC9xi,6o\F75 +qTT̺l s}k5?N7<r |$[bw% F=⩮/u{{i(azux%{b6rBá*@H ӊӫq%Z 6Ż+P=Q6^a8٧#Fd'ҟF*((OlW$ѐ Tֵ +U~U2>a,(@?vXڿUN3Ē[)[z4aJӔj*,UG|ic̲Bt}F !<[ն~J,*`,DegF| Mt2K,- )_N+H3$OҴhfGd cb*H U[]cfjt̷ \ۅq(A$Z<xOXAjghnZ uFbFCj)6x#p«QEUP>@bI&ʺ %(ƀ*_v*<]'[.ܳ-ւH(E9e_20gHD<b` 1z_ZD>+VHQ?\[.Sia~P$A@$n+l:Y^*UثV9$ibմ JVME9'WFd`ǂ_1A 4  +E[Ue֣2Q"Fx5GU mdس+d?֦*k U@w9rc_P >+޸Ak:UJI*޴$_-_Q@)ꈖuoXƍ}H$+gkN+ioE9Kr+@孵̬e8# uy'6*ʝ."d^uAdDcqT6\yMZYTraX$aYBx@ހv1D~|qV{mEz&4FAޜ1V?7< vT?C,G$cO,#;PxI5IuEMV*)_#b'AOV7T<\2Wb.4颅fIaC!q9ML[b֞TY/P;⩜~B-wv~ qTDBG/? =5T<$*LҖslPPXgm(Nu@ykˈ^ @iH2ۍ)W,U 9䑥;H“X!]8⩒yBNy4mZ9 deMBA0SզߵqjH"Ng@GWzq"C[nӊeqTmj#D+#VlUm5+dq BRqq=L[Ԗ#I,$8^>hɊ[_yE`%[Cj:j6*to6Z77}?|S! TCLiRAzFZPm̒SrYUj'BkrpBtmm;ԗ jݿ'ˊ(<۽JjW~_ 5-dI9 /C/;tU阫t>*,Ub]v*1v*UثWb]v*|?yPaՕaWb]v*UثWb]i|VVQV\Ms%żf!>B}B(ls#KX[\z)ROqVׄr ȏ)F kWZo ?n),?*kWW HjCPhV0n`#^vb1U/Wɶqpi>kXdT$ 3b^Y_fqHό\Um,w-GgG\UdBE")y6b $i[_銨,Jڔ~S)IF4[T:f#+jNRUtT$5*GI 4eT 4d7lU{QvoZžh@ʋcH$'$lIcAu=?BnIĀޟUJ/,[Mѷ)m*?gkTUSj^&Ty[Hb'`O) C͛mK7Y\,U{JE (EFU?Ud<hbz`{*M zRP@ޝE0V/0ybQ%2$d<$Mߔ#rVS 4+nmzÄW1.{GPf!$[ 0(f2,1Ub٧J^%_E$t:,>+.*򆡪u!.]Q"_:.gveUVvBzG ⩓~Sk^9:bFS ?$k- N7m؊X |[R2i4'FQw zJq[\HH+׵x0hF$J[Vc37 Uo-[$}BQEs:Sd9.*w;[q LoCK@? U+%kQ aZ -UtM-inoѽVckfB#_@EAUB~ WT;wKJː@Lڵ1*HʜR/Hq\Ut>*,U]v*Ӎ~UUثWb]v*UثWߞW~Zum7QH߻*(Lof./|,&)զyv:OoSFԕxpsCC=.ib_͗lebӜKWb]v*UثWbZnܳ@?ZM6}ToF{{rcz+ˊS6jRQFSMɚG5 :b;E,-"bMRg_I%N0̀ʜ%P4 \qKoPQ8i H/޵JC2ݛ*"gR_Z/&`6ܽ>O,dnXWvVfG2pZzi֨V16*ԛN;IT~r5_hoT-zZ[IGT.p<[1xbMkn^?[F*,a6Df):omFz~|8>)hm ̇jDxPQ'HWҭsrKQDU7{/2Nd=/nС?b䀬UBRKF '"2u-~ 9Sy2TtU !^OŊV9T-o8UQqW 85&Ž]B0'Z܂H-";Gs٘$G7*V$Z0Sj.*LmaPeeff484ybx챁ۉ[8@XHNwQh B MMeZU"/V(0W$Wu\Uom4 1- aTIBNT:ֵdxt>*,U[]v*UUUثWb]v*Uت:zlmf$l%H|D]?YvbYٳ<y^oAu?yy-gx'/.Hzs v*UثWb]v*[Z?i~UVZI<ᎥU} PKV8yR[(Q!~#rh=TqTI1@U?$-Nߴwb[<,U ^%qUiDх9<gn?bGҘV_ZUwiG^*1U%iDV(Apħ}$y,U;ђ%wi"+aѥbj8zԚ'4f Q>Yu.*}WJR@b"_xcbnO*mOyU+i#ca^IuO佑B?hWi/۶9IF^4_H, E0K+$V&J|~OgoQdE[KqfboC_U N+#5QU]Y*&}Rd@rV-09 2yYG}U.]ӧ[(UڱEďw4s 9_LU30bҙ CMZ]#٫.jȁnB*~^(`_M#_ک䦿⨴-c;X>>ZYjWtws+P(;ȼ1!?lU3; ͻ6?];ҒTҜUvBWCe*'.b #*a Re('Q*jS'ib=?|kXTбVSv*UثNh>v*U~h~cX ˩On׷WSd՞@XEffgvv9;d IC~[~kitҮZi;,jʲ*8nKسL !̡3c0 ȼ-4F F'1$ ˋêkzy匱/Sdd$1RUثguO.jVAi<+be^(]3/M/\c9F(R 6/| u$%A??3z"!x:TO~k9yOPtmwie#)(GOlNW?(p%?~;Ie1 c>VWPj琑NcxثT6izuΥN6q3ʑ12PG9+_3ֆ4+X-F9bG Zf񟳝ټ1FwK$<`3-GkYypi G$|8.5#CDB+R{OUU*?ϲr MDdZ/!PӵizHﰚP9?\{/P.. sLICLUX\Ud [ӯٍ i.*DK6祜!y?-KFb^Gۑ/[S9~D#FOc;ۇH2yL)cVrXhޭ7)ēˏ<G3GH.,P|H.*˺$uFv@;j,U7d[h9쐂+,?9WgO0Qn YUI߉j"a6*}oX!@B|U:(S3=eOVSBћqV|"$__U8aEP䳺 yqUHfRV^@@,?fP0dMyEy_N fm˓ xӑU"Uive8 m` hC >"m?NC`E%dJ U&t郫fUAJ>WbZu:5Ւ4!/!S5RMn-oڈ)7?5X#CVQ^> *6m;񟣶*dwnZ Gdu8"E"~#P0x#4db5O/تkUìH>N>Q3K 4*I+M*hf^G;VqUzrCCh$l*j*3'ƻEO h}1VWb]V53Dn},F)1vT^_^͞11);L/[d_,WR׍t&o+.Ad#}ڙV˖0'N\<YJ=ȏ,k~ Ow)9 cCLpe2x%O:ͩ魥 Ŵpr"B G1D q_? BXGbyLE.u+k&A !*M. V^/?ή?-9 p2ygl|#gK(d;]RK${vIZ U|-'^-|93Q#XWKO9ĦW춢1s\> :0plbP;ZWQnlmS3sb0/h˧9/he#RQJ˿'._՘j Zg;Q}S"?>)V$|cu?/y\ e3˕Ô !":k|Yv]>if_Ō!HI;Ązk]WbPڎiwEgjwX4PYb<3cA46}䭩#Ԋ#z͈ ZE~3ya9OG_<<s5k6G,5TR&p(ኬGVXپ l\ۗ?UmǼ^s U ıTd:zY_k~NHw*UNg9ScBYvolU OQx%_Zv½)T'9 7EN[[؍Z #Ŀ*7 ;` #N! 
>&*!(@uf#¸%`G϶*juڂI$Q _n_)B,H7e**U#U0Q2I@F}*a.H4Y-Hn2q8^LВ kGJ@\kD[ 2E#~8~b%Ƅd’D7e تn>h#2V}TSS,X^LTu(Oo-u(n{; 7FVZFUx}@:A?wn]?N̤84*,Uъy#rv IJ\ݧ/v!gU=?|Y 9È9(^3+V>~ZQ2L롁&kBD%FUOq1YO/ogUqe>NaM7cekcgK k HU 16yH<-nJ k6d#2^ eF6fi49u^/E#WGFyrTj6I]ǨK*^zSc'ٺ0N)Dsoo#AwE.C/1%G#&̞xڞ!0 JGs]87wWH *nK7 cnj@tE~\t(bmR;ˉ/.+FcWmJ bd՞MReWb+?7ͤK/c3(]y5do_6z˞3H?<2zs:^XFeO7>BVr.g.bQquz.3/WYO'0ɘv_O4є+of 83|R3{ QE IH8""Uf6\W`CWb^q~'5Ȣ@UmfUn*~w贓bq Wˋ'(~|Mq]ťDQon~ j HFb<43\Fnϗ@G&G%_RItQy+*i-1UE~2zl+|~Uyv+.*mbeBo r(~&>Kyec- [o\U0A*UJy0/7m+BOPv(<⸪%"HIաb?ma-D~FGbتRP?qȭ{"|,Ui$dD󟇗j;3v5;%EuOUtyATOEF?SA$Qчbr[$L~C[\d܄'>z1THR_q^lتEYoFW>T˂7|lPG E_~*qT4J &@WC [+bn orzrA %m2C&K'f WKP_8]x.~ FHOqWJެR-OJ􎘪 ![wI?C?a_RhfH: R#ۋa\U9PN&S1$ӯaqU)&vmAE |~lUBmMqTaDoOˊGUl1FSE5UUcVn I{⫤dBh0x1؏[*b=?|kXTбV?F**ʚwtFoHiadu{yVU ׎_O jCc((YC'bZnv**UyZWn[hn|>ec̍>xI15eH'O$eVR+ 2vM UثWbX.42ydc,akI1/fNUQ5fi5R< 'Hğ*Ɲُ]TK ,OAF*h&>%vHWiKY踪EtW "q$ƃrzU&q>+*b˃ˡT, ՠCF&!Ӌ lU4KiYf"(v%Q]C&_=91W@eh+|HQqTF*s5W]C1Uҝu2qT*֓ kF5Uy1PQ֞EuVPz8頒7P?ln@ak MG^bW9!R?k1TDG6*~_㸑HY;Wr'gSUD,g',{|Ud6QSbWb:#:hEqVW>5?*|UXCov*UثV**UثWb]v*U~dk yZ46}ˡc*rh/tknvGG|m%^o?\/unjYj/Q]nӜccWb]v*UثWbVU⫱V_"3\MO_] 4iT_e}ϋbUث\2VȭNتGOVc?5-qrcnGqUXmE*vYwXx2K)+փUcd1YXUdQU !h?*]v*Rg8"xɳNV5'GOӳ>*=XA>N*ӑ?*mb o($բW*ovQqWF(OqU0#hy!AőP WVVPAS#**7!,m~eo$L>cdķ=$PiUqT4~WTI*WkከY$J~|UvG0)=w_*AkSU*xثND 8?Wb]L=aQSBZl**UثWb7LUUثWb]v*UثBX:ٙ@+PVU>ORK(;0Ggfo9޳5.L-nTVPY[@`Aع53cx/Dw#v:g9]v*UثWb]ioZ~8@E [N**Ul,qEqU.Q%WtPh=GS _\]PXF?S-!>tU~* N*48HAG@1VWbT|uXm;bU:#c㊩ ,,;c*%b6xbNiw<+=]EK|W#MD V]{uUHU08UM>1UoqU%D7Urv.>%}'{Tn*]!eeqU\UثU?M3t=> UMI=IfM=Q?'_iqT] wbՕTbI1׋è8QI(/Q؏튯]L=aQSBZ[]v*UB{ɒ ! *d""*i\wcrF2yO& '2Pm1 ljDY\Y~eB ~:XrYԄKH|.^W9'zW=* yItBev0"d/9GK-sDuD.f.r"h⸴ՊnXT?vCm( 88e?LJp&;yΚ$qatP43Q}0pqÇ2 ' ҿ-|_1y=f+4ҵ,Dad\!/s8#I/ᛓD ob//dKki༛7:L<\|D8rgH|Kb"_B#4215!e&VjN->}ynQ,kwLLUn*m^ *>x=>F*v*UDqFT^R{P ؏p?*C0\U'Q/*cCp|WJ0ٗ⫱W>5?*|UXCퟐ[]YͣZrNB*0y˿<|]){X.䷝9LTB|2&f?,rHgx&Ґ18_z[]ѵ p-e[\F|2Codd^v#|A=OlvWg \ :wNla-Nk ny5B J{{ H34d}ua'eU陫lhN*U }t!N+xZĕ4 Zn*U (6xWbWFsUP1Wb]qWb][$We7WW+tf) EzoXjHb~///ߊGC4L{𭊢YE r'ӊncmUBI^9C:'{bUgOwz***sumknS$RM+E,@2P)^VVDW?ϮEjl匜PBS_6lɥ?\!RwT,QPRYEdُquۮk[~Z3ZΑ&+ dʫ+M3]=8H53lv*UثWbthۈD"m`Jhު ~*?Y8G.xrovQī`ZTKuxLS;ASN*rv򭅉v/c-šKV}#$^a-8hez*(4ZV **PTPG0;w8UثWb]v*Uت̅*IJyMD eGn)/8*-h~]XwMJc _qT|ofZy^Y2zs@)/Ϥ3ɛ)LL0i^b-5*a=bH_b?eѾ_lz\|Őpw(KWb]v*UثWb\4@1<&h$Rm֪͊m;۫+_0^ /Xfz8}O}J촭6NONq)54K3Q]*ZmiI=̽`>tY⯚_yS<٨_?&+8b nrf󉟘K\^Z[MYd@? /渫ښwn1GCm UoQ>U]ΑbثWb]v*UثVbsD=Y'8#v*!('oUGCv*UثWbTn`.OY>zz8r== zcU1Wb]v*UثWb=?|kXTбVov*UثVUUثWb]i%"TRcm=QP ZM5vnnfw&rb$z\8αT>ys{/VY[M$UAX[+}Bi,U~@u|ӣ^3L:xKyaNm?(wUUEPb]A4qU|UثWb]v*Uت=6O~4TJQ@: UU$ڦҼ >q_nL[NC)TjU[v*UثWb]+A׮*UثWb]v*Uثt>*,U1VWb]i[R̖ZޭkmѾvlZd d&3S)"YvL<'^߇րm5RUv]꺔'f訂d3%/7/rO:\u^L4P\~|yt=#LNq뢟_x |bX鎎I7"GV~=X͈C*AeC_2ϧ}F= ar@ pcIr܉Vce,󾘾4K[hYtiZ$~(8dj^v*UK?cYL) _36}8j=i4:V"BQ/1RzpyhSgk1)?ŊY4LJHǟ\Țfe^Mv=.N'ݗ\~9Ie]iت3@$(H튼W=3Riq{gLZQRNU [aJ]n1c*,U1VWbXZ |P[[by![Z_+g'd.<~Ob$ #)~c~_yKsk%wI04tt Z=\691xVQ1nġ"B"TP1$m&0!ثWF^_TMg3COkMA-$XncyF:д Gsy3yOua{ųZ~-lܔ6nԉ0 t[(+}]ΗwVD ;_sڭlF1<كO3s ثWb7i#WPZ^O@#0wٽOo!#|]yVqzu uݤxǚoj?#o< O!(bHbPF#A p6[IUثWz>8yUֽ}MCXO[(C?iyr&N{;2=P.' 
dipy-0.13.0/doc/examples_index.rst000066400000000000000000000101251317371701200170450ustar00rootroot00000000000000.. _examples: ======== Examples ======== .. toctree:: :maxdepth: 1 note_about_examples ..
contents:: :depth: 2 ----------- Quick Start ----------- - :ref:`example_quick_start` - :ref:`example_tracking_quick_start` ------------- Preprocessing ------------- Gradients & Spheres ~~~~~~~~~~~~~~~~~~~ - :ref:`example_gradients_spheres` Brain Extraction ~~~~~~~~~~~~~~~~ - :ref:`example_brain_extraction_dwi` Basic SNR estimation ~~~~~~~~~~~~~~~~~~~~ - :ref:`example_snr_in_cc` Denoising ~~~~~~~~~ - :ref:`example_denoise_nlmeans` - :ref:`example_denoise_localpca` Reslice ~~~~~~~ - :ref:`example_reslice_datasets` -------------- Reconstruction -------------- Constrained Spherical Deconvolution ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - :ref:`example_reconst_csd` Simple Harmonic Oscillator based Reconstruction and Estimation ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - :ref:`example_reconst_shore` - :ref:`example_reconst_shore_metrics` Mean Apparent Propagator (MAP)-MRI ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - :ref:`example_reconst_mapmri` Diffusion Tensor Imaging ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - :ref:`example_reconst_dti` - :ref:`example_restore_dti` - :ref:`example_reconst_fwdti` Diffusion Kurtosis Imaging ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - :ref:`example_reconst_dki` Q-Ball Constant Solid Angle ~~~~~~~~~~~~~~~~~~~~~~~~~~~ - :ref:`example_reconst_csa` Diffusion Spectrum Imaging ~~~~~~~~~~~~~~~~~~~~~~~~~~ - :ref:`example_reconst_dsi` - :ref:`example_reconst_dsi_metrics` Generalized Q-Sampling Imaging ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - :ref:`example_reconst_gqi` DSI with Deconvolution ~~~~~~~~~~~~~~~~~~~~~~ - :ref:`example_reconst_dsid` Sparse Fascicle Model ~~~~~~~~~~~~~~~~~~~~~~ - :ref:`example_sfm_reconst` Intravoxel incoherent motion (IVIM) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - :ref:`example_reconst_ivim` Statistical evaluation ~~~~~~~~~~~~~~~~~~~~~~ - :ref:`example_kfold_xval` ------------------------------------------------ Contextual enhancement ------------------------------------------------ - :ref:`example_contextual_enhancement` - :ref:`example_fiber_to_bundle_coherence` -------------- Fiber tracking -------------- - :ref:`example_introduction_to_basic_tracking` - :ref:`example_probabilistic_fiber_tracking` - :ref:`example_deterministic_fiber_tracking` - :ref:`example_tracking_tissue_classifier` - :ref:`example_sfm_tracking` ------------------------------------- Fiber tracking evaluation ------------------------------------- - :ref:`example_linear_fascicle_evaluation` ------------------------------------ Streamline analysis and connectivity ------------------------------------ - :ref:`example_streamline_tools` - :ref:`example_streamline_length` ------------------ Registration ------------------ Image-based Registration ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - :ref:`example_affine_registration_3d` - :ref:`example_syn_registration_2d` - :ref:`example_syn_registration_3d` Streamline-based Registration ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - :ref:`example_bundle_registration` ------------ Segmentation ------------ Streamline Clustering ~~~~~~~~~~~~~~~~~~~~~ - :ref:`example_segment_quickbundles` - :ref:`example_segment_extending_clustering_framework` - :ref:`example_segment_clustering_features` - :ref:`example_segment_clustering_metrics` Brain Segmentation ~~~~~~~~~~~~~~~~~~ - :ref:`example_brain_extraction_dwi` Tissue Classification ~~~~~~~~~~~~~~~~~~~~~ - :ref:`example_tissue_classification` ----------- Simulations ----------- - :ref:`example_simulate_multi_tensor` - :ref:`example_reconst_dsid` - :ref:`example_simulate_dki` --------------- Multiprocessing --------------- - 
:ref:`example_reconst_csd_parallel` - :ref:`example_reconst_csa_parallel` ------------ File Formats ------------ - :ref:`example_streamline_formats` ------------------- Visualization ------------------- - :ref:`example_viz_advanced` - :ref:`example_viz_slice` - :ref:`example_viz_bundles` - :ref:`example_viz_surfaces` - :ref:`example_viz_ui` --------------- Workflows --------------- - :ref:`example_workflow_creation` - :ref:`example_combined_workflow_creation` dipy-0.13.0/doc/faq.rst000066400000000000000000000147151317371701200146200ustar00rootroot00000000000000.. _faq: ========================== Frequently Asked Questions ========================== ----------- Theoretical ----------- 1. **What is a b-value?** The b-value $b$ or *diffusion weighting* is a function of the strength, duration and temporal spacing and timing parameters of the specific paradigm. This function is derived from the Bloch-Torrey equations. In the case of the classical Stejskal-Tanner pulsed gradient spin-echo (PGSE) sequence, at the time of readout $b=\gamma^{2}G^{2}\delta^{2}\left(\Delta-\frac{\delta}{3}\right)$ where $\gamma$ is the gyromagnetic radio, $\delta$ denotes the pulse width, $G$ is the gradient amplitude and $\Delta$ the centre-to-centre spacing. $\gamma$ is a constant, but we can change the other three parameters and in that way control the b-value. 2. **What is q-space?** Q-space is the space of one or more 3D spin displacement wave vectors $\mathbf{q}$ as shown in equation $\ref{eq:fourier}$. The vector $\mathbf{q}$ parametrises the space of diffusion gradients. It is related to the applied magnetic gradient $\mathbf{g}$ by the formula $\mathbf{q}=(2\pi)^{-1}\gamma\delta\mathbf{g}$. Every single vector $\mathbf{q}$ has the same orientation as the direction of diffusion gradient $\mathbf{g}$ and length proportional to the strength $g$ of the gradient field. Every single point in q-space corresponds to a possible 3D volume of the MR signal for a specific gradient direction and strength. Therefore if, for example, we have programmed the scanner to apply 60 gradient directions, then our data should have 60 diffusion volumes, with each volume obtained for a specific gradient. A Diffusion Weighted Image (DWI) is the volume acquired from only one direction gradient. 3. **What does DWI stand for?** Diffusion Weighted Imaging (DWI) is MRI imaging designed to be sensitive to diffusion. A diffusion weighted image is a volume of voxel data gathered by applying only one gradient direction using a diffusion sequence. We expect that the signal in any voxel should be low if there is greater mobility of water molecules along the specified gradient direction and it should be high if there is less movement in that direction. Yes, it is counterintuitive but correct! However, greater mobility gives greater opportunity for the proton spins to be dephased, producing a smaller RF signal. 4. **Why dMRI and not DTI?** Diffusion MRI (dMRI or dwMRI) are the preferred terms if you want to speak about diffusion weighted MRI in general. DTI (diffusion tensor imaging) is just one of the many ways you can reconstruct the voxel from your measured signal. There are plenty of others, for example DSI, GQI, QBI, etc. 5. **What is the difference between Image coordinates and World coordinates?** Image coordinates have positive integer values and represent the centres $(i, j, k)$ of the voxels. 
There is an affine transform (stored in the nifti file) that takes the image coordinates and transforms them to millimeter (mm) in real world space. World coordinates have floating point precision and your dataset has 3 real dimensions e.g. $(x, y, z)$. 6. **We generated dMRI datasets with nonisotropic voxel sizes. What do we do?** You need to resample your raw data to an isotropic size. Have a look at the module ``dipy.align.aniso2iso``. (We think it is a mistake to acquire nonisotropic data because the directional resolution of the data will depend on the orientation of the gradient with respect to the voxels, being lower when aligned with a longer voxel dimension.) 7. **Why are non-isotropic voxel sizes a bad idea in diffusion?** If, for example, you have $2 \times 2 \times 4\ \textrm{mm}^3$ voxels, the last dimension will be averaged over the double distance and less detail will be captured compared to the other two dimensions. Furthermore, with very anisotropic voxels the uncertainty on orientation estimates will depend on the position of the subject in the scanner. --------- Practical --------- 1. **Why Python and not MATLAB or some other language?** Python is free, batteries included, very well-designed, painless to read and easy to use. There is nothing else like it. Give it a go. Once with Python, always with Python. 2. **Isn't Python slow?** True, sometimes Python can be slow, if you are using multiple nested ``for`` loops, for example. In that case, we use Cython_, which takes execution up to C speed. 3. **What numerical libraries do you use in Python?** The best ever designed numerical library - NumPy_. 2. **Which Python console do you recommend?** ``ipython`` 3. **What do you use for visualization?** For 3D visualization we use ``fvtk`` which depends in turn on ``python-vtk``:: from dipy.viz import fvtk For 2D visualization we use matplotlib_. 4. **What about interactive visualization?** There is already interaction in the ``fvtk`` module, but we have started a new project only for visualization which we plan to integrate in dipy_ in the near future. For more information, have a look at http://fos.me 5. **Which file formats do you support?** Nifti (.nii), Dicom (Siemens(read-only)), Trackvis (.trk), DIPY (.dpy), Numpy (.npy, ,npz), text and any other formats supported by nibabel and pydicom. You can also read/save in Matlab version v4 (Level 1.0), v6 and v7 to 7.2, using `scipy.io.loadmat`. For higher versions >= 7.3, you can use pytables_ or any other python-to-hdf5 library e.g. h5py. For object serialization you can use ``dipy.io.pickles`` functions ``load_pickle``, ``save_pickle``. 6. **What is dpy**? ``dpy`` is an ``hdf5`` file format which we use in DIPY to store tractography and other information. This allows us to store huge tractographies and load different parts of the datasets directly from the disk as if it were in memory. 7. **Which python editor should I use?** Any text editor would do the job but we prefer the following: PyCharm, Sublime, Aptana, Emacs, Vim and Eclipse (with PyDev). 8. **I have problems reading my dicom files using nibabel, what should I do?** Use Chris Rorden's dcm2nii to transform them to nifti files. http://www.cabiatl.com/mricro/mricron/dcm2nii.html Or you can make your own reader using pydicom. http://code.google.com/p/pydicom/ and then use nibabel to store the data as niftis. 9. **Where can I find diffusion data?** Have a look at Beijing Enhanced http://fcon_1000.projects.nitrc.org/indi/IndiRetro.html .. 
include:: links_names.inc dipy-0.13.0/doc/gimbal_lock.rst000066400000000000000000000153711317371701200163130ustar00rootroot00000000000000.. _gimbal-lock: ============= Gimbal lock ============= See also: http://en.wikipedia.org/wiki/Gimbal_lock Euler angles have a major deficiency, and that is, that it is possible, in some rotation sequences, to reach a situation where two of the three Euler angles cause rotation around the same axis of the object. In the case below, rotation around the $x$ axis becomes indistinguishable in its effect from rotation around the $z$ axis, so the $z$ and $x$ axis angles collapse into one transformation, and the rotation reduces from three degrees of freedom to two. Imagine that we are using the Euler angle convention of starting with a rotation around the $x$ axis, followed by the $y$ axis, followed by the $z$ axis. Here we see a Spitfire aircraft, flying across the screen. The $x$ axis is left to right (tail to nose), the $y$ axis is from the left wing tip to the right wing tip (going away from the screen), and the $z$ axis is from bottom to top: .. image:: images/spitfire_0.png Imagine we wanted to do a slight roll with the left wing tilting down (rotation about $x$) like this: .. image:: images/spitfire_x.png followed by a violent pitch so we are pointing straight up (rotation around $y$ axis): .. image:: images/spitfire_y.png Now we'd like to do a turn of the nose towards the viewer (and the tail away from the viewer): .. image:: images/spitfire_hoped.png But, wait, let's go back over that again. Look at the result of the rotation around the $y$ axis. Notice that the $x$ axis, as was, is now aligned with the $z$ axis, as it is now. Rotating around the $z$ axis will have exactly the same effect as adding an extra rotation around the $x$ axis at the beginning. That means that, when there is a $y$ axis rotation that rotates the $x$ axis onto the $z$ axis (a rotation of $\pm\pi/2$ around the $y$ axis) - the $x$ and $y$ axes are "locked" together. Mathematics of gimbal lock ========================== We see gimbal lock for this type of Euler axis convention, when $\cos(\beta) = 0$, where $\beta$ is the angle of rotation around the $y$ axis. By "this type of convention" we mean using rotation around all 3 of the $x$, $y$ and $z$ axes, rather than using the same axis twice - e.g. the physics convention of $z$ followed by $x$ followed by $z$ axis rotation (the physics convention has different properties to its gimbal lock). We can show how gimbal lock works by creating a rotation matrix for the three component rotations. Recall that, for a rotation of $\alpha$ radians around $x$, followed by a rotation $\beta$ around $y$, followed by rotation $\gamma$ around $z$, the rotation matrix $R$ is: .. 
math:: R = \left(\begin{smallmatrix}\operatorname{cos}\left(\beta\right) \operatorname{cos}\left(\gamma\right) & - \operatorname{cos}\left(\alpha\right) \operatorname{sin}\left(\gamma\right) + \operatorname{cos}\left(\gamma\right) \operatorname{sin}\left(\alpha\right) \operatorname{sin}\left(\beta\right) & \operatorname{sin}\left(\alpha\right) \operatorname{sin}\left(\gamma\right) + \operatorname{cos}\left(\alpha\right) \operatorname{cos}\left(\gamma\right) \operatorname{sin}\left(\beta\right)\\\operatorname{cos}\left(\beta\right) \operatorname{sin}\left(\gamma\right) & \operatorname{cos}\left(\alpha\right) \operatorname{cos}\left(\gamma\right) + \operatorname{sin}\left(\alpha\right) \operatorname{sin}\left(\beta\right) \operatorname{sin}\left(\gamma\right) &- \operatorname{cos}\left(\gamma\right) \operatorname{sin}\left(\alpha\right) + \operatorname{cos}\left(\alpha\right) \operatorname{sin}\left(\beta\right) \operatorname{sin}\left(\gamma\right)\\- \operatorname{sin}\left(\beta\right) & \operatorname{cos}\left(\beta\right) \operatorname{sin}\left(\alpha\right) & \operatorname{cos}\left(\alpha\right) \operatorname{cos}\left(\beta\right)\end{smallmatrix}\right) When $\cos(\beta) = 0$, $\sin(\beta) = \pm1$ and $R$ simplifies to: .. math:: R = \left(\begin{smallmatrix}0 & - \operatorname{cos}\left(\alpha\right) \operatorname{sin}\left(\gamma\right) + \pm{1} \operatorname{cos}\left(\gamma\right) \operatorname{sin}\left(\alpha\right) & \operatorname{sin}\left(\alpha\right) \operatorname{sin}\left(\gamma\right) + \pm{1} \operatorname{cos}\left(\alpha\right) \operatorname{cos}\left(\gamma\right)\\0 & \operatorname{cos}\left(\alpha\right) \operatorname{cos}\left(\gamma\right) + \pm{1} \operatorname{sin}\left(\alpha\right) \operatorname{sin}\left(\gamma\right) & - \operatorname{cos}\left(\gamma\right) \operatorname{sin}\left(\alpha\right) + \pm{1} \operatorname{cos}\left(\alpha\right) \operatorname{sin}\left(\gamma\right)\\- \pm{1} & 0 & 0\end{smallmatrix}\right) When $\sin(\beta) = 1$: .. math:: R = \left(\begin{smallmatrix}0 & \operatorname{cos}\left(\gamma\right) \operatorname{sin}\left(\alpha\right) - \operatorname{cos}\left(\alpha\right) \operatorname{sin}\left(\gamma\right) & \operatorname{cos}\left(\alpha\right) \operatorname{cos}\left(\gamma\right) + \operatorname{sin}\left(\alpha\right) \operatorname{sin}\left(\gamma\right)\\0 & \operatorname{cos}\left(\alpha\right) \operatorname{cos}\left(\gamma\right) + \operatorname{sin}\left(\alpha\right) \operatorname{sin}\left(\gamma\right) & \operatorname{cos}\left(\alpha\right) \operatorname{sin}\left(\gamma\right) - \operatorname{cos}\left(\gamma\right) \operatorname{sin}\left(\alpha\right)\\-1 & 0 & 0\end{smallmatrix}\right) From the `angle sum and difference identities `_ (see also `geometric proof `_, `Mathworld treatment `_) we remind ourselves that, for any two angles $\alpha$ and $\beta$: .. math:: \sin(\alpha \pm \beta) = \sin \alpha \cos \beta \pm \cos \alpha \sin \beta \, \cos(\alpha \pm \beta) = \cos \alpha \cos \beta \mp \sin \alpha \sin \beta We can rewrite $R$ as: .. math:: R = \left(\begin{smallmatrix}0 & V_{1} & V_{2}\\0 & V_{2} & - V_{1}\\-1 & 0 & 0\end{smallmatrix}\right) where: .. 
math:: V_1 = \operatorname{cos}\left(\gamma\right) \operatorname{sin}\left(\alpha\right) - \operatorname{cos}\left(\alpha\right) \operatorname{sin}\left(\gamma\right) = \sin(\alpha - \gamma) \, V_2 = \operatorname{cos}\left(\alpha\right) \operatorname{cos}\left(\gamma\right) + \operatorname{sin}\left(\alpha\right) \operatorname{sin}\left(\gamma\right) = \cos(\alpha - \gamma) We immediately see that $\alpha$ and $\gamma$ are going to lead the same transformation - the mathematical expression of the observation on the spitfire above, that rotation around the $x$ axis is equivalent to rotation about the $z$ axis. It's easy to do the same set of reductions, with the same conclusion, for the case where $\sin(\beta) = -1$ - see http://www.gregslabaugh.name/publications/euler.pdf. dipy-0.13.0/doc/glossary.rst000066400000000000000000000071021317371701200157040ustar00rootroot00000000000000========== Glossary ========== .. glossary:: Affine matrix A matrix implementing an :term:`affine transformation` in :term:`homogenous coordinates`. For a 3 dimensional transform, the matrix is shape 4 by 4. Affine transformation See `wikipedia affine`_ definition. An affine transformation is a :term:`linear transformation` followed by a translation. Axis angle A representation of rotation. See: `wikipedia axis angle`_ . From Euler's rotation theorem we know that any rotation or sequence of rotations can be represented by a single rotation about an axis. The axis $\boldsymbol{\hat{u}}$ is a :term:`unit vector`. The angle is $\theta$. The :term:`rotation vector` is a more compact representation of $\theta$ and $\boldsymbol{\hat{u}}$. Euclidean norm Also called Euclidean length, or L2 norm. The Euclidean norm $\|\mathbf{x}\|$ of a vector $\mathbf{x}$ is given by: .. math:: \|\mathbf{x}\| := \sqrt{x_1^2 + \cdots + x_n^2} Pure Pythagoras. Euler angles See: `wikipedia Euler angles`_ and `Mathworld Euler angles`_. Gimbal lock See :ref:`gimbal-lock` Homogenous coordinates See `wikipedia homogenous coordinates`_ Linear transformation A linear transformation is one that preserves lines - that is, if any three points are on a line before transformation, they are also on a line after transformation. See `wikipedia linear transform`_. Rotation, scaling and shear are linear transformations. Quaternion See: `wikipedia quaternion`_. An extension of the complex numbers that can represent a rotation. Quaternions have 4 values, $w, x, y, z$. $w$ is the *real* part of the quaternion and the vector $x, y, z$ is the *vector* part of the quaternion. Quaternions are less intuitive to visualize than :term:`Euler angles` but do not suffer from :term:`gimbal lock` and are often used for rapid interpolation of rotations. Reflection A transformation that can be thought of as transforming an object to its mirror image. The mirror in the transformation is a plane. A plan can be defined with a point and a vector normal to the plane. See `wikipedia reflection`_. Rotation matrix See `wikipedia rotation matrix`_. A rotation matrix is a matrix implementing a rotation. Rotation matrices are square and orthogonal. That means, that the rotation matrix $R$ has columns and rows that are :term:`unit vector`, and where $R^T R = I$ ($R^T$ is the transpose and $I$ is the identity matrix). Therefore $R^T = R^{-1}$ ($R^{-1}$ is the inverse). Rotation matrices also have a determinant of $1$. Rotation vector A representation of an :term:`axis angle` rotation. 
The angle $\theta$ and unit vector axis $\boldsymbol{\hat{u}}$ are stored in a *rotation vector* $\boldsymbol{u}$, such that: .. math:: \theta = \|\boldsymbol{u}\| \, \boldsymbol{\hat{u}} = \frac{\boldsymbol{u}}{\|\boldsymbol{u}\|} where $\|\boldsymbol{u}\|$ is the :term:`Euclidean norm` of $\boldsymbol{u}$ Shear matrix Square matrix that results in shearing transforms - see `wikipedia shear matrix`_. Unit vector A vector $\boldsymbol{\hat{u}}$ with a :term:`Euclidean norm` of 1. Normalized vector is a synonym. The "hat" over the $\boldsymbol{\hat{u}}$ is a convention to express the fact that it is a unit vector. .. include:: links_names.inc dipy-0.13.0/doc/index.rst000066400000000000000000000122471317371701200151560ustar00rootroot00000000000000.. _home: ########################### Diffusion Imaging In Python ########################### Dipy_ is a **free** and **open source** software project for computational neuroanatomy, focusing mainly on **diffusion** *magnetic resonance imaging* (dMRI) analysis. It implements a broad range of algorithms for denoising, registration, reconstruction, tracking, clustering, visualization, and statistical analysis of MRI data. ********** Highlights ********** **DIPY 0.13.0** is now available. New features include: - Faster local PCA implementation. - Fixed different issues with OpenMP and Windows / OSX. - Replacement of cvxopt by cvxpy. - Replacement of Pytables by h5py. - Updated API to support latest numpy version (1.14). - New user interfaces for visualization. - Large documentation update. **DIPY 0.12.0** is now available. New features include: - IVIM Simultaneous modeling of perfusion and diffusion. - MAPL, tissue microstructure estimation using Laplacian-regularized MAP-MRI. - DKI-based microstructural modelling. - Free water diffusion tensor imaging. - Denoising using Local PCA. - Streamline-based registration (SLR). - Fiber to bundle coherence (FBC) measures. - Bayesian MRF-based tissue classification. - New API for integrated user interfaces. - New hdf5 file (.pam5) for saving reconstruction results. - Interactive slicing of images, ODFs and peaks. - Updated API to support latest numpy versions. - New system for automatically generating command line interfaces. - Faster computation of cross correlation for image registration. **DIPY 0.11.0** is now available. New features include: - New framework for contextual enhancement of ODFs. - Compatibility with numpy (1.11). - Compatibility with VTK 7.0 which supports Python 3.x. - Faster PIESNO for noise estimation. - Reorient gradient directions according to motion correction parameters. - Supporting Python 3.3+ but not 3.2. - Reduced memory usage in DTI. - DSI now can use datasets with multiple b0s. - Fixed different issues with Windows 64bit and Python 3.5. **DIPY 0.10.1** is now available. New features in this release include: - Compatibility with new versions of scipy (0.16) and numpy (1.10). - New cleaner visualization API, including compatibility with VTK 6, and functions to create your own interactive visualizations. - Diffusion Kurtosis Imaging (DKI): Google Summer of Code work by Rafael Henriques. - Mean Apparent Propagator (MAP) MRI for tissue microstructure estimation. - Anisotropic Power Maps from spherical harmonic coefficients. - A new framework for affine registration of images. See :ref:`older highlights `. ************* Announcements ************* - :ref:`DIPY 0.13 ` released October 24, 2017. - :ref:`DIPY 0.12 ` released June 26, 2017. - :ref:`DIPY 0.11 ` released February 21, 2016. 
- :ref:`DIPY 0.10 ` released December 4, 2015. - :ref:`DIPY 0.9.2 ` released, March 18, 2015. - :ref:`DIPY 0.8.0 ` released, January 6, 2015. - dipy_ was an official exhibitor in `HBM 2015 `_. - DIPY was featured in `The Scientist Magazine `_, Nov, 2014. - `DIPY paper`_ accepted in Frontiers of Neuroinformatics, January 22nd, 2014. See some of our :ref:`past announcements ` *************** Getting Started *************** Here is a quick snippet showing how to calculate `color FA` also known as the DEC map. We use a Tensor model to reconstruct the datasets which are saved in a Nifti file along with the b-values and b-vectors which are saved as text files. Finally, we save our result as a Nifti file :: fdwi = 'dwi.nii.gz' fbval = 'dwi.bval' fbvec = 'dwi.bvec' from dipy.io.image import load_nifti, save_nifti from dipy.io import read_bvals_bvecs from dipy.core.gradients import gradient_table from dipy.reconst.dti import TensorModel data, affine = load_nifti(fdwi) bvals, bvecs = read_bvals_bvecs(fbval, fbvec) gtab = gradient_table(bvals, bvecs) tenmodel = TensorModel(gtab) tenfit = tenmodel.fit(data) save_nifti('colorfa.nii.gz', tenfit.color_fa, affine) As an exercise try to calculate `color FA` with your datasets. You will need to replace the filepaths `fimg`, `fbval` and `fbvec`. Here is what a slice should look like. .. image:: _static/colorfa.png :align: center ********** Next Steps ********** You can learn more about how you to use dipy_ with your datasets by reading the examples in our :ref:`documentation`. .. We need the following toctree directive to include the documentation .. in the document hierarchy - see http://sphinx.pocoo.org/concepts.html .. toctree:: :hidden: documentation stateoftheart ******* Support ******* We acknowledge support from the following organizations: - The department of Intelligent Systems Engineering of Indiana University. - The Gordon and Betty Moore Foundation and the Alfred P. Sloan Foundation, through the University of Washington eScience Institute Data Science Environment. - Google supported DIPY through the Google Summer of Code Program during Summer 2015 and 2016. .. include:: links_names.inc dipy-0.13.0/doc/installation.rst000066400000000000000000000306251317371701200165500ustar00rootroot00000000000000.. _installation: ############ Installation ############ dipy_ is in active development. You can install it from our latest release, but you may find that the release has gotten well behind the current development - at least - we hope so - if we're developing fast enough! If you want install the latest and greatest from the bleeding edge of the development, skip to :ref:`from-source`. If you just want to install a released version, read on for your platform. ******************** Installing a release ******************** If you are on Debian or Ubuntu Linux we recommend you try :ref:`install-packages` first. Otherwise please try :ref:`install-pip`. .. _install-packages: Using Anaconda: =============== On all platforms, you can use Anaconda_ to install DIPY. To do so issue the following command in a terminal:: conda install dipy -c conda-forge Some of the visualization methods require the VTK_ library and this can be installed separately (for the time being only on Python 2.7 and Python 3.6):: conda install -c conda-forge vtk For OSX users, VTK_ is not available on conda-forge channel, so we recommend to use the following one:: conda install -c clinicalgraphics vtk Using packages: =============== Windows ------- #. 
First, install the python library dependencies. One easy way to do that is to use the Anaconda_ distribution (see below for :ref:`alternatives`). #. Even with Anaconda_ installed, you will still need to install the nibabel_ library, which supports reading and writing of neuroimaging data formats. Open a terminal and type :: pip install nibabel #. Finally, we are ready to install DIPY itself. Same as with `nibabel` above, we will type at the terminal shell command line :: pip install dipy When the installation has finished we can check if it is successful in the following way. From a Python console script try :: >>> import dipy This should work with no error. #. Some of the visualization methods require the VTK_ library and this can be installed using Anaconda_ :: conda install -c conda-forge vtk OSX --- #. To use dipy_, you need to have some :ref:`dependencies` installed. First of all, make sure that you have installed the Apple Xcode_ developer tools. You'll need those to install all the following dependencies. #. Next, install the python library dependencies. One easy way to do that is to use the Anaconda_ distribution (see below for :ref:`alternatives`). #. Even with Anaconda_ installed, you will still need to install the nibabel_ library, which supports reading and writing of neuroimaging data formats. Open a terminal and type :: pip install nibabel #. Finally, we are ready to install DIPY itself. Same as with `nibabel` above, we will type at the terminal shell command line :: pip install dipy When the installation has finished we can check if it is successful in the following way. From a Python console script try :: >>> import dipy This should work with no error. #. Some of the visualization methods require the VTK_ library and this can be installed using Anaconda_ :: conda install -c clinicalgraphics vtk Linux ----- For Debian, Ubuntu and Mint set up the NeuroDebian_ repositories - see `NeuroDebian how to`_. Then:: sudo apt-get install python-dipy We hope to get packages for the other Linux distributions, but for now, please try :ref:`install-pip` instead. .. _install-pip: Using pip: ========== This method should work under Linux, Mac OS X and Windows. Please install numpy_ and scipy_ using their respective binary installers if you haven't already. For Windows and Mac OSX you can use Anaconda_ to get numpy, scipy, cython and lots of other useful python module. Anaconda_ is a big package but will install many tools and libraries that are useful for scientific processing. When you have numpy, scipy and cython installed then try :: pip install nibabel pip install dipy Then from any python console or script try :: >>> import dipy ******* Support ******* Contact us: =========== Do these installation instructions work for you? For any problems/suggestions please let us know by sending us an e-mail to the `nipy mailing list`_ with the subject line starting with ``[dipy]``. Common problems: ================ Multiple installations ---------------------- Make sure that you have uninstalled all previous versions of DIPY before installing a new one. A simple and general way to uninstall DIPY is by removing the installation directory. You can find where DIPY is installed by using:: import dipy dipy.__file__ and then remove the Dipy directory that contains that file. .. _alternatives: Alternatives to Anaconda ------------------------- If you have problems installing Anaconda_ we recommend using Canopy_ or pythonxy_. Memory issues ------------- DIPY can process large diffusion datasets. 
For this reason we recommend using a 64bit operating system which can allocate larger memory chunks than 32bit operating systems. If you don't have a 64bit computer that is okay DIPY works with 32bit too. .. _python-versions: Note on python versions ----------------------- Most of the functionality in DIPY supports versions of Python from 2.6 to 3.5. However, some visualization functionality depends on VTK_, which currently does not work with Python 3 versions. Therefore, if you want to use the visualization functions in DIPY, please use it with Python 2. .. _from-source: ********************** Installing from source ********************** Getting the source ================== More likely you will want to get the source repository to be able to follow the latest changes. In that case, you can use:: git clone https://github.com/nipy/dipy.git For more information about this see :ref:`following-latest`. After you've cloned the repository, you will have a new directory, containing the DIPY ``setup.py`` file, among others. We'll call this directory - that contains the ``setup.py`` file - the *DIPY source root directory*. Sometimes we'll also call it the ```` directory. Building and installing ======================= Install from source (all operating systems) ------------------------------------------- Change directory into the *DIPY source root directory*. To clean your directory from temporary file, use:: git clean -fxd This command will delete all files not present in your github repository. Then, complete your installation by using this command:: pip install --user -e . This command will do the following : - remove the old dipy installation if present - build dipy (equivalent to `python setup.py build_ext --inplace`) - install dipy locally on your user environment .. _install-source-nix: Install from source for Unix (e.g Linux, OSX) --------------------------------------------- Change directory into the *DIPY source root directory*. To install for the system:: python setup.py install To build DIPY in the source tree (locally) so you can run the code in the source tree (recommended for following the latest source) run:: python setup.py build_ext --inplace add the *DIPY source root directory* into your ``PYTHONPATH`` environment variable. Search google for ``PYTHONPATH`` for details or see `python module path`_ for an introduction. When adding dipy_ to the ``PYTHONPATH``, we usually add the ``PYTHONPATH`` at the end of ``~/.bashrc`` or (OSX) ``~/.bash_profile`` so we don't need to retype it every time. This should look something like:: export PYTHONPATH=/home/user_dir/Devel/dipy:$PYTHONPATH After changing the ``~/.bashrc`` or (OSX) ``~/.bash_profile`` try:: source ~/.bashrc or:: source ~/.bash_profile so that you can have immediate access to dipy_ without needing to restart your terminal. Ubuntu/Debian ------------- :: sudo apt-get install python-dev python-setuptools sudo apt-get install python-numpy python-scipy sudo apt-get install cython then:: sudo pip install nibabel (we need the latest version of this one - hence ``pip`` rather than ``apt-get``). You might want the optional packages too (highly recommended):: sudo apt-get install ipython python-h5py python-vtk python-matplotlib Now follow :ref:`install-source-nix`. Fedora / Mandriva maybe Redhat ------------------------------ Same as above but use yum rather than apt-get when necessary. Now follow :ref:`install-source-nix`. Windows ------- Anaconda_ is probably the easiest way to install the dependencies that you need. 
To build from source, you will also need to install the exact compiler which is used with your specific version of python. For getting this information, type this command in shell like ``cmd`` or Powershell_:: python -c "import platform;print(platform.python_compiler())" This command should print an information of this form:: MSC v.1900 64 bit (AMD64) Now that you find the relevant compiler, you have to install the VisualStudioBuildTools_ by respecting the following table:: Visual C++ 2008 (9.0) MSC_VER=1500 Visual C++ 2010 (10.0) MSC_VER=1600 Visual C++ 2012 (11.0) MSC_VER=1700 Visual C++ 2013 (12.0) MSC_VER=1800 Visual C++ 2015 (14.0) MSC_VER=1900 Visual C++ 2017 (15.0) MSC_VER=1910 After the VisualStudioBuildTools_ installation, restart a command shell and change directory into the *DIPY source root directory*. To install into your system:: python setup.py install To install inplace - so that DIPY is running out of the source code directory:: python setup.py develop (this is the mode we recommend for following the latest source code). If you get an error with ``python setup.py develop`` make sure you have installed `setuptools`_. If you get an error saying "unable to find vcvarsall.bat" then you need to check your environment variable ``PATH`` or reinstall VisualStudioBuildTools_. Distutils should automatically detect the compiler and use it. OSX --- Make sure you have Xcode_ and Anaconda_ installed. From here follow the :ref:`install-source-nix` instructions. OpenMP with OSX --------------- OpenMP_ is a standard library for efficient multithreaded applications. This is used in DIPY for speeding up many different parts of the library (e.g., denoising and bundle registration). If you do not have an OpenMP-enabled compiler, you can still compile DIPY from source using the above instructions, but it might not take advantage of the multithreaded parts of the code. To be able to compile DIPY from source with OpenMP on Mac OSX, you will have to do a few more things. First of all, you will need to install the Homebrew_ package manager. Next you will need to install and configure the compiler. You have two options: using the GCC compiler or the CLANG compiler. This depends on your python installation: Under Anaconda ~~~~~~~~~~~~~~~~ If you are using Anaconda_, you will need to use GCC. THe first option is to run the following command:: conda install gcc After this installation, gcc will be your default compiler in Anaconda_ environment. The second option is to install gcc via homebrew. Run the following:: brew reinstall gcc --without-multilib This should take about 45 minutes to complete. Then add to your bash configuration (usually in ``~/.bash_profile``), the following:: export PATH="/usr/local/Cellar/gcc/5.2.0/bin/gcc-5:$PATH Under Homebrew Python or python.org Python ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If you are already using the Homebrew Python, or the standard python.org Python, you will need to use the CLANG compiler with OMP. Run:: brew install clang-omp And then edit the ``setup.py`` file to include the following line (e.g., on line 14, at the top of the file, but after the initial imports):: os.environ['CC'] = '/usr/local/bin/clang-omp' Building and installing ~~~~~~~~~~~~~~~~~~~~~~~ Whether you are using Anaconda_ or Hombrew/python.org Python, you will need to then run ``python setup.py install``. When you do that, it should now compile the code with this OpenMP-enabled compiler, and things should go faster! 
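Whichever compiler route you take, it is worth confirming afterwards that Python really imports the copy of DIPY you just built, and not an older installation lying elsewhere on your path. The following is only a minimal sanity check, reusing the ``dipy.__file__`` trick from the "Multiple installations" section above; it assumes nothing beyond an importable ``dipy`` package::

    >>> import dipy
    >>> print(dipy.__version__)   # should report the version you just built
    >>> print(dipy.__file__)      # shows which installation is being imported

If the reported path is your source tree (for an in-place build) or your ``site-packages`` directory (for a regular install), the freshly built package is the one in use; otherwise remove the stale installation as described earlier and rebuild.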
Testing ======== If you want to run the tests:: sudo pip install nose Then (in python or ipython_):: >>> import dipy >>> dipy.test() You can also run the examples in ``/doc``. Documentation (Unix only) ========================= To build the documentation in HTML in your computer you will need to do:: sudo pip install sphinx Then change directory to ```` and:: cd doc make clean make html .. include:: links_names.inc dipy-0.13.0/doc/introduction.rst000066400000000000000000000014101317371701200165560ustar00rootroot00000000000000.. _introduction: =============== What is DIPY? =============== * **a python package** for analyzing ``diffusion MRI data`` * a free and open project to collaborate and **share** your code and expertise. Want to know more? Read our :ref:`documentation`, :ref:`installation` guidelines and try the :ref:`examples`. Didn't find what you are looking for? Then try :ref:`faq` and then if this doesn't help send an e-mail to our e-mail list neuroimaging@python.org with subject starting with ``[Dipy]``. .. figure:: _static/pretty_tracks.png :align: center This is a depiction of tractography created with DIPY. If you want to learn more how you can create these with your datasets read the examples in our :ref:`documentation` . .. include:: links_names.inc dipy-0.13.0/doc/links_names.inc000066400000000000000000000200311317371701200163010ustar00rootroot00000000000000.. This (-*- rst -*-) format file contains commonly used link targets and name substitutions. It may be included in many files, therefore it should only contain link targets and name substitutions. Try grepping for "^\.\. _" to find plausible candidates for this list. .. NOTE: reST targets are __not_case_sensitive__, so only one target definition is needed for nipy, NIPY, Nipy, etc... .. _nipy: http://nipy.org .. _`Brain Imaging Center`: http://bic.berkeley.edu/ .. _dipy: http://dipy.org .. _`dipy github`: http://github.com/nipy/dipy .. _dipy pypi: http://pypi.python.org/pypi/dipy .. _nipy issues: http://github.com/nipy/nipy/issues .. _dipy issues: http://github.com/nipy/dipy/issues .. _dipy paper: http://www.frontiersin.org/Neuroinformatics/10.3389/fninf.2014.00008/abstract .. _journal paper: http://www.frontiersin.org/Neuroinformatics/10.3389/fninf.2014.00008/abstract .. _nibabel: http://nipy.org/nibabel .. _nibabel pypi: http://pypi.python.org/pypi/nibabel .. _nipy development guidelines: http://nipy.org/devel .. _buildbots: http://nipy.bic.berkeley.edu/builders .. _`dipy gitter`: https://gitter.im/nipy/dipy .. _neurostars: https://neurostars.org/ .. Packaging .. _neurodebian: http://neuro.debian.net .. _neurodebian how to: http://neuro.debian.net/#how-to-use-this-repository .. _pip: http://www.pip-installer.org/en/latest/ .. _easy_install: https://pypi.python.org/pypi/setuptools .. _homebrew: http://brew.sh/ .. Documentation tools .. _graphviz: http://www.graphviz.org/ .. _Sphinx: http://sphinx.pocoo.org/ .. _`Sphinx reST`: http://sphinx.pocoo.org/rest.html .. _reST: http://docutils.sourceforge.net/rst.html .. _docutils: http://docutils.sourceforge.net .. Licenses .. _GPL: http://www.gnu.org/licenses/gpl.html .. _BSD: http://www.opensource.org/licenses/bsd-license.php .. _LGPL: http://www.gnu.org/copyleft/lesser.html .. Working process .. _pynifti: http://niftilib.sourceforge.net/pynifti/ .. _nifticlibs: http://nifti.nimh.nih.gov .. _nifti: http://nifti.nimh.nih.gov .. _`nipy launchpad`: https://launchpad.net/nipy .. _launchpad: https://launchpad.net/ .. 
_`nipy trunk`: https://code.launchpad.net/~nipy-developers/nipy/trunk .. _`nipy mailing list`: https://mail.python.org/mailman/listinfo/neuroimaging .. _`nipy bugs`: https://bugs.launchpad.net/nipy .. _pep8: http://www.python.org/dev/peps/pep-0008/ .. _`numpy coding style`: http://scipy.org/scipy/numpy/wiki/CodingStyleGuidelines .. _python module path: http://docs.python.org/tutorial/modules.html#the-module-search-path .. Code support stuff .. _pychecker: http://pychecker.sourceforge.net/ .. _pylint: http://www.logilab.org/project/pylint .. _pyflakes: http://divmod.org/trac/wiki/DivmodPyflakes .. _virtualenv: http://pypi.python.org/pypi/virtualenv .. _git: http://git.or.cz/ .. _github: http://github.com .. _flymake: http://flymake.sourceforge.net/ .. _rope: http://rope.sourceforge.net/ .. _pymacs: http://pymacs.progiciels-bpi.ca/pymacs.html .. _ropemacs: http://rope.sourceforge.net/ropemacs.html .. _ECB: http://ecb.sourceforge.net/ .. _emacs_python_mode: http://www.emacswiki.org/cgi-bin/wiki/PythonMode .. _doctest-mode: http://www.cis.upenn.edu/~edloper/projects/doctestmode/ .. _bazaar: http://bazaar-vcs.org/ .. _nose: http://somethingaboutorange.com/mrl/projects/nose .. _`python coverage tester`: http://nedbatchelder.com/code/modules/coverage.html .. _cython: http://cython.org .. _travis-ci: https://travis-ci.org/ .. Other python projects .. _numpy: http://numpy.scipy.org .. _scipy: http://www.scipy.org .. _ipython: http://ipython.scipy.org .. _`ipython manual`: http://ipython.scipy.org/doc/manual/html .. _matplotlib: http://matplotlib.sourceforge.net .. _pythonxy: http://www.pythonxy.com .. _ETS: http://code.enthought.com/projects/tool-suite.php .. _`Enthought Tool Suite`: http://code.enthought.com/projects/tool-suite.php .. _canopy: https://www.enthought.com/products/canopy .. _anaconda: http://continuum.io/downloads .. _python: http://www.python.org .. _mayavi: http://mayavi.sourceforge.net/ .. _sympy: http://code.google.com/p/sympy/ .. _networkx: http://networkx.lanl.gov/ .. _setuptools: http://pypi.python.org/pypi/setuptools .. _distribute: http://packages.python.org/distribute .. _datapkg: http://okfn.org/projects/datapkg .. _pytables: http://www.pytables.org .. _python-vtk: http://www.vtk.org .. Python imaging projects .. _PyMVPA: http://www.pymvpa.org .. _BrainVISA: http://brainvisa.info .. _anatomist: http://brainvisa.info .. _pydicom: http://code.google.com/p/pydicom/ .. Not so python imaging projects .. _matlab: http://www.mathworks.com .. _spm: http://www.fil.ion.ucl.ac.uk/spm .. _spm8: http://www.fil.ion.ucl.ac.uk/spm/software/spm8 .. _eeglab: http://sccn.ucsd.edu/eeglab .. _AFNI: http://afni.nimh.nih.gov/afni .. _FSL: http://www.fmrib.ox.ac.uk/fsl .. _FreeSurfer: http://surfer.nmr.mgh.harvard.edu .. _voxbo: http://www.voxbo.org .. _mricron: http://www.mccauslandcenter.sc.edu/mricro/mricron/index.html .. _slicer: http://www.slicer.org/ .. _fibernavigator: https://github.com/scilus/fibernavigator .. File formats .. _DICOM: http://medical.nema.org/ .. _`wikipedia DICOM`: http://en.wikipedia.org/wiki/Digital_Imaging_and_Communications_in_Medicine .. _GDCM: http://sourceforge.net/apps/mediawiki/gdcm .. _`DICOM specs`: ftp://medical.nema.org/medical/dicom/2009/ .. _`DICOM object definitions`: ftp://medical.nema.org/medical/dicom/2009/09_03pu3.pdf .. _dcm2nii: http://www.cabiatl.com/mricro/mricron/dcm2nii.html .. _`mricron install`: http://www.cabiatl.com/mricro/mricron/install.html .. _dicom2nrrd: http://www.slicer.org/slicerWiki/index.php/Modules:DicomToNRRD-3.4 .. 
_Nrrd: http://teem.sourceforge.net/nrrd/format.html .. General software .. _gcc: http://gcc.gnu.org .. _xcode: http://developer.apple.com/TOOLS/xcode .. _mingw: http://www.mingw.org/wiki/Getting_Started .. _mingw distutils bug: http://bugs.python.org/issue2698 .. _cygwin: http://cygwin.com .. _macports: http://www.macports.org/ .. _VTK: http://www.vtk.org/ .. _ITK: http://www.itk.org/ .. _swig: http://www.swig.org .. _openmp: www.openmp.org/ .. Windows development .. _mingw: http://www.mingw.org/wiki/Getting_Started .. _msys: http://www.mingw.org/wiki/MSYS .. _powershell: http://www.microsoft.com/powershell .. _msysgit: http://code.google.com/p/msysgit .. _putty: http://www.chiark.greenend.org.uk/~sgtatham/putty .. _visualstudiobuildtools: http://landinghub.visualstudio.com/visual-cpp-build-tools .. Functional imaging labs .. _`functional imaging laboratory`: http://www.fil.ion.ucl.ac.uk .. _FMRIB: http://www.fmrib.ox.ac.uk .. Other organizations .. _enthought: .. _kitware: http://www.kitware.com .. _nitrc: http://www.nitrc.org .. General information links .. _`wikipedia FMRI`: http://en.wikipedia.org/wiki/Functional_magnetic_resonance_imaging .. _`wikipedia PET`: http://en.wikipedia.org/wiki/Positron_emission_tomography .. Mathematical methods .. _`wikipedia ICA`: http://en.wikipedia.org/wiki/Independent_component_analysis .. _`wikipedia PCA`: http://en.wikipedia.org/wiki/Principal_component_analysis .. Mathematical ideas .. _`wikipedia spherical coordinate system`: http://en.wikipedia.org/wiki/Spherical_coordinate_system .. _`mathworld spherical coordinate system`: http://mathworld.wolfram.com/SphericalCoordinates.html .. _`wikipedia affine`: http://en.wikipedia.org/wiki/Affine_transformation .. _`wikipedia linear transform`: http://en.wikipedia.org/wiki/Linear_transformation .. _`wikipedia rotation matrix`: http://en.wikipedia.org/wiki/Rotation_matrix .. _`wikipedia homogenous coordinates`: http://en.wikipedia.org/wiki/Homogeneous_coordinates .. _`wikipedia axis angle`: http://en.wikipedia.org/wiki/Axis_angle .. _`wikipedia Euler angles`: http://en.wikipedia.org/wiki/Euler_angles .. _`Mathworld Euler angles`: http://mathworld.wolfram.com/EulerAngles.html .. _`wikipedia quaternion`: http://en.wikipedia.org/wiki/Quaternion .. _`wikipedia shear matrix`: http://en.wikipedia.org/wiki/Shear_matrix .. _`wikipedia reflection`: http://en.wikipedia.org/wiki/Reflection_(mathematics) .. _`wikipedia direction cosine`: http://en.wikipedia.org/wiki/Direction_cosine .. vim:syntax=rst dipy-0.13.0/doc/make.bat000066400000000000000000000056131317371701200147210ustar00rootroot00000000000000@ECHO OFF REM Command file for Sphinx documentation set SPHINXBUILD=sphinx-build set ALLSPHINXOPTS=-d _build/doctrees %SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. changes to make an overview over all changed/added/deprecated items echo. linkcheck to check all external links for integrity echo. 
doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (_build\*) do rmdir /q /s %%i del /q /s _build\* goto end ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% _build/html echo. echo.Build finished. The HTML pages are in _build/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% _build/dirhtml echo. echo.Build finished. The HTML pages are in _build/dirhtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% _build/pickle echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% _build/json echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% _build/htmlhelp echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in _build/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% _build/qthelp echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in _build/qthelp, like this: echo.^> qcollectiongenerator _build\qthelp\dipy.qhcp echo.To view the help file: echo.^> assistant -collectionFile _build\qthelp\dipy.ghc goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% _build/latex echo. echo.Build finished; the LaTeX files are in _build/latex. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% _build/changes echo. echo.The overview file is in _build/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% _build/linkcheck echo. echo.Link check complete; look for any errors in the above output ^ or in _build/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% _build/doctest echo. echo.Testing of doctests in the sources finished, look at the ^ results in _build/doctest/output.txt. goto end ) :end dipy-0.13.0/doc/mission.rst000066400000000000000000000010641317371701200155230ustar00rootroot00000000000000.. _mission: =================== Mission statement =================== Mission of Statement The purpose of dipy_ is to make it **easier to do better diffusion MR imaging research**. Following up with the nipy mission statement we aim to build software that is * **clearly written** * **clearly explained** * **a good fit for the underlying ideas** * **a natural home for collaboration** We hope that, if we fail to do this, you will let us know and we will try and make it better. See also :ref:`introduction` .. include:: links_names.inc dipy-0.13.0/doc/note_about_examples.rst000066400000000000000000000016021317371701200200750ustar00rootroot00000000000000========================= A note about the examples ========================= The examples here are some uses of the analysis and visualization functionality of dipy_, with example data from actual neuroscience experiments, or with synthetic data, which is generated as part of the example. All the examples presented in the documentation are generated from *fully functioning* python scripts, which are available as part of the source distribution in the doc/examples folder. If you want to replicate a particular analysis or visualization, simply copy the relevant ".py" script from the source distribution, edit out the body of the text of the example (which appear as blocks of text between triple quotes '"""') and alter it to your purpose. 
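To make this concrete, here is a minimal, hypothetical sketch of what a copied example script can be reduced to once the narrative blocks are trimmed; the file name ``my_dwi.nii.gz`` is only a placeholder for your own data and does not correspond to any script in ``doc/examples``, while the ``load_nifti`` call follows the same pattern as the quick-start snippet on the main documentation page::

    """
    Triple-quoted blocks like this one hold the tutorial text of the
    original example; edit or delete them and keep the code in between.
    """

    from dipy.io.image import load_nifti

    # Replace the placeholder path with your own dataset.
    data, affine = load_nifti('my_dwi.nii.gz')
    print(data.shape)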
Thanks to the developers of PyMVPA_ for designing the software which enables us to provide these documented examples! .. include:: links_names.inc dipy-0.13.0/doc/old_highlights.rst000066400000000000000000000045011317371701200170310ustar00rootroot00000000000000.. _old_highlights: **************** Older Highlights **************** DIPY was an **official exhibitor** for OHBM 2015. .. raw :: html

    **DIPY 0.9.2** is now available for :ref:`download `. Here is a summary of the new features. * Anatomically Constrained Tissue Classifiers for Tracking * Massive speedup of Constrained Spherical Deconvolution (CSD) * Recursive calibration of response function for CSD * New experimental framework for clustering * Improvements and 10X speedup for Quickbundles * Improvements in Linear Fascicle Evaluation (LiFE) * New implementation of Geodesic Anisotropy * New efficient transformation functions for registration * Sparse Fascicle Model supports acquisitions with multiple b-values **DIPY 0.8.0** is now available for :ref:`download `. The new release contains state-of-the-art algorithms for diffusion MRI registration, reconstruction, denoising, statistical evaluation, fiber tracking and validation of tracking. For more information about dipy_, read the `DIPY paper`_ in Frontiers in Neuroinformatics. .. raw :: html
    So, how similar are your bundles to the real anatomy? Learn how to optimize your analysis as we did to create the fornix of the figure above, by reading the tutorials in our :ref:`gallery `. In dipy_ we care about methods which can solve complex problems efficiently and robustly. QuickBundles is one of the many state-of-the art algorithms found in DIPY. It can be used to simplify large datasets of streamlines. See our :ref:`gallery ` of examples and try QuickBundles with your data. Here is a video of QuickBundles applied on a simple dataset. .. raw:: html .. include:: links_names.incdipy-0.13.0/doc/old_news.rst000066400000000000000000000024641317371701200156610ustar00rootroot00000000000000.. _old_news: ********************** Past announcements ********************** - **DIPY 0.7.1** is available for :ref:`download ` with **3X** more tutorials than 0.6.0! In addition, a `journal paper`_ focusing on teaching the fundamentals of DIPY is available in Frontiers of Neuroinformatics. - A new **hands on DIPY** seminar to 50 neuroscientists from Canada, as part of QBIN's "Do's and dont's of diffusion MRI" workshop, 8 April, 2014. - The creators of DIPY will attend both ISMRM and HBM 2015. Come and meet us! - `DIPY paper`_ accepted in Frontiers of Neuroinformatics, 22 January, 2014. - **DIPY 0.7.1** Released!, 16 January, 2014. - **DIPY 0.7.0** Released!, 23 December, 2013. - **Spherical Deconvolution** algorithms are now included in the current development version 0.7.0dev. See the examples in :ref:`gallery `, 24 June 2013. - A team of DIPY developers **wins** the `IEEE ISBI HARDI challenge `_, 7 April, 2013. - **Hands on DIPY** seminar took place at the dMRI course of the CREATE-MIA summer school, 5-7 June, McGill, Montreal, 2013. - **DIPY 0.6.0** Released!, 30 March, 2013. - **DIPY 3rd Sprint**, Berkeley, CA, 8-18 April, 2013. - **IEEE ISBI HARDI challenge** 2013 chooses **DIPY**, February, 2013. dipy-0.13.0/doc/release0.10.rst000066400000000000000000000345131317371701200157660ustar00rootroot00000000000000.. _release0.10: ==================================== Release notes for DIPY version 0.10 ==================================== GitHub stats for 2015/03/18 - 2015/11/19 (tag: 0.9.2) These lists are automatically generated, and may be incomplete or contain duplicates. The following 20 authors (alphabetically ordered) contributed 1022 commits: * Alexandre Gauvin * Ariel Rokem * Bago Amirbekian * David Qixiang Chen * Dimitris Rozakis * Eleftherios Garyfallidis * Gabriel Girard * Gonzalo Sanguinetti * Jean-Christophe Houde * Marc-Alexandre Côté * Matthew Brett * Mauro Zucchelli * Maxime Descoteaux * Michael Paquette * Omar Ocegueda * Oscar Esteban * Rafael Neto Henriques * Rohan Prinja * Samuel St-Jean * Stefan van der Walt We closed a total of 232 issues, 94 pull requests and 138 regular issues; this is the full list (generated with the script :file:`tools/github_stats.py`): Pull Requests (94): * :ghpull:`769`: RF: Remove aniso2iso altogether. * :ghpull:`772`: DOC: Use xvfb when building the docs in a headless machine. * :ghpull:`754`: DOC: Should we add a side-car gitter chat to the website? * :ghpull:`753`: TST: Test DSI with b0s. * :ghpull:`767`: Offscreen is False for test_slicer * :ghpull:`768`: Document dipy.reconst.dti.iter_fit_tensor params * :ghpull:`766`: Add fit_tensor iteration decorator * :ghpull:`751`: Reorient tracks according to ROI * :ghpull:`765`: BF: Typo in data file name. 
* :ghpull:`757`: Optimize dipy.align.reslice * :ghpull:`587`: Fvtk 2.0 PR1 * :ghpull:`749`: Fixed deprecation warning in skimage * :ghpull:`748`: TST: added test for _to_voxel_tolerance. * :ghpull:`678`: BF: added tolerance for negative streamline coordinates checks * :ghpull:`714`: RF: use masks in predictions and cross-validation * :ghpull:`739`: Set number of OpenMP threads during runtime * :ghpull:`733`: Add RTOP, RTAP and RTPP and the relative test * :ghpull:`743`: BF: memleaks with typed memory views in Cython * :ghpull:`724`: @sinkpoint's power map - refactored * :ghpull:`741`: ENH: it is preferable to use choice rather than randint to not have * :ghpull:`727`: Optimize tensor fitting * :ghpull:`726`: NF - CSD response from a mask * :ghpull:`729`: BF: tensor predict * :ghpull:`736`: Added installation of python-tk package for VTK travis bot * :ghpull:`735`: Added comment on nlmeans example about selecting one volume * :ghpull:`732`: WIP: Test with vtk on Travis * :ghpull:`731`: Np 1.10 * :ghpull:`640`: MAPMRI * :ghpull:`682`: Created list of examples for available features and metrics * :ghpull:`716`: Refactor data module * :ghpull:`699`: Added gaussian noise option to estimate_sigma * :ghpull:`712`: DOC: API changes in gh707. * :ghpull:`713`: RF: In case a user just wants to use a single integer. * :ghpull:`700`: TEST: add tests for AffineMap * :ghpull:`677`: DKI PR3 - NF: Adding standard kurtosis statistics on module dki.py * :ghpull:`721`: TST: Verify that output of estimate_sigma is a proper input to nlmeans. * :ghpull:`572`: NF : nlmeans now support arrays of noise std * :ghpull:`708`: Check for bval dimensionality on read. * :ghpull:`707`: BF: Keep up with changes in scipy 0.16 * :ghpull:`709`: DOC: Use the `identity` variable in the resampling transformation. * :ghpull:`703`: Fix syn-3d example * :ghpull:`705`: Fix example in function compress_streamline * :ghpull:`635`: Select streamlines based on logical operations on ROIs * :ghpull:`702`: BF: Use only validated examples when building docs. * :ghpull:`689`: Streamlines compression * :ghpull:`698`: DOC: added NI citation * :ghpull:`681`: RF + DOC: Add MNI template reference. Also import it into the dipy.da… * :ghpull:`696`: Change title of piesno example * :ghpull:`691`: CENIR 'HCP-like' multi b-value data * :ghpull:`661`: Test DTI eigenvectors * :ghpull:`690`: BF: nan entries cause segfault * :ghpull:`667`: DOC: Remove Sourceforge related makefile things. Add the gh-pages upl… * :ghpull:`676`: TST: update Travis config to use container infrastructure. * :ghpull:`533`: MRG: some Cython refactorings * :ghpull:`686`: BF: Make buildbot Pyhon26-32 happy * :ghpull:`683`: Fixed initial estimation in piesno * :ghpull:`654`: Affine registration PR 3/3 * :ghpull:`684`: BF: Fixed memory leak in QuickBundles. * :ghpull:`674`: NF: Function to sample perpendicular directions relative to a given vector * :ghpull:`679`: BF + NF: Provide dipy version info when running dipy.get_info() * :ghpull:`680`: NF: Fetch and Read the MNI T1 and/or T2 template. * :ghpull:`664`: DKI fitting (DKI PR2) * :ghpull:`671`: DOC: move mailing list links to neuroimaging * :ghpull:`663`: changed samuel st-jean email to the usherbrooke one * :ghpull:`648`: Improve check of collinearity in vec2vec_rotmat * :ghpull:`582`: DKI project: PR#1 Simulations to test DKI * :ghpull:`660`: BF: If scalar-color input has len(shape)<4, need to fill that in. 
* :ghpull:`612`: BF: Differences in spherical harmonic calculations wrt scipy 0.15 * :ghpull:`651`: Added estimate_sigma bias correction + update example * :ghpull:`659`: BF: If n_frames is larger than one use path-numbering. * :ghpull:`658`: FIX: resaved npy file causing load error for py 33 * :ghpull:`657`: Fix compilation error caused by inline functions * :ghpull:`628`: Affine registration PR 2/3 * :ghpull:`629`: Quickbundles 2.1 * :ghpull:`637`: DOC: Fix typo in docstring of Identity class. * :ghpull:`639`: DOC: Render the following line in the code cell. * :ghpull:`614`: Seeds from mask random * :ghpull:`633`: BF - no import of TissueTypes * :ghpull:`632`: fixed typo in dti example * :ghpull:`627`: BF: Add missing opacity property to point actor * :ghpull:`626`: Use LooseVersion to check for scipy versions * :ghpull:`625`: DOC: Include the PIESNO example. * :ghpull:`624`: DOC: Corrected typos in Restore tutorial and docstring. * :ghpull:`619`: DOC: Added missing contributor to developer list * :ghpull:`618`: Update README file * :ghpull:`616`: Raise ValueError when invalid matrix is given * :ghpull:`576`: Piesno example * :ghpull:`615`: bugfix for double word in doc example issue #387 * :ghpull:`610`: Added figure with HBM 2015 * :ghpull:`609`: Update website documentation * :ghpull:`607`: DOC: Detailed github stats for 0.9 * :ghpull:`606`: Removed the word new * :ghpull:`605`: Release mode: updating Changelog and Authors * :ghpull:`594`: DOC + PEP8: Mostly just line-wrapping. Issues (138): * :ghissue:`769`: RF: Remove aniso2iso altogether. * :ghissue:`772`: DOC: Use xvfb when building the docs in a headless machine. * :ghissue:`754`: DOC: Should we add a side-car gitter chat to the website? * :ghissue:`771`: Should we remove the deprecated quickbundles module? * :ghissue:`753`: TST: Test DSI with b0s. * :ghissue:`761`: reading dconn.nii * :ghissue:`723`: WIP: Assign streamlines to an existing cluster map via QuickBundles * :ghissue:`738`: Import tkinter * :ghissue:`767`: Offscreen is False for test_slicer * :ghissue:`752`: TST: Install vtk and mesa on Travis to test the fvtk module. * :ghissue:`768`: Document dipy.reconst.dti.iter_fit_tensor params * :ghissue:`763`: Tensor Fitting Overflows Memory * :ghissue:`766`: Add fit_tensor iteration decorator * :ghissue:`751`: Reorient tracks according to ROI * :ghissue:`765`: BF: Typo in data file name. * :ghissue:`764`: 404: Not Found when loading Stanford labels * :ghissue:`757`: Optimize dipy.align.reslice * :ghissue:`587`: Fvtk 2.0 PR1 * :ghissue:`286`: WIP - FVTK refactor/cleanup * :ghissue:`755`: dipy.reconst.tests.test_shm.test_sf_to_sh: TypeError: Cannot cast ufunc add output from dtype('float64') to dtype('uint16') with casting rule 'same_kind' * :ghissue:`749`: Fixed deprecation warning in skimage * :ghissue:`748`: TST: added test for _to_voxel_tolerance. 
* :ghissue:`678`: BF: added tolerance for negative streamline coordinates checks * :ghissue:`714`: RF: use masks in predictions and cross-validation * :ghissue:`739`: Set number of OpenMP threads during runtime * :ghissue:`733`: Add RTOP, RTAP and RTPP and the relative test * :ghissue:`743`: BF: memleaks with typed memory views in Cython * :ghissue:`737`: Possibly set_number_of_points doesn't delete memory * :ghissue:`672`: Power map * :ghissue:`724`: @sinkpoint's power map - refactored * :ghissue:`741`: ENH: it is preferable to use choice rather than randint to not have * :ghissue:`730`: numpy 1.10 breaks master * :ghissue:`727`: Optimize tensor fitting * :ghissue:`726`: NF - CSD response from a mask * :ghissue:`729`: BF: tensor predict * :ghissue:`736`: Added installation of python-tk package for VTK travis bot * :ghissue:`735`: Added comment on nlmeans example about selecting one volume * :ghissue:`732`: WIP: Test with vtk on Travis * :ghissue:`734`: WIP: Fvtk 2.0 with travis vtk support * :ghissue:`688`: dipy.test() fails on centos 6.x / python2.6 * :ghissue:`731`: Np 1.10 * :ghissue:`725`: WIP: TST: Install vtk on travis with conda. * :ghissue:`640`: MAPMRI * :ghissue:`611`: OSX test fail 'we check the default value of lambda ...' * :ghissue:`715`: In current segment_quickbundles tutorial there is no example for changing number of points * :ghissue:`719`: Fixes #715 * :ghissue:`682`: Created list of examples for available features and metrics * :ghissue:`716`: Refactor data module * :ghissue:`699`: Added gaussian noise option to estimate_sigma * :ghissue:`712`: DOC: API changes in gh707. * :ghissue:`713`: RF: In case a user just wants to use a single integer. * :ghissue:`700`: TEST: add tests for AffineMap * :ghissue:`677`: DKI PR3 - NF: Adding standard kurtosis statistics on module dki.py * :ghissue:`721`: TST: Verify that output of estimate_sigma is a proper input to nlmeans. * :ghissue:`693`: WIP: affine map tests * :ghissue:`694`: Memory errors / timeouts with affine registration on Windows * :ghissue:`572`: NF : nlmeans now support arrays of noise std * :ghissue:`708`: Check for bval dimensionality on read. * :ghissue:`697`: dipy.io.gradients read_bvals_bvecs does not check bvals length * :ghissue:`707`: BF: Keep up with changes in scipy 0.16 * :ghissue:`710`: Test dipy.core.tests.test_sphere.test_interp_rbf fails fails on Travis * :ghissue:`709`: DOC: Use the `identity` variable in the resampling transformation. * :ghissue:`649`: ROI seeds not placed at the center of the voxels * :ghissue:`656`: Build-bot status * :ghissue:`701`: Changes in `syn_registration_3d` example * :ghissue:`703`: Fix syn-3d example * :ghissue:`705`: Fix example in function compress_streamline * :ghissue:`704`: Buildbots failure: related to streamline compression? * :ghissue:`635`: Select streamlines based on logical operations on ROIs * :ghissue:`702`: BF: Use only validated examples when building docs. * :ghissue:`689`: Streamlines compression * :ghissue:`698`: DOC: added NI citation * :ghissue:`621`: piesno example not rendering correctly on the website * :ghissue:`650`: profiling hyp1f1 * :ghissue:`681`: RF + DOC: Add MNI template reference. Also import it into the dipy.da… * :ghissue:`696`: Change title of piesno example * :ghissue:`691`: CENIR 'HCP-like' multi b-value data * :ghissue:`661`: Test DTI eigenvectors * :ghissue:`690`: BF: nan entries cause segfault * :ghissue:`667`: DOC: Remove Sourceforge related makefile things. 
Add the gh-pages upl… * :ghissue:`676`: TST: update Travis config to use container infrastructure. * :ghissue:`533`: MRG: some Cython refactorings * :ghissue:`686`: BF: Make buildbot Pyhon26-32 happy * :ghissue:`622`: Fast shm from scipy 0.15.0 does not work on rc version * :ghissue:`683`: Fixed initial estimation in piesno * :ghissue:`233`: WIP: Dki * :ghissue:`654`: Affine registration PR 3/3 * :ghissue:`684`: BF: Fixed memory leak in QuickBundles. * :ghissue:`674`: NF: Function to sample perpendicular directions relative to a given vector * :ghissue:`679`: BF + NF: Provide dipy version info when running dipy.get_info() * :ghissue:`680`: NF: Fetch and Read the MNI T1 and/or T2 template. * :ghissue:`664`: DKI fitting (DKI PR2) * :ghissue:`539`: WIP: BF: Catching initial fodf creation of SDT * :ghissue:`671`: DOC: move mailing list links to neuroimaging * :ghissue:`663`: changed samuel st-jean email to the usherbrooke one * :ghissue:`287`: Fvtk sphere origin * :ghissue:`648`: Improve check of collinearity in vec2vec_rotmat * :ghissue:`582`: DKI project: PR#1 Simulations to test DKI * :ghissue:`660`: BF: If scalar-color input has len(shape)<4, need to fill that in. * :ghissue:`612`: BF: Differences in spherical harmonic calculations wrt scipy 0.15 * :ghissue:`651`: Added estimate_sigma bias correction + update example * :ghissue:`659`: BF: If n_frames is larger than one use path-numbering. * :ghissue:`652`: MAINT: work around scipy bug in sph_harm * :ghissue:`653`: Revisit naming when Matthew is back from Cuba * :ghissue:`658`: FIX: resaved npy file causing load error for py 33 * :ghissue:`657`: Fix compilation error caused by inline functions * :ghissue:`655`: Development documentation instructs to remove `master` * :ghissue:`628`: Affine registration PR 2/3 * :ghissue:`629`: Quickbundles 2.1 * :ghissue:`638`: tutorial example, code in text format * :ghissue:`637`: DOC: Fix typo in docstring of Identity class. * :ghissue:`639`: DOC: Render the following line in the code cell. * :ghissue:`614`: Seeds from mask random * :ghissue:`633`: BF - no import of TissueTypes * :ghissue:`632`: fixed typo in dti example * :ghissue:`630`: Possible documentation bug (?) * :ghissue:`627`: BF: Add missing opacity property to point actor * :ghissue:`459`: streamtubes opacity kwarg * :ghissue:`626`: Use LooseVersion to check for scipy versions * :ghissue:`625`: DOC: Include the PIESNO example. * :ghissue:`623`: DOC: Include the PIESNO example in the documentation. * :ghissue:`624`: DOC: Corrected typos in Restore tutorial and docstring. 
* :ghissue:`619`: DOC: Added missing contributor to developer list * :ghissue:`604`: Retired ARM buildbot * :ghissue:`613`: Possible random failure in test_vector_fields.test_reorient_vector_field_2d * :ghissue:`618`: Update README file * :ghissue:`616`: Raise ValueError when invalid matrix is given * :ghissue:`617`: Added build status icon to readme * :ghissue:`576`: Piesno example * :ghissue:`615`: bugfix for double word in doc example issue #387 * :ghissue:`600`: Use of nanmean breaks dipy for numpy < 1.8 * :ghissue:`610`: Added figure with HBM 2015 * :ghissue:`609`: Update website documentation * :ghissue:`390`: WIP: New PIESNO example and small corrections * :ghissue:`607`: DOC: Detailed github stats for 0.9 * :ghissue:`606`: Removed the word new * :ghissue:`605`: Release mode: updating Changelog and Authors * :ghissue:`594`: DOC + PEP8: Mostly just line-wrapping.dipy-0.13.0/doc/release0.11.rst000066400000000000000000000200711317371701200157610ustar00rootroot00000000000000.. _release0.11: ==================================== Release notes for DIPY version 0.11 ==================================== GitHub stats for 2015/12/03 - 2016/02/21 (tag: 0.10) The following 16 authors contributed 271 commits. * Ariel Rokem * Bago Amirbekian * Bishakh Ghosh * Eleftherios Garyfallidis * Gabriel Girard * Gregory R. Lee * Himanshu Mishra * Jean-Christophe Houde * Marc-Alexandre Côté * Matthew Brett * Matthieu Dumont * Omar Ocegueda * Sagun Pai * Samuel St-Jean * Stephan Meesters * Vatsala Swaroop We closed a total of 144 issues, 55 pull requests and 89 regular issues; this is the full list (generated with the script :file:`tools/github_stats.py`): Pull Requests (55): * :ghpull:`933`: Updating release dates * :ghpull:`925`: fix typos * :ghpull:`915`: BF: correct handling of output paths in dipy_quickbundles. * :ghpull:`922`: Fix PEP8 in top-level tests * :ghpull:`921`: fix typo * :ghpull:`918`: Fix PEP8 in test_expectmax * :ghpull:`917`: Website 0.11 update and more devs * :ghpull:`916`: Getting website ready for 0.11 release * :ghpull:`914`: DOC: Update release notes for 0.11 * :ghpull:`910`: Singleton sl vals * :ghpull:`908`: Fix pep8 errors in viz * :ghpull:`911`: fix typo * :ghpull:`904`: fix typo * :ghpull:`851`: Tissue Classifier tracking example - changed seeding mask to wm only voxels * :ghpull:`858`: Updates for upcoming numpy 1.11 release * :ghpull:`856`: Add reference to gitter chat room in the README * :ghpull:`762`: Contextual enhancements of ODF/FOD fields * :ghpull:`857`: DTI memory: use the same step in prediction as you use in fitting. * :ghpull:`816`: A few fixes to SFM. * :ghpull:`811`: Extract values from an image based on streamline coordinates. * :ghpull:`853`: miscellaneous Python 3 compatibility problem fixes in fvtk * :ghpull:`849`: nlmeans use num threads option in 3d * :ghpull:`850`: DOC: fix typo * :ghpull:`848`: DOC: fix typo * :ghpull:`847`: DOC: fix typo * :ghpull:`845`: DOC: Add kurtosis example to examples_index * :ghpull:`846`: DOC: fix typo * :ghpull:`826`: Return numpy arrays instead of memory views from cython functions * :ghpull:`841`: Rename CONTRIBUTING to CONTRIBUTING.md * :ghpull:`839`: DOC: Fix up the docstring for the CENIR data * :ghpull:`819`: DOC: Add the DKI reconstruction example to the list of valid examples. 
* :ghpull:`843`: Drop 3.2 * :ghpull:`838`: "Contributing" * :ghpull:`833`: Doc: Typo * :ghpull:`817`: RF: Convert nan values in bvectors to 0's * :ghpull:`836`: fixed typo * :ghpull:`695`: Introducing workflows * :ghpull:`829`: Fixes issue #813 by not checking data type explicitly. * :ghpull:`830`: Fixed doc of SDT * :ghpull:`825`: Updated toollib and doc tools (#802) * :ghpull:`760`: NF - random seeds from mask * :ghpull:`824`: Updated copyright to 2016 * :ghpull:`815`: DOC: The previous link doesn't exist anymore. * :ghpull:`669`: Function to reorient gradient directions according to moco parameters * :ghpull:`809`: MRG: refactor and test setup.py * :ghpull:`821`: BF: revert accidentally committed COMMIT_INFO.txt * :ghpull:`818`: Round coords life * :ghpull:`797`: Update csdeconv.py * :ghpull:`806`: Relax regression tests * :ghpull:`814`: TEST: compare array shapes directly * :ghpull:`808`: MRG: pull in discarded changes from maintenance * :ghpull:`745`: faster version of piesno * :ghpull:`807`: BF: fix shebang lines for scripts * :ghpull:`794`: RF: Allow setting the verbosity of the AffineRegistration while running it * :ghpull:`801`: TST: add Python 3.5 to travis-ci test matrix Issues (89): * :ghissue:`933`: Updating release dates * :ghissue:`925`: fix typos * :ghissue:`915`: BF: correct handling of output paths in dipy_quickbundles. * :ghissue:`922`: Fix PEP8 in top-level tests * :ghissue:`886`: PEP8 in top-level tests * :ghissue:`921`: fix typo * :ghissue:`918`: Fix PEP8 in test_expectmax * :ghissue:`863`: PEP8 in test_expectmax * :ghissue:`919`: STYLE:PEP8 workflows * :ghissue:`896`: STYLE: PEP8 for workflows folder * :ghissue:`917`: Website 0.11 update and more devs * :ghissue:`900`: SLR example needs updating * :ghissue:`906`: Compiling the website needs too much memory * :ghissue:`916`: Getting website ready for 0.11 release * :ghissue:`914`: DOC: Update release notes for 0.11 * :ghissue:`910`: Singleton sl vals * :ghissue:`908`: Fix pep8 errors in viz * :ghissue:`890`: PEP8 in viz * :ghissue:`911`: fix typo * :ghissue:`905`: math is broken in doc * :ghissue:`904`: fix typo * :ghissue:`851`: Tissue Classifier tracking example - changed seeding mask to wm only voxels * :ghissue:`858`: Updates for upcoming numpy 1.11 release * :ghissue:`856`: Add reference to gitter chat room in the README * :ghissue:`762`: Contextual enhancements of ODF/FOD fields * :ghissue:`857`: DTI memory: use the same step in prediction as you use in fitting. * :ghissue:`816`: A few fixes to SFM. * :ghissue:`898`: Pep8 #891 * :ghissue:`811`: Extract values from an image based on streamline coordinates. * :ghissue:`892`: PEP8 workflows * :ghissue:`894`: PEP8 utils * :ghissue:`895`: PEP8 Tracking * :ghissue:`893`: PEP8 Viz * :ghissue:`860`: Added Travis-CI badge * :ghissue:`692`: Refactor fetcher.py * :ghissue:`742`: LinAlgError on tracking quickstart, with python 3.4 * :ghissue:`822`: Could you help me ? 
"URLError:" * :ghissue:`840`: Make dti reconst less memory hungry * :ghissue:`855`: 0.9.3rc * :ghissue:`853`: miscellaneous Python 3 compatibility problem fixes in fvtk * :ghissue:`849`: nlmeans use num threads option in 3d * :ghissue:`850`: DOC: fix typo * :ghissue:`848`: DOC: fix typo * :ghissue:`153`: DiffusionSpectrumModel assumes 1 b0 and fails with data with more than 1 b0 * :ghissue:`93`: GradientTable mask does not account for nan's in b-values * :ghissue:`665`: Online tutorial of quickbundles does not work for released version on macosx * :ghissue:`758`: One viz test still failing on mac os * :ghissue:`847`: DOC: fix typo * :ghissue:`845`: DOC: Add kurtosis example to examples_index * :ghissue:`846`: DOC: fix typo * :ghissue:`826`: Return numpy arrays instead of memory views from cython functions * :ghissue:`841`: Rename CONTRIBUTING to CONTRIBUTING.md * :ghissue:`839`: DOC: Fix up the docstring for the CENIR data * :ghissue:`842`: New pip fails on 3.2 * :ghissue:`819`: DOC: Add the DKI reconstruction example to the list of valid examples. * :ghissue:`843`: Drop 3.2 * :ghissue:`838`: "Contributing" * :ghissue:`833`: Doc: Typo * :ghissue:`817`: RF: Convert nan values in bvectors to 0's * :ghissue:`836`: fixed typo * :ghissue:`695`: Introducing workflows * :ghissue:`829`: Fixes issue #813 by not checking data type explicitly. * :ghissue:`805`: Multiple failures on Windows Python 3.5 build * :ghissue:`802`: toollib and doc tools need update to 3.5 * :ghissue:`812`: Python 2.7 doctest failures on 64-bit Windows * :ghissue:`685`: (WIP) DKI PR5 - NF: DKI-ODF estimation * :ghissue:`830`: Fixed doc of SDT * :ghissue:`825`: Updated toollib and doc tools (#802) * :ghissue:`760`: NF - random seeds from mask * :ghissue:`824`: Updated copyright to 2016 * :ghissue:`666`: Parallelized local tracking branch so now you can actually look at my code :) * :ghissue:`815`: DOC: The previous link doesn't exist anymore. * :ghissue:`747`: TEST: make test faster * :ghissue:`631`: NF - multiprocessing multi voxel fit * :ghissue:`669`: Function to reorient gradient directions according to moco parameters * :ghissue:`809`: MRG: refactor and test setup.py * :ghissue:`820`: dipy.get_info() returns wrong commit hash * :ghissue:`821`: BF: revert accidentally committed COMMIT_INFO.txt * :ghissue:`818`: Round coords life * :ghissue:`810`: Wrong input type for `_voxel2stream` on 64-bit Windows * :ghissue:`803`: Windows 7 Pro VM Python 2.7 gives 5 test errors with latest release 0.10.1 * :ghissue:`797`: Update csdeconv.py * :ghissue:`806`: Relax regression tests * :ghissue:`814`: TEST: compare array shapes directly * :ghissue:`808`: MRG: pull in discarded changes from maintenance * :ghissue:`745`: faster version of piesno * :ghissue:`807`: BF: fix shebang lines for scripts * :ghissue:`794`: RF: Allow setting the verbosity of the AffineRegistration while running it * :ghissue:`801`: TST: add Python 3.5 to travis-ci test matrixdipy-0.13.0/doc/release0.12.rst000066400000000000000000000742251317371701200157740ustar00rootroot00000000000000.. _release0.12: ==================================== Release notes for DIPY version 0.12 ==================================== GitHub stats for 2016/02/21 - 2017/06/26 (tag: 0.11.0) These lists are automatically generated, and may be incomplete or contain duplicates. The following 48 authors contributed 1491 commits. * Alexandre Gauvin * Antonio Ossa * Ariel Rokem * Bago Amirbekian * Bishakh Ghosh * David Reagan * Eleftherios Garyfallidis * Etienne St-Onge * Gabriel Girard * Gregory R. 
Lee * Jean-Christophe Houde * Jon Haitz Legarreta * Julio Villalon * Kesshi Jordan * Manu Tej Sharma * Marc-Alexandre Côté * Matthew Brett * Matthieu Dumont * Nil Goyette * Omar Ocegueda Gonzalez * Rafael Neto Henriques * Ranveer Aggarwal * Riddhish Bhalodia * Rutger Fick * Samuel St-Jean * Serge Koudoro * Shahnawaz Ahmed * Sourav Singh * Stephan Meesters * Stonge Etienne * Guillaume Theaud * Tingyi Wanyan * Tom Wright * Vibhatha Abeykoon * Yaroslav Halchenko * Eric Peterson * Sven Dorkenwald * theaverageguy We closed a total of 511 issues, 169 pull requests and 342 regular issues; this is the full list (generated with the script :file:`tools/github_stats.py`): Pull Requests (169): * :ghpull:`1273`: Release 0.12 doc fix * :ghpull:`1272`: small correction for debugging purpose on nlmeans * :ghpull:`1269`: Odf slicer * :ghpull:`1271`: Viz tut update * :ghpull:`1268`: Following up on #1243. * :ghpull:`1223`: local PCA using SVD * :ghpull:`1270`: Doc cleaning deprecation warning * :ghpull:`1267`: Adding a decorator for skipping test if openmp is not available * :ghpull:`1090`: Documentation for command line interfaces * :ghpull:`1243`: Better fvtk.viz error when no VTK installed * :ghpull:`1263`: Cast Streamline attrs to numpy ints, to avoid buffer mismatch. * :ghpull:`1254`: Automate script installation * :ghpull:`1261`: removing absolute path in tracking module * :ghpull:`1255`: Fix missing documentation content * :ghpull:`1260`: removing absolute path in reconst * :ghpull:`1241`: Csa and csd reconstruction workflow rebased * :ghpull:`1250`: DOC: Fix reconst_dki.py DKI example documentation typos. * :ghpull:`1244`: TEST: Decrease precision of tests for dki micro model prediction * :ghpull:`1235`: New hdf5 file format for saving PeaksAndMetrics objects * :ghpull:`1231`: TST: Reduce precision requirement for test of tortuosity estimation. * :ghpull:`1233`: Feature: Added environment override for dipy_home variable * :ghpull:`1234`: BUG: Fix non-ASCII characters in reconst_dki.py example. * :ghpull:`1222`: A lightweight UI for medical visualizations #5: 2D Circular Slider * :ghpull:`1228`: RF: Use cython imports instead of relying on extern * :ghpull:`1227`: BF: Use np.npy_intp instead of assuming long for ArraySequence attributes * :ghpull:`1226`: DKI Microstructural model * :ghpull:`1229`: RF: allow for scipy pre-release deprecations * :ghpull:`1225`: Add one more multi b-value data-set * :ghpull:`1219`: MRG:Data off dropbox * :ghpull:`1221`: NF: Check multi b-value * :ghpull:`1212`: Follow PEP8 in reconst (part 2) * :ghpull:`1217`: Use integer division in reconst_gqi.py * :ghpull:`1205`: A lightweight UI for medical visualizations #4: 2D Line Slider * :ghpull:`1166`: RF: Use the average sigma in the mask. * :ghpull:`1216`: Use integer division to avoid errors in indexing * :ghpull:`1214`: DOC: add a clarification note to simplify_warp_funcion_3d * :ghpull:`1208`: Follow PEP8 in reconst (part 1) * :ghpull:`1206`: Revert #1204, and add a filter to suppress warnings. 
* :ghpull:`1196`: MRG: Use dipy's array comparisons for tests * :ghpull:`1204`: Suppress warnings regarding one-dimensional arrays changes in scipy 0.18 * :ghpull:`1199`: A lightweight UI for medical visualizations #3: Changes to Event Handling * :ghpull:`1202`: Use integer division to avoid errors in indexing * :ghpull:`1198`: ENH: avoid log zero * :ghpull:`1201`: Fix out of bounds point not being classified OUTSIDEIMAGE (binary cla… * :ghpull:`1115`: Bayesian Tissue Classification * :ghpull:`1052`: Conda install * :ghpull:`1183`: A lightweight UI for medical visualizations #2: TextBox * :ghpull:`1186`: MRG: use newer wheelhouse for installs * :ghpull:`1195`: Make PeaksAndMetrics pickle-able * :ghpull:`1194`: Use assert_arrays_equal when needed. * :ghpull:`1193`: Deprecate the Accent colormap, in anticipation of changes in MPL 2.0 * :ghpull:`1140`: A lightweight UI for medical visualizations #1: Button * :ghpull:`1171`: fix:dev: added numpy.int64 for my triangle array * :ghpull:`1123`: Add the mask workflow * :ghpull:`1174`: NF: added the repulsion 200 sphere. * :ghpull:`1177`: BF: fix interpolation call with Numpy 1.12 * :ghpull:`1162`: Return S0 value for DTI fits * :ghpull:`1147`: add this fix for newer version of pytables. * :ghpull:`1076`: ENH: Add support for ArraySequence in `length` function * :ghpull:`1050`: ENH: expand OpenMP utilities and move from denspeed.pyx to dipy.utils * :ghpull:`1082`: Add documentation uploading script * :ghpull:`1153`: Athena mapmri * :ghpull:`1159`: TST - add tests for various affine matrices for local tracking * :ghpull:`1157`: Replace `get_affine` with `affine` and `get_header` with `header`. * :ghpull:`1160`: Add Shahnawaz to list of contributors. * :ghpull:`1158`: BF: closing matplotlib plots for each file while running the examples * :ghpull:`1151`: Define fmin() for Visual Studio * :ghpull:`1149`: Change DKI_signal to dki_signal * :ghpull:`1137`: Small fix to insure that fwDTI non-linear procedure does not crash * :ghpull:`942`: NF: Added support to colorize each line points indivdually * :ghpull:`1141`: Do not cover files related to benchmarks. * :ghpull:`1098`: Adding custom interactor for vizualisation * :ghpull:`1136`: Update deprecated function. * :ghpull:`1113`: TST: Test for invariance of model_params to splitting of the data. * :ghpull:`1134`: Rebase of https://github.com/nipy/dipy/pull/993 * :ghpull:`1064`: Faster dti odf * :ghpull:`1114`: flexible grid to streamline affine generation and pathlength function * :ghpull:`1122`: Add the reconst_dti workflow * :ghpull:`1132`: Update .travis.yml and README.md * :ghpull:`1125`: Intensity adjustment. Find a better upper bound for interpolating images. * :ghpull:`1130`: Minor corrections for showing surfaces * :ghpull:`1092`: Line-based target() * :ghpull:`1129`: Fix 1127 * :ghpull:`1034`: Viz surfaces * :ghpull:`1060`: Fast computation of Cross Correlation metric * :ghpull:`1124`: Small fix in free water DTI model * :ghpull:`1058`: IVIM * :ghpull:`1110`: WIP : Ivim linear fitting * :ghpull:`1120`: Fix 1119 * :ghpull:`1075`: Drop26 * :ghpull:`835`: NF: Free water tensor model * :ghpull:`1046`: BF - peaks_from_model with nbr_processes <= 0 * :ghpull:`1049`: MAINT: minor cython cleanup in align/vector_fields.pyx * :ghpull:`1087`: Base workflow enhancements + tests * :ghpull:`1112`: DOC: Math rendering issue in SFM gallery example. 
* :ghpull:`1109`: Changed default value of mni template * :ghpull:`1106`: Including MNI Template 2009c in Fetcher * :ghpull:`1066`: Adaptive Denoising * :ghpull:`1091`: Modifications for building docs with python3 * :ghpull:`1105`: Import reload function from imp module explicitly for python3 * :ghpull:`1102`: MRG: add pytables to travis-ci, Py35 full test run * :ghpull:`1100`: Fix for Python 3 in io.dpy * :ghpull:`1094`: Updates to FBC measures documentation * :ghpull:`1059`: Documentation to discourage misuse of GradientTable * :ghpull:`1063`: Fixes #1061 : Changed all S0 to 1.0 * :ghpull:`1089`: BF: fix test error on Python 3 * :ghpull:`1079`: Return a generator from `orient_by_roi` * :ghpull:`1088`: Restored the older implementation of nlmeans * :ghpull:`1080`: DOC: TensorModel.__init__ docstring. * :ghpull:`828`: Fiber to bundle coherence measures * :ghpull:`1072`: DOC: Added a coverage badge to README.rst * :ghpull:`1025`: PEP8: Fix pep8 in segment * :ghpull:`1077`: DOC: update fibernavigator link * :ghpull:`1069`: DOC: Small one -- we need this additional line of white space to render. * :ghpull:`1068`: Renamed test_shore for consistency * :ghpull:`1067`: Generate b vectors using disperse_charges * :ghpull:`1065`: improve OMP parallelization with scheduling * :ghpull:`1062`: BF - fix CSD.predict to work with nd inputs. * :ghpull:`1056`: Remove tracking interfaces * :ghpull:`1028`: BF: Predict DKI with a volume of S0 * :ghpull:`1041`: NF - Add PMF Threshold to Tractography * :ghpull:`1039`: Doc - fix definition of real_sph_harm functions * :ghpull:`1019`: MRG: fix heavy dependency check; no numpy for setup * :ghpull:`1018`: Fix: denspeed.pyx to give correct output for nlmeans * :ghpull:`1035`: Fix for fetcher files in Windows * :ghpull:`974`: Minor change in `tools/github_stats.py` * :ghpull:`1021`: Added warning for VTK not installed * :ghpull:`1024`: Documnetation fix for reconst_dsid.py * :ghpull:`981`: Fixes #979 : No figures in DKI example - Add new line after figure * :ghpull:`958`: FIX: PEP8 in testing * :ghpull:`1005`: FIX: Use absolute imports in io * :ghpull:`951`: Contextual Enhancement update: fix SNR issue, fix reference * :ghpull:`1015`: Fix progressbar of fetcher * :ghpull:`992`: FIX: Update the import statements to use absolute import in core * :ghpull:`1003`: FIX: Change the import statements in direction * :ghpull:`1004`: FIX: Use absolute import in pkg_info * :ghpull:`1006`: FIX: Use absolute import in utils and scratch * :ghpull:`1010`: Absolute Imports in Viz * :ghpull:`929`: Fix PEP8 in data * :ghpull:`941`: BW: skimage.filter module name warning * :ghpull:`976`: Fix PEP8 in sims and remove unnecessary imports * :ghpull:`956`: FIX: PEP8 in reconst/test and reconst/benchmarks * :ghpull:`955`: FIX: PEP8 in external * :ghpull:`952`: FIX: PEP8 in tracking and tracking benchmarks/tests * :ghpull:`982`: FIX: relative imports in dipy/align * :ghpull:`972`: Fixes #901 : Added documentation for "step" in dti * :ghpull:`971`: Add progress bar feature to dipy.data.fetcher * :ghpull:`989`: copyright 2008-2016 * :ghpull:`977`: Relative import fix in dipy/align * :ghpull:`957`: FIX: PEP8 in denoise * :ghpull:`959`: FIX: PEP8 in utils * :ghpull:`967`: Update index.rst correcting the date of release 0.11 * :ghpull:`954`: FIX: PEP8 in direction * :ghpull:`965`: Fix typo * :ghpull:`948`: Fix PEP8 in boots * :ghpull:`946`: FIX: PEP8 for test_sumsqdiff and test_scalespace * :ghpull:`964`: FIX: PEP8 in test_imaffine * :ghpull:`963`: FIX: PEP8 in core * :ghpull:`947`: FIX: PEP8 for 
test files * :ghpull:`897`: PEP8 * :ghpull:`926`: Fix PEP8 in fixes * :ghpull:`937`: BF : Clamping of the value of v in winding function * :ghpull:`907`: DOC: switch to using mathjax for maths * :ghpull:`932`: Fixes #931 : checks if nb_points=0 * :ghpull:`927`: Fix PEP8 in io and remove duplicate definition in test_bvectxt.py * :ghpull:`913`: Fix pep8 in workflows * :ghpull:`935`: Setup: go on to version 0.12 development. * :ghpull:`934`: DOC: Update github stats for 0.11 as of today. * :ghpull:`933`: Updating release dates Issues (342): * :ghissue:`1273`: Release 0.12 doc fix * :ghissue:`1272`: small correction for debugging purpose on nlmeans * :ghissue:`1269`: Odf slicer * :ghissue:`1143`: Slice through ODF fields * :ghissue:`1271`: Viz tut update * :ghissue:`1246`: WIP: Replace widget with ui components in example. * :ghissue:`1268`: Following up on #1243. * :ghissue:`1223`: local PCA using SVD * :ghissue:`1265`: Test failure on OSX in test_nlmeans_4d_3dsigma_and_threads * :ghissue:`1270`: Doc cleaning deprecation warning * :ghissue:`1251`: Slice through ODF fields - Rebased * :ghissue:`1267`: Adding a decorator for skipping test if openmp is not available * :ghissue:`1090`: Documentation for command line interfaces * :ghissue:`1243`: Better fvtk.viz error when no VTK installed * :ghissue:`1238`: Cryptic fvtk.viz error when no VTK installed * :ghissue:`1242`: DKI microstructure model tests still fail intermittenly * :ghissue:`1252`: Debug PR only - Odf slicer vtk tests (do not merge) * :ghissue:`1263`: Cast Streamline attrs to numpy ints, to avoid buffer mismatch. * :ghissue:`1257`: revamp piesno docstring * :ghissue:`978`: Use absolute import in align * :ghissue:`1179`: Automate workflow generation * :ghissue:`1253`: Automate script installation for workflows * :ghissue:`1254`: Automate script installation * :ghissue:`1261`: removing absolute path in tracking module * :ghissue:`1001`: Use absolute import in tracking * :ghissue:`1255`: Fix missing documentation content * :ghissue:`1260`: removing absolute path in reconst * :ghissue:`999`: Use absolute import in reconst * :ghissue:`1258`: Fix nlmeans indexing * :ghissue:`369`: Add TESTs for resample * :ghissue:`1155`: csa and csd reconstruction workflow * :ghissue:`1000`: Use absolute import in segment, testing and tests * :ghissue:`1070`: [Docs] Examples using deprecated function * :ghissue:`711`: Update api_changes.rst for interp_rbf * :ghissue:`321`: Median otsu figures in example don't look good * :ghissue:`994`: Use absolute import in dipy/core * :ghissue:`608`: Customize at runtime the number of cores nlmeans is using * :ghissue:`865`: PEP8 in test_imwarp * :ghissue:`591`: Allow seed_from_mask to generate random seeds * :ghissue:`518`: TODO: aniso2iso module will be completely removed in version 0.10. * :ghissue:`328`: "incompatible" import of peaks_from_model in your recent publication * :ghissue:`1241`: Csa and csd reconstruction workflow rebased * :ghissue:`1250`: DOC: Fix reconst_dki.py DKI example documentation typos. * :ghissue:`1244`: TEST: Decrease precision of tests for dki micro model prediction * :ghissue:`1235`: New hdf5 file format for saving PeaksAndMetrics objects * :ghissue:`1231`: TST: Reduce precision requirement for test of tortuosity estimation. 
* :ghissue:`1210`: Switching branches in windows and pip install error * :ghissue:`1209`: Move data files out of dropbox => persistent URL * :ghissue:`1233`: Feature: Added environment override for dipy_home variable * :ghissue:`1234`: BUG: Fix non-ASCII characters in reconst_dki.py example. * :ghissue:`1222`: A lightweight UI for medical visualizations #5: 2D Circular Slider * :ghissue:`1185`: unable to use fvtk.show after ubuntu 16.10 install * :ghissue:`1228`: RF: Use cython imports instead of relying on extern * :ghissue:`909`: Inconsistent output for values_from_volume * :ghissue:`1182`: CSD vs CSA * :ghissue:`1211`: `dipy.data.read_bundles_2_subjects` doesn't fetch data as expected * :ghissue:`1227`: BF: Use np.npy_intp instead of assuming long for ArraySequence attributes * :ghissue:`1027`: (DO NOT MERGE THIS PR) NF: DKI microstructural model * :ghissue:`1226`: DKI Microstructural model * :ghissue:`1229`: RF: allow for scipy pre-release deprecations * :ghissue:`1225`: Add one more multi b-value data-set * :ghissue:`1219`: MRG:Data off dropbox * :ghissue:`1218`: [Docs] Error while generating html * :ghissue:`1221`: NF: Check multi b-value * :ghissue:`1212`: Follow PEP8 in reconst (part 2) * :ghissue:`1217`: Use integer division in reconst_gqi.py * :ghissue:`1205`: A lightweight UI for medical visualizations #4: 2D Line Slider * :ghissue:`1166`: RF: Use the average sigma in the mask. * :ghissue:`1216`: Use integer division to avoid errors in indexing * :ghissue:`1215`: [Docs] Error while building examples: tracking_quick_start.py * :ghissue:`1213`: dipy.align.vector_fields.simplify_warp_function_3d: Wrong equation in docstring * :ghissue:`1214`: DOC: add a clarification note to simplify_warp_funcion_3d * :ghissue:`1208`: Follow PEP8 in reconst (part 1) * :ghissue:`1206`: Revert #1204, and add a filter to suppress warnings. * :ghissue:`1196`: MRG: Use dipy's array comparisons for tests * :ghissue:`1191`: Test failures for cluster code with current numpy master * :ghissue:`1207`: Follow PEP8 in reconst * :ghissue:`1204`: Suppress warnings regarding one-dimensional arrays changes in scipy 0.18 * :ghissue:`1107`: Dipy.align.reslice: either swallow the scipy warning or rework to avoid it * :ghissue:`1199`: A lightweight UI for medical visualizations #3: Changes to Event Handling * :ghissue:`1200`: [Docs] Error while generating docs * :ghissue:`1202`: Use integer division to avoid errors in indexing * :ghissue:`1188`: Colormap test errors for new matplotlib * :ghissue:`1187`: Negative integer powers error with numpy 1.12 * :ghissue:`1170`: Importing vtk with dipy * :ghissue:`1086`: ENH: avoid calling log() on zero-valued elements in anisotropic_power * :ghissue:`1198`: ENH: avoid log zero * :ghissue:`1201`: Fix out of bounds point not being classified OUTSIDEIMAGE (binary cla… * :ghissue:`1115`: Bayesian Tissue Classification * :ghissue:`1052`: Conda install * :ghissue:`1183`: A lightweight UI for medical visualizations #2: TextBox * :ghissue:`1173`: TST: Test on Python 3.6 * :ghissue:`1186`: MRG: use newer wheelhouse for installs * :ghissue:`1190`: Pickle error for Python 3.6 and test_peaksFromModelParallel * :ghissue:`1195`: Make PeaksAndMetrics pickle-able * :ghissue:`1194`: Use assert_arrays_equal when needed. 
* :ghissue:`1193`: Deprecate the Accent colormap, in anticipation of changes in MPL 2.0 * :ghissue:`1189`: Np1.12 * :ghissue:`1140`: A lightweight UI for medical visualizations #1: Button * :ghissue:`1022`: Fixes #720 : Auto generate ipython notebooks * :ghissue:`1139`: The shebang again! Python: bad interpreter: No such file or directory * :ghissue:`1171`: fix:dev: added numpy.int64 for my triangle array * :ghissue:`1123`: Add the mask workflow * :ghissue:`1174`: NF: added the repulsion 200 sphere. * :ghissue:`1176`: Dipy.tracking.local.interpolation.nearestneighbor_interpolate raises when used with Numpy 1.12 * :ghissue:`1177`: BF: fix interpolation call with Numpy 1.12 * :ghissue:`1162`: Return S0 value for DTI fits * :ghissue:`1142`: pytables version and streamlines_format.py example * :ghissue:`1147`: add this fix for newer version of pytables. * :ghissue:`1076`: ENH: Add support for ArraySequence in `length` function * :ghissue:`1050`: ENH: expand OpenMP utilities and move from denspeed.pyx to dipy.utils * :ghissue:`1082`: Add documentation uploading script * :ghissue:`1153`: Athena mapmri * :ghissue:`1097`: Added to quantize_evecs: multiprocessing and v * :ghissue:`1159`: TST - add tests for various affine matrices for local tracking * :ghissue:`1163`: WIP: Combined contour function with slicer to use affine * :ghissue:`940`: Drop python 2.6 * :ghissue:`1040`: SFM example using deprecated code * :ghissue:`1118`: pip install dipy failing on my windows * :ghissue:`1119`: Buildbots failing with workflow merge * :ghissue:`1127`: Windows buildbot failures after ivim_linear merge * :ghissue:`1128`: Support for non linear denoise? * :ghissue:`1138`: A few broken builds * :ghissue:`1148`: Actual S0 for DTI data * :ghissue:`1157`: Replace `get_affine` with `affine` and `get_header` with `header`. * :ghissue:`1160`: Add Shahnawaz to list of contributors. * :ghissue:`740`: Improved mapmri implementation with laplacian regularization and new … * :ghissue:`1045`: Allow affine 'shear' tolerance in LocalTracking * :ghissue:`1154`: [Bug] connectivity matrix image in streamline_tools example * :ghissue:`1158`: BF: closing matplotlib plots for each file while running the examples * :ghissue:`1151`: Define fmin() for Visual Studio * :ghissue:`1145`: DKI_signal should be dki_signal in dipy.sims.voxel * :ghissue:`1149`: Change DKI_signal to dki_signal * :ghissue:`1137`: Small fix to insure that fwDTI non-linear procedure does not crash * :ghissue:`827`: Free Water Elimination DTI * :ghissue:`942`: NF: Added support to colorize each line points indivdually * :ghissue:`1141`: Do not cover files related to benchmarks. * :ghissue:`1098`: Adding custom interactor for vizualisation * :ghissue:`1136`: Update deprecated function. * :ghissue:`1113`: TST: Test for invariance of model_params to splitting of the data. 
* :ghissue:`1134`: Rebase of https://github.com/nipy/dipy/pull/993 * :ghissue:`1064`: Faster dti odf * :ghissue:`1114`: flexible grid to streamline affine generation and pathlength function * :ghissue:`1122`: Add the reconst_dti workflow * :ghissue:`1132`: Update .travis.yml and README.md * :ghissue:`1051`: ENH: use parallel processing in the cython code for CCMetric * :ghissue:`993`: FIX: Use absolute imports in testing,tests and segment files * :ghissue:`673`: WIP: Workflow for syn registration * :ghissue:`859`: [WIP] Suppress warnings in tests * :ghissue:`983`: PEP8 in sims #884 * :ghissue:`984`: PEP8 in reconst #881 * :ghissue:`1009`: Absolute Imports in Tracking * :ghissue:`1036`: Estimate S0 from data (DTI) * :ghissue:`1125`: Intensity adjustment. Find a better upper bound for interpolating images. * :ghissue:`1130`: Minor corrections for showing surfaces * :ghissue:`1092`: Line-based target() * :ghissue:`1129`: Fix 1127 * :ghissue:`1034`: Viz surfaces * :ghissue:`394`: Update documentation for VTK and Anaconda * :ghissue:`973`: Minor change in `tools/github_stats.py` * :ghissue:`1060`: Fast computation of Cross Correlation metric * :ghissue:`1124`: Small fix in free water DTI model * :ghissue:`1058`: IVIM * :ghissue:`1110`: WIP : Ivim linear fitting * :ghissue:`1120`: Fix 1119 * :ghissue:`1121`: Recons dti workflow * :ghissue:`1083`: nlmeans problem * :ghissue:`1075`: Drop26 * :ghissue:`835`: NF: Free water tensor model * :ghissue:`1046`: BF - peaks_from_model with nbr_processes <= 0 * :ghissue:`1049`: MAINT: minor cython cleanup in align/vector_fields.pyx * :ghissue:`1087`: Base workflow enhancements + tests * :ghissue:`1112`: DOC: Math rendering issue in SFM gallery example. * :ghissue:`670`: Tissue classification using MAP-MRF * :ghissue:`332`: A sample nipype interface for fit_tensor * :ghissue:`1116`: failing to build the docs: issue with io.BufferedIOBase * :ghissue:`1109`: Changed default value of mni template * :ghissue:`1106`: Including MNI Template 2009c in Fetcher * :ghissue:`1066`: Adaptive Denoising * :ghissue:`351`: Dipy.tracking.utils.target affine parameter is misleading * :ghissue:`1091`: Modifications for building docs with python3 * :ghissue:`912`: Unable to build documentation with Python 3 * :ghissue:`1105`: Import reload function from imp module explicitly for python3 * :ghissue:`1104`: restore_dti.py example does not work in python3 * :ghissue:`1102`: MRG: add pytables to travis-ci, Py35 full test run * :ghissue:`1100`: Fix for Python 3 in io.dpy * :ghissue:`1103`: BF: This raises a warning on line 367 otherwise. * :ghissue:`1101`: Test with optional dependencies (including pytables) on Python 3. * :ghissue:`1094`: Updates to FBC measures documentation * :ghissue:`1059`: Documentation to discourage misuse of GradientTable * :ghissue:`1061`: Inconsistency in specifying S0 values in multi_tensor and single_tensor * :ghissue:`1063`: Fixes #1061 : Changed all S0 to 1.0 * :ghissue:`1089`: BF: fix test error on Python 3 * :ghissue:`1079`: Return a generator from `orient_by_roi` * :ghissue:`1088`: Restored the older implementation of nlmeans * :ghissue:`1080`: DOC: TensorModel.__init__ docstring. * :ghissue:`1085`: Enhanced workflows * :ghissue:`1081`: mean_diffusivity from the reconst.dti module returns incorrect shape * :ghissue:`1031`: improvements for denoise/denspeed.pyx * :ghissue:`828`: Fiber to bundle coherence measures * :ghissue:`1072`: DOC: Added a coverage badge to README.rst * :ghissue:`1071`: report coverage and add a badge? 
* :ghissue:`1038`: BF: Should fix #1037 * :ghissue:`1078`: Fetcher for ivim data, needs md5 * :ghissue:`953`: FIX: PEP8 for segment * :ghissue:`1025`: PEP8: Fix pep8 in segment * :ghissue:`883`: PEP8 in segment * :ghissue:`1077`: DOC: update fibernavigator link * :ghissue:`1069`: DOC: Small one -- we need this additional line of white space to render. * :ghissue:`1068`: Renamed test_shore for consistency * :ghissue:`1067`: Generate b vectors using disperse_charges * :ghissue:`1011`: Discrepancy with output of nlmeans.py * :ghissue:`1055`: WIP: Ivim implementation * :ghissue:`1065`: improve OMP parallelization with scheduling * :ghissue:`1062`: BF - fix CSD.predict to work with nd inputs. * :ghissue:`1057`: Workaround for https://github.com/nipy/dipy/issues/852 * :ghissue:`1037`: tracking.interfaces imports SlowAdcOpdfModel, but it is not defined * :ghissue:`1056`: Remove tracking interfaces * :ghissue:`813`: Windows 64-bit error in segment.featurespeed.extract * :ghissue:`1054`: Remove tracking interfaces * :ghissue:`1028`: BF: Predict DKI with a volume of S0 * :ghissue:`1041`: NF - Add PMF Threshold to Tractography * :ghissue:`1039`: Doc - fix definition of real_sph_harm functions * :ghissue:`1019`: MRG: fix heavy dependency check; no numpy for setup * :ghissue:`1018`: Fix: denspeed.pyx to give correct output for nlmeans * :ghissue:`1043`: DO NOT MERGE: Add a test of local tracking, using data from dipy.data. * :ghissue:`899`: SNR in contextual enhancement example * :ghissue:`991`: Documentation footer has 2008-2015 mentioned. * :ghissue:`1008`: [WIP] NF: Implementation of CHARMED model * :ghissue:`1030`: Fetcher files not found on Windows * :ghissue:`1035`: Fix for fetcher files in Windows * :ghissue:`1016`: viz.fvtk has no attribute 'ren' * :ghissue:`1033`: Viz surfaces * :ghissue:`1032`: Merge pull request #1 from nipy/master * :ghissue:`1029`: Errors building Cython extensions on Python 3.5 * :ghissue:`974`: Minor change in `tools/github_stats.py` * :ghissue:`1002`: Use absolute import in utils and scratch * :ghissue:`1014`: Progress bar works only for some data * :ghissue:`1013`: `dipy.data.make_fetcher` test fails with Python 3 * :ghissue:`1020`: Documentation does not mention Scipy as a dependency for VTK widgets. * :ghissue:`1023`: display in dsi example is broken * :ghissue:`1021`: Added warning for VTK not installed * :ghissue:`882`: PEP8 in reconst tests/benchmarks * :ghissue:`888`: PEP8 in tracking benchmarks/tests * :ghissue:`885`: PEP8 in testing * :ghissue:`902`: fix typo * :ghissue:`1024`: Documnetation fix for reconst_dsid.py * :ghissue:`979`: No figures in DKI example * :ghissue:`981`: Fixes #979 : No figures in DKI example - Add new line after figure * :ghissue:`958`: FIX: PEP8 in testing * :ghissue:`1005`: FIX: Use absolute imports in io * :ghissue:`997`: Use absolute import in io * :ghissue:`675`: Voxelwise stabilisation * :ghissue:`951`: Contextual Enhancement update: fix SNR issue, fix reference * :ghissue:`1015`: Fix progressbar of fetcher * :ghissue:`1012`: TST: install the dipy.data tests. 
* :ghissue:`992`: FIX: Update the import statements to use absolute import in core * :ghissue:`1003`: FIX: Change the import statements in direction * :ghissue:`996`: Use absolute import in dipy/direction * :ghissue:`1004`: FIX: Use absolute import in pkg_info * :ghissue:`998`: Use absolute import in dipy/pkg_info.py * :ghissue:`1006`: FIX: Use absolute import in utils and scratch * :ghissue:`1010`: Absolute Imports in Viz * :ghissue:`1007`: Use absolute import in viz * :ghissue:`929`: Fix PEP8 in data * :ghissue:`874`: PEP8 in data * :ghissue:`980`: Fix pep8 in reconst * :ghissue:`1017`: Fixes #1016 : Raises VTK not installed * :ghissue:`877`: PEP8 in external * :ghissue:`887`: PEP8 in tracking * :ghissue:`941`: BW: skimage.filter module name warning * :ghissue:`976`: Fix PEP8 in sims and remove unnecessary imports * :ghissue:`884`: PEP8 in sims * :ghissue:`956`: FIX: PEP8 in reconst/test and reconst/benchmarks * :ghissue:`955`: FIX: PEP8 in external * :ghissue:`952`: FIX: PEP8 in tracking and tracking benchmarks/tests * :ghissue:`982`: FIX: relative imports in dipy/align * :ghissue:`972`: Fixes #901 : Added documentation for "step" in dti * :ghissue:`901`: DTI `step` parameter not documented. * :ghissue:`995`: Use absolute import in dipy/data/__init__.py * :ghissue:`344`: Update citation page * :ghissue:`971`: Add progress bar feature to dipy.data.fetcher * :ghissue:`970`: Downloading data with dipy.data.fetcher does not show any progress bar * :ghissue:`986`: "pip3 install dipy" in Installation for python3 * :ghissue:`990`: No figures in DKI example * :ghissue:`989`: copyright 2008-2016 * :ghissue:`988`: doc/conf.py shows copyright 2008-2015. Should be 2016? * :ghissue:`975`: Use absolute import in imaffine, imwarp, metrics * :ghissue:`517`: TODO: Peaks to be removed from dipy.reconst in 0.10 * :ghissue:`977`: Relative import fix in dipy/align * :ghissue:`875`: PEP8 in denoise * :ghissue:`957`: FIX: PEP8 in denoise * :ghissue:`960`: PEP8 in sims #884 * :ghissue:`961`: PEP8 in reconst #880 * :ghissue:`962`: PEP8 in reconst #881 * :ghissue:`889`: PEP8 in utils * :ghissue:`959`: FIX: PEP8 in utils * :ghissue:`866`: PEP8 in test_metrics * :ghissue:`867`: PEP8 in test_parzenhist * :ghissue:`868`: PEP8 in test_scalespace * :ghissue:`869`: PEP8 in test_sumsqdiff * :ghissue:`870`: PEP8 in test_transforms * :ghissue:`871`: PEP8 in test_vector_fields * :ghissue:`864`: PEP8 in `test_imaffine` * :ghissue:`967`: Update index.rst correcting the date of release 0.11 * :ghissue:`862`: PEP8 in `test_crosscorr` * :ghissue:`873`: PEP8 in core * :ghissue:`831`: ACT tracking example gives weird streamlines * :ghissue:`876`: PEP8 in direction * :ghissue:`954`: FIX: PEP8 in direction * :ghissue:`965`: Fix typo * :ghissue:`968`: Use relative instead of absolute import * :ghissue:`948`: Fix PEP8 in boots * :ghissue:`872`: PEP8 in boots * :ghissue:`946`: FIX: PEP8 for test_sumsqdiff and test_scalespace * :ghissue:`964`: FIX: PEP8 in test_imaffine * :ghissue:`963`: FIX: PEP8 in core * :ghissue:`966`: fix typo * :ghissue:`947`: FIX: PEP8 for test files * :ghissue:`920`: STYLE:PEP8 for test_imaffine * :ghissue:`897`: PEP8 * :ghissue:`950`: PEP8 fixed in reconst/tests and reconst/benchmarks * :ghissue:`949`: Fixed Pep8 utils tracking testing denoise * :ghissue:`926`: Fix PEP8 in fixes * :ghissue:`878`: PEP8 in fixes * :ghissue:`939`: Fixed PEP8 in utils, denoise , tracking and testing * :ghissue:`945`: FIX: PEP8 in test_scalespace * :ghissue:`937`: BF : Clamping of the value of v in winding function * 
:ghissue:`930`: pep8 fix issue #896 - "continuation line over-indented for visual indent" * :ghissue:`943`: BF: Removed unsused code in slicer * :ghissue:`907`: DOC: switch to using mathjax for maths * :ghissue:`931`: dipy/tracking/streamlinespeed set_number_of_points crash when nb_points=0 * :ghissue:`932`: Fixes #931 : checks if nb_points=0 * :ghissue:`927`: Fix PEP8 in io and remove duplicate definition in test_bvectxt.py * :ghissue:`924`: in dipy/io/tests/test_bvectxt.py function with same name is defined twice * :ghissue:`879`: PEP8 in io * :ghissue:`913`: Fix pep8 in workflows * :ghissue:`891`: PEP8 in workflows * :ghissue:`938`: PEP8 issues solved in utils, testing and denoise * :ghissue:`935`: Setup: go on to version 0.12 development. * :ghissue:`934`: DOC: Update github stats for 0.11 as of today. * :ghissue:`933`: Updating release dates dipy-0.13.0/doc/release0.13.rst000066400000000000000000000167261317371701200157770ustar00rootroot00000000000000.. _release0.13: ==================================== Release notes for DIPY version 0.13 ==================================== GitHub stats for 2017/06/27 - 2017/10/24 (tag: 0.12.0) These lists are automatically generated, and may be incomplete or contain duplicates. The following 13 authors contributed 212 commits. * Ariel Rokem * Bennet Fauber * David Reagan * Eleftherios Garyfallidis * Guillaume Theaud * Jon Haitz Legarreta Gorroño * Marc-Alexandre Côté * Matthieu Dumont * Rafael Neto Henriques * Ranveer Aggarwal * Rutger Fick * Saber Sheybani * Serge Koudoro We closed a total of 115 issues, 39 pull requests and 76 regular issues; this is the full list (generated with the script :file:`tools/github_stats.py`): Pull Requests (39): * :ghpull:`1367`: BF: Import Streamlines object directly from nibabel. * :ghpull:`1361`: Windows instructions update + citation update * :ghpull:`1316`: DOC: Add a coding style guideline. * :ghpull:`1360`: [FIX] references order in workflow * :ghpull:`1348`: ENH: Add support for ArraySequence in`set_number_of_points` function * :ghpull:`1357`: Update tracking_quick_start.py rebase of #1332 * :ghpull:`1239`: Enable memory profiling of the examples. * :ghpull:`1356`: Add picking to slicer - rebase * :ghpull:`1351`: From tables to h5py * :ghpull:`1353`: FIX : improve epilogue accessibility for workflow * :ghpull:`1262`: VIZ: A lightweight UI for medical visualizations #6: 2D File Selector * :ghpull:`1352`: use legacy float array for printing numpy array * :ghpull:`1314`: DOC: Fix typos and formatting in .rst files and Python examples. * :ghpull:`1345`: DOC: Format README.rst file code blocks. * :ghpull:`1330`: ENH: Add Travis badge to README.rst. * :ghpull:`1315`: Remove GPL from our README. * :ghpull:`1328`: BUG: Address small_delta vs. big_delta flipped parameters. * :ghpull:`1329`: DOC: Fix typos in multi_io.py workflow file docstring. * :ghpull:`1336`: Test modification for windows 10 / numpy 1.14 * :ghpull:`1335`: Catch a more specific warning in test_csdeconv * :ghpull:`1319`: Correct white-space for fwdti example. * :ghpull:`1297`: Added eigh version of localpca to svd version * :ghpull:`1298`: Make TextActor2D extend UI instead of object * :ghpull:`1312`: Flags correction for windows * :ghpull:`1285`: mapmri using cvxpy instead of cvxopt * :ghpull:`1307`: PyTables Error-handling * :ghpull:`1310`: Fix error message * :ghpull:`1308`: Fix inversion in the dti mode doc * :ghpull:`1304`: DOC: Fix typos in dti.py reconstruction file doc. * :ghpull:`1303`: DOC: Add missing label to reciprocal space eq. 
* :ghpull:`1289`: MRG: Suppress a divide-by-zero warning. * :ghpull:`1288`: NF Add the parameter fa_operator in auto_response function * :ghpull:`1290`: Corrected a small error condition * :ghpull:`1279`: UI advanced fix * :ghpull:`1287`: Fix doc errors * :ghpull:`1286`: Last doc error fix on 0.12.x * :ghpull:`1284`: Added missing tutorials * :ghpull:`1278`: Moving ahead with 0.13 (dev version) * :ghpull:`1277`: One test (decimal issue) and a fix in viz_ui tutorial. Issues (76): * :ghissue:`1367`: BF: Import Streamlines object directly from nibabel. * :ghissue:`1366`: Circular imports in dipy.tracking.utils? * :ghissue:`1146`: Installation instructions for windows need to be updated * :ghissue:`1084`: Installation for windows developers using Anaconda needs to be updated * :ghissue:`1361`: Windows instructions update + citation update * :ghissue:`1248`: Windows doc installation update is needed for Python 3, Anaconda and VTK support * :ghissue:`1316`: DOC: Add a coding style guideline. * :ghissue:`1360`: [FIX] references order in workflow * :ghissue:`1359`: Epilogue's reference should be last not first * :ghissue:`1324`: WIP: Det track workflow and other improvements in workflows * :ghissue:`1348`: ENH: Add support for ArraySequence in`set_number_of_points` function * :ghissue:`1357`: Update tracking_quick_start.py rebase of #1332 * :ghissue:`1332`: Update tracking_quick_start.py * :ghissue:`1239`: Enable memory profiling of the examples. * :ghissue:`1356`: Add picking to slicer - rebase * :ghissue:`1334`: Add picking to slicer * :ghissue:`1351`: From tables to h5py * :ghissue:`1353`: FIX : improve epilogue accessibility for workflow * :ghissue:`1344`: Check accessibility of epilogue in Workflows * :ghissue:`1262`: VIZ: A lightweight UI for medical visualizations #6: 2D File Selector * :ghissue:`1352`: use legacy float array for printing numpy array * :ghissue:`1346`: Test broken in numpy 1.14 * :ghissue:`1333`: Trying QuickBundles (Python3 and vtk--> using: conda install -c clinicalgraphics vtk) * :ghissue:`1044`: Reconstruction FOD * :ghissue:`1247`: Interactor bug in viz_ui example * :ghissue:`1314`: DOC: Fix typos and formatting in .rst files and Python examples. * :ghissue:`1345`: DOC: Format README.rst file code blocks. * :ghissue:`1349`: Doctest FIX : use legacy printing * :ghissue:`1330`: ENH: Add Travis badge to README.rst. * :ghissue:`1337`: Coveralls seems baggy let's remove it * :ghissue:`1341`: ActiveAx model fitting using MIX framework * :ghissue:`1315`: Remove GPL from our README. * :ghissue:`1325`: Small is Big - Big is small (mapl - mapmri) * :ghissue:`1328`: BUG: Address small_delta vs. big_delta flipped parameters. * :ghissue:`1329`: DOC: Fix typos in multi_io.py workflow file docstring. * :ghissue:`1336`: Test modification for windows 10 / numpy 1.14 * :ghissue:`1323`: Warnings raised in csdeconv for upcoming numpy 1.14 * :ghissue:`1335`: Catch a more specific warning in test_csdeconv * :ghissue:`1042`: RF - move direction getters to dipy/direction/ * :ghissue:`1319`: Correct white-space for fwdti example. * :ghissue:`1317`: reconst_fwdti.py example figures not being rendered * :ghissue:`1297`: Added eigh version of localpca to svd version * :ghissue:`1313`: No module named 'vtkCommonCore' * :ghissue:`1318`: Mix framework with Cythonized func_mul * :ghissue:`1167`: Potential replacement for CVXOPT? * :ghissue:`1180`: WIP: replacing cvxopt with cvxpy. 
* :ghissue:`1298`: Make TextActor2D extend UI instead of object * :ghissue:`375`: Peak directiions test error on PPC * :ghissue:`1312`: Flags correction for windows * :ghissue:`804`: Wrong openmp flag on Windows * :ghissue:`1285`: mapmri using cvxpy instead of cvxopt * :ghissue:`662`: dipy/align/mattes.pyx doctest import error * :ghissue:`1307`: PyTables Error-handling * :ghissue:`1306`: Error-handling when pytables not installed * :ghissue:`1309`: step_helpers gives a wrong error message * :ghissue:`1310`: Fix error message * :ghissue:`1308`: Fix inversion in the dti mode doc * :ghissue:`1304`: DOC: Fix typos in dti.py reconstruction file doc. * :ghissue:`1303`: DOC: Add missing label to reciprocal space eq. * :ghissue:`1289`: MRG: Suppress a divide-by-zero warning. * :ghissue:`1293`: Garyfallidis recobundles * :ghissue:`1292`: Garyfallidis recobundles * :ghissue:`1288`: NF Add the parameter fa_operator in auto_response function * :ghissue:`1290`: Corrected a small error condition * :ghissue:`1279`: UI advanced fix * :ghissue:`1287`: Fix doc errors * :ghissue:`1286`: Last doc error fix on 0.12.x * :ghissue:`1284`: Added missing tutorials * :ghissue:`322`: Missing content in tracking.utils' documentation * :ghissue:`570`: The documentation for `dipy.viz` is not in the API reference * :ghissue:`1053`: WIP: Local pca and noise estimation * :ghissue:`881`: PEP8 in reconst * :ghissue:`880`: PEP8 in reconst * :ghissue:`1169`: Time for a new release - scipy 0.18? * :ghissue:`1278`: Moving ahead with 0.13 (dev version) * :ghissue:`1277`: One test (decimal issue) and a fix in viz_ui tutorial. dipy-0.13.0/doc/release0.6.rst000066400000000000000000000322621317371701200157120ustar00rootroot00000000000000=================================== Release notes for DIPY version 0.6 =================================== GitHub stats for 2011/02/12 - 2013/03/20 The following 13 authors contributed 972 commits. * Ariel Rokem * Bago Amirbekian * Eleftherios Garyfallidis * Emanuele Olivetti * Ian Nimmo-Smith * Maria Luisa Mandelli * Matthew Brett * Maxime Descoteaux * Michael Paquette * Samuel St-Jean * Stefan van der Walt * Yaroslav Halchenko * endolith We closed a total of 225 issues, 100 pull requests and 125 regular issues; this is the full list (generated with the script :file:`tools/github_stats.py`): Pull Requests (100): * :ghpull:`146`: BF - allow Bootstrap Wrapper to work with markov tracking * :ghpull:`143`: Garyfallidis tutorials 0.6 * :ghpull:`145`: Mdesco dti metrics * :ghpull:`141`: Peak extraction isbi * :ghpull:`142`: RF - always use theta and phi in that order, (not "phi, theta") * :ghpull:`140`: Sf2sh second try at correcting suggestions * :ghpull:`139`: Spherical function to spherical harmonics and back * :ghpull:`138`: Coding style fix for dsi_deconv * :ghpull:`137`: BF - check shapes before allclose * :ghpull:`136`: BF: add top-level benchmarking command * :ghpull:`135`: Refactor local maxima * :ghpull:`134`: BF - fix shm tests to accept antipodal directions as the same * :ghpull:`133`: Corrected test for Deconvolution after the discrete direction finder was removed * :ghpull:`124`: Remove direction finder * :ghpull:`77`: Rework tracking * :ghpull:`132`: A new fvtk function for visualizing fields of odfs * :ghpull:`131`: Add missing files * :ghpull:`130`: Implementation of DSI deconvolution from E.J. 
Canales-Rodriguez * :ghpull:`128`: Colorfa * :ghpull:`129`: RF - minor cleanup of pdf_odf code * :ghpull:`127`: Adding multi-tensor simulation * :ghpull:`126`: Improve local maxima * :ghpull:`122`: Removed calculation of gfa and other functions from inside the odf(sphere) of DSI and GQI * :ghpull:`103`: Major update of the website, with a few examples and with some additional minor RFs * :ghpull:`121`: NF: Allow the smoothing parameter to come through to rbf interpolation. * :ghpull:`120`: Fast squash fix * :ghpull:`116`: RF: common dtype for squash_ without result_type * :ghpull:`117`: Fix directions on TensorFit and add getitem * :ghpull:`119`: RF: raise errors for Python version dependencies * :ghpull:`118`: Seperate fa * :ghpull:`111`: RF - clean up _squash in multi_voxel and related code * :ghpull:`112`: RF: fix vec_val_vect logic, generalize for shape * :ghpull:`114`: BF: fix face and edge byte order for sphere load * :ghpull:`109`: Faster einsum * :ghpull:`110`: TST: This is only almost equal on XP, for some reason. * :ghpull:`108`: TST + STY: Use and assert_equal so that we get more information upon failure * :ghpull:`107`: RF: A.dot(B) => np.dot(A, B) for numpy < 1.5 * :ghpull:`102`: BF - Allow ndindex to work with older numpy than 1.6. * :ghpull:`106`: RF: allow optional scipy.spatial.Delaunay * :ghpull:`105`: Skip doctest decorator * :ghpull:`104`: RF: remove deprecated old parametric testing * :ghpull:`101`: WIP: Fix isnan windows * :ghpull:`100`: Small stuff * :ghpull:`94`: Multivoxel dsi and gqi are back! * :ghpull:`96`: ENH: Implement masking for the new TensorModel implementation. * :ghpull:`95`: NF fetch publicly available datasets * :ghpull:`26`: Noise * :ghpull:`84`: Non linear peak finding * :ghpull:`82`: DTI new api * :ghpull:`91`: Shm new api * :ghpull:`88`: NF - wrapper function for multi voxel models * :ghpull:`86`: DOC: Fixed some typos, etc in the FAQ * :ghpull:`90`: A simpler ndindex using generators. * :ghpull:`87`: RF - Provide shape as argument to ndindex. * :ghpull:`85`: Add fast ndindex. * :ghpull:`81`: RF - fixup peaks_from_model to take use remove_similar_vertices and * :ghpull:`79`: BF: Fixed projection plots. * :ghpull:`80`: RF - remove some old functions tools * :ghpull:`71`: ENH: Make the internals of the io module visible on tab completion in ip... * :ghpull:`76`: Yay, more gradient stuff * :ghpull:`75`: Rename L2norm to vector_norm * :ghpull:`74`: Gradient rf * :ghpull:`73`: RF/BF - removed duplicate vector_norm/L2norm * :ghpull:`72`: Mr bago model api * :ghpull:`68`: DSI seems working again - Have a look * :ghpull:`65`: RF: Make the docstring and call consistent with scipy.interpolate.Rbf. * :ghpull:`61`: RF - Refactor direction finding. * :ghpull:`60`: NF - Add key-value cache for use in models. * :ghpull:`63`: TST - Disable reconstruction methods that break the test suite. * :ghpull:`62`: BF - Fix missing import in peak finding tests. * :ghpull:`37`: cleanup refrences in the code to E1381S6_edcor* (these were removed from... * :ghpull:`55`: Ravel multi index * :ghpull:`58`: TST - skip doctest when matplotlib is not available * :ghpull:`59`: optional_traits is not needed anymore * :ghpull:`56`: TST: Following change to API in dipy.segment.quickbundles. * :ghpull:`52`: Matplotlib optional * :ghpull:`50`: NF - added subdivide method to sphere * :ghpull:`51`: Fix tracking utils * :ghpull:`48`: BF - Brought back _filter peaks and associated test. * :ghpull:`47`: RF - Removed reduce_antipodal from sphere. 
* :ghpull:`41`: NF - Add radial basis function interpolation on the sphere. * :ghpull:`39`: GradientTable * :ghpull:`40`: BF - Fix axis specification in sph_project. * :ghpull:`28`: Odf+shm api update * :ghpull:`36`: Nf hemisphere preview * :ghpull:`34`: RF - replace _filter_peaks with unique_vertices * :ghpull:`35`: BF - Fix imports from dipy.core.sphere. * :ghpull:`21`: Viz 2d * :ghpull:`32`: NF - Sphere class. * :ghpull:`30`: RF: Don't import all this every time. * :ghpull:`24`: TST: Fixing tests in reconst module. * :ghpull:`27`: DOC - Add reference to white matter diffusion values. * :ghpull:`25`: NF - Add prolate white matter as defaults for multi-tensor signal sim. * :ghpull:`22`: Updating my fork with the nipy master * :ghpull:`20`: RF - create OptionalImportError for traits imports * :ghpull:`19`: DOC: add comments and example to commit codes * :ghpull:`18`: DOC: update gitwash from source * :ghpull:`17`: Optional traits * :ghpull:`14`: DOC - fix frontpage example * :ghpull:`12`: BF(?): cart2sphere and sphere2cart are now invertible. * :ghpull:`11`: BF explicit type declaration and initialization for longest_track_len[AB] -- for cython 0.15 compatibility Issues (125): * :ghissue:`99`: RF - Separate direction finder from model fit. * :ghissue:`143`: Garyfallidis tutorials 0.6 * :ghissue:`144`: DTI metrics * :ghissue:`145`: Mdesco dti metrics * :ghissue:`123`: Web content and examples for 0.6 * :ghissue:`141`: Peak extraction isbi * :ghissue:`142`: RF - always use theta and phi in that order, (not "phi, theta") * :ghissue:`140`: Sf2sh second try at correcting suggestions * :ghissue:`139`: Spherical function to spherical harmonics and back * :ghissue:`23`: qball not properly import-able * :ghissue:`29`: Don't import everything when you import dipy * :ghissue:`138`: Coding style fix for dsi_deconv * :ghissue:`137`: BF - check shapes before allclose * :ghissue:`136`: BF: add top-level benchmarking command * :ghissue:`135`: Refactor local maxima * :ghissue:`134`: BF - fix shm tests to accept antipodal directions as the same * :ghissue:`133`: Corrected test for Deconvolution after the discrete direction finder was removed * :ghissue:`124`: Remove direction finder * :ghissue:`77`: Rework tracking * :ghissue:`132`: A new fvtk function for visualizing fields of odfs * :ghissue:`125`: BF: Remove 'mayavi' directory, to avoid triggering mayavi import warning... * :ghissue:`131`: Add missing files * :ghissue:`130`: Implementation of DSI deconvolution from E.J. Canales-Rodriguez * :ghissue:`128`: Colorfa * :ghissue:`129`: RF - minor cleanup of pdf_odf code * :ghissue:`127`: Adding multi-tensor simulation * :ghissue:`126`: Improve local maxima * :ghissue:`97`: BF - separate out storing of fit values in gqi * :ghissue:`122`: Removed calculation of gfa and other functions from inside the odf(sphere) of DSI and GQI * :ghissue:`103`: Major update of the website, with a few examples and with some additional minor RFs * :ghissue:`121`: NF: Allow the smoothing parameter to come through to rbf interpolation. 
* :ghissue:`120`: Fast squash fix * :ghissue:`116`: RF: common dtype for squash_ without result_type * :ghissue:`117`: Fix directions on TensorFit and add getitem * :ghissue:`119`: RF: raise errors for Python version dependencies * :ghissue:`118`: Seperate fa * :ghissue:`113`: RF - use min_diffusivity relative to 1 / max(bval) * :ghissue:`111`: RF - clean up _squash in multi_voxel and related code * :ghissue:`112`: RF: fix vec_val_vect logic, generalize for shape * :ghissue:`114`: BF: fix face and edge byte order for sphere load * :ghissue:`109`: Faster einsum * :ghissue:`110`: TST: This is only almost equal on XP, for some reason. * :ghissue:`98`: This is an update of PR #94 mostly typos and coding style * :ghissue:`108`: TST + STY: Use and assert_equal so that we get more information upon failure * :ghissue:`107`: RF: A.dot(B) => np.dot(A, B) for numpy < 1.5 * :ghissue:`102`: BF - Allow ndindex to work with older numpy than 1.6. * :ghissue:`106`: RF: allow optional scipy.spatial.Delaunay * :ghissue:`105`: Skip doctest decorator * :ghissue:`104`: RF: remove deprecated old parametric testing * :ghissue:`101`: WIP: Fix isnan windows * :ghissue:`100`: Small stuff * :ghissue:`94`: Multivoxel dsi and gqi are back! * :ghissue:`96`: ENH: Implement masking for the new TensorModel implementation. * :ghissue:`95`: NF fetch publicly available datasets * :ghissue:`26`: Noise * :ghissue:`84`: Non linear peak finding * :ghissue:`82`: DTI new api * :ghissue:`91`: Shm new api * :ghissue:`88`: NF - wrapper function for multi voxel models * :ghissue:`86`: DOC: Fixed some typos, etc in the FAQ * :ghissue:`89`: Consisten ndindex behaviour * :ghissue:`90`: A simpler ndindex using generators. * :ghissue:`87`: RF - Provide shape as argument to ndindex. * :ghissue:`85`: Add fast ndindex. * :ghissue:`81`: RF - fixup peaks_from_model to take use remove_similar_vertices and * :ghissue:`83`: Non linear peak finding * :ghissue:`78`: This PR replaces PR 70 * :ghissue:`79`: BF: Fixed projection plots. * :ghissue:`80`: RF - remove some old functions tools * :ghissue:`70`: New api dti * :ghissue:`71`: ENH: Make the internals of the io module visible on tab completion in ip... * :ghissue:`76`: Yay, more gradient stuff * :ghissue:`69`: New api and tracking refacotor * :ghissue:`75`: Rename L2norm to vector_norm * :ghissue:`74`: Gradient rf * :ghissue:`73`: RF/BF - removed duplicate vector_norm/L2norm * :ghissue:`72`: Mr bago model api * :ghissue:`66`: DOCS - docs for model api * :ghissue:`49`: Reworking tracking code. * :ghissue:`68`: DSI seems working again - Have a look * :ghissue:`65`: RF: Make the docstring and call consistent with scipy.interpolate.Rbf. * :ghissue:`61`: RF - Refactor direction finding. * :ghissue:`60`: NF - Add key-value cache for use in models. * :ghissue:`63`: TST - Disable reconstruction methods that break the test suite. * :ghissue:`62`: BF - Fix missing import in peak finding tests. * :ghissue:`37`: cleanup refrences in the code to E1381S6_edcor* (these were removed from... * :ghissue:`55`: Ravel multi index * :ghissue:`46`: BF: Trying to fix test failures. * :ghissue:`57`: TST: Reverted back to optional definition of the function to make TB hap... * :ghissue:`58`: TST - skip doctest when matplotlib is not available * :ghissue:`59`: optional_traits is not needed anymore * :ghissue:`56`: TST: Following change to API in dipy.segment.quickbundles. 
* :ghissue:`52`: Matplotlib optional * :ghissue:`50`: NF - added subdivide method to sphere * :ghissue:`51`: Fix tracking utils * :ghissue:`48`: BF - Brought back _filter peaks and associated test. * :ghissue:`47`: RF - Removed reduce_antipodal from sphere. * :ghissue:`41`: NF - Add radial basis function interpolation on the sphere. * :ghissue:`33`: Gradients Table class * :ghissue:`39`: GradientTable * :ghissue:`45`: BF - Fix sphere creation in triangle_subdivide. * :ghissue:`38`: Subdivide octahedron * :ghissue:`40`: BF - Fix axis specification in sph_project. * :ghissue:`28`: Odf+shm api update * :ghissue:`36`: Nf hemisphere preview * :ghissue:`34`: RF - replace _filter_peaks with unique_vertices * :ghissue:`35`: BF - Fix imports from dipy.core.sphere. * :ghissue:`21`: Viz 2d * :ghissue:`32`: NF - Sphere class. * :ghissue:`30`: RF: Don't import all this every time. * :ghissue:`24`: TST: Fixing tests in reconst module. * :ghissue:`27`: DOC - Add reference to white matter diffusion values. * :ghissue:`25`: NF - Add prolate white matter as defaults for multi-tensor signal sim. * :ghissue:`22`: Updating my fork with the nipy master * :ghissue:`20`: RF - create OptionalImportError for traits imports * :ghissue:`8`: X error BadRequest with fvtk.show * :ghissue:`19`: DOC: add comments and example to commit codes * :ghissue:`18`: DOC: update gitwash from source * :ghissue:`17`: Optional traits * :ghissue:`15`: Octahedron in dipy.core.triangle_subdivide has wrong faces * :ghissue:`14`: DOC - fix frontpage example * :ghissue:`12`: BF(?): cart2sphere and sphere2cart are now invertible. * :ghissue:`11`: BF explicit type declaration and initialization for longest_track_len[AB] -- for cython 0.15 compatibility * :ghissue:`5`: Add DSI reconstruction in Dipy * :ghissue:`9`: Bug in dipy.tracking.metrics.downsampling when we downsample a track to more than 20 points dipy-0.13.0/doc/release0.7.rst000066400000000000000000000125021317371701200157060ustar00rootroot00000000000000=================================== Release notes for DIPY version 0.7 =================================== GitHub stats for 2013/03/29 - 2013/12/23 (tag: 0.6.0) The following 16 authors contributed 814 commits. * Ariel Rokem * Bago Amirbekian * Eleftherios Garyfallidis * Emmanuel Caruyer * Erik Ziegler * Gabriel Girard * Jean-Christophe Houde * Kimberly Chan * Matthew Brett * Matthias Ekman * Matthieu Dumont * Mauro Zucchelli * Maxime Descoteaux * Samuel St-Jean * Stefan van der Walt * Sylvain Merlet We closed a total of 84 pull requests; this is the full list (generated with the script :file:`tools/github_stats.py`): Pull Requests (84): * :ghpull:`292`: Streamline tools * :ghpull:`289`: Examples checked for peaks_from_model * :ghpull:`288`: Link shore examples * :ghpull:`279`: Update release 0.7 examples' system * :ghpull:`257`: Continuous modelling: SHORE * :ghpull:`285`: Bad seeds cause segfault in EuDX * :ghpull:`274`: Peak directions update * :ghpull:`275`: Restore example * :ghpull:`261`: R2 term response function for Sharpening Deconvolution Transform (SDT) * :ghpull:`273`: Fixed typos + autopep8 * :ghpull:`268`: Add gfa shmfit * :ghpull:`260`: NF: Command line interface to QuickBundles. 
* :ghpull:`270`: Removed minmax_normalize from dipy.reconst.peaks * :ghpull:`247`: Model base * :ghpull:`267`: Refactoring peaks_from_model_parallel * :ghpull:`219`: Update forward sdeconv mat * :ghpull:`266`: BF - join pool before trying to delete temp directory * :ghpull:`265`: Peak from model issue #253 * :ghpull:`264`: peak_from_model tmp files * :ghpull:`263`: Refactoring peaks calculations to be out of odf.py * :ghpull:`262`: Handle cpu count exception * :ghpull:`255`: Fix peaks_from_model_parallel * :ghpull:`259`: Release 0.7 a few cleanups * :ghpull:`252`: Clean cc * :ghpull:`243`: NF Added norm input to interp_rbf and angle as an alternative norm. * :ghpull:`251`: Another cleanup for fvtk. This time the slicer function was simplified * :ghpull:`249`: Dsi metrics 2 * :ghpull:`239`: Segmentation based on rgb threshold + examples * :ghpull:`240`: Dsi metrics * :ghpull:`245`: Fix some rewording * :ghpull:`242`: A new streamtube visualization method and different fixes and cleanups for the fvtk module * :ghpull:`237`: WIP: cleanup docs / small refactor for median otsu * :ghpull:`221`: peaks_from_model now return peaks directions * :ghpull:`234`: BF: predict for cases when the ADC is multi-D and S0 is provided as a volume * :ghpull:`232`: Fix peak extraction default value of relative_peak_threshold * :ghpull:`227`: Fix closing upon download completion in fetcher * :ghpull:`230`: Tensor predict * :ghpull:`229`: BF: input.dtype is used per default * :ghpull:`210`: Brainextraction * :ghpull:`226`: SetInput in vtk5 is now SetInputData in vtk6 * :ghpull:`225`: fixed typo * :ghpull:`212`: Tensor visualization * :ghpull:`223`: Fix make examples for windows. * :ghpull:`222`: Fix restore bug * :ghpull:`217`: RF - update csdeconv to use SphHarmFit class to reduce code duplication. * :ghpull:`208`: Shm coefficients in peaks_from_model * :ghpull:`216`: BF - fixed mask_voxel_size bug and added test. Replaced promote_dtype wi... * :ghpull:`211`: Added a md5 check to each dataset. * :ghpull:`54`: Restore * :ghpull:`213`: Update to a more recent version of `six.py`. * :ghpull:`204`: Maxime's [Gallery] Reconst DTI example revisited * :ghpull:`207`: Added two new datasets online and updated fetcher.py. * :ghpull:`209`: Fixed typos in reconst/dti.py * :ghpull:`206`: DOC: update the docs to say that we support python 3 * :ghpull:`205`: RF: Minor corrections in index.rst and CSD example * :ghpull:`173`: Constrained Spherical Deconvolution and the Spherical Deconvolution Transform * :ghpull:`203`: RF: Rename tensor statistics to remove "tensor_" from them. * :ghpull:`202`: Typos * :ghpull:`201`: Bago's Rename sph basis functions corrected after rebasing and other minor lateral fixes * :ghpull:`191`: DOC - clarify docs for SphHarmModel * :ghpull:`199`: FIX: testfail due to Non-ASCII character \xe2 in markov.py * :ghpull:`189`: Shm small fixes * :ghpull:`196`: DOC: add reference section to ProbabilisticOdfWeightedTracker * :ghpull:`190`: BF - fix fit-tensor handling of file extensions and mask=none * :ghpull:`182`: RF - fix disperse_charges so that a large constant does not cause the po... * :ghpull:`183`: OPT: Modified dipy.core.sphere_stats.random_uniform_on_sphere, cf issue #181 * :ghpull:`185`: DOC: replace soureforge.net links with nipy.org * :ghpull:`180`: BF: fix Cython TypeError from negative indices to tuples * :ghpull:`179`: BF: doctest output difference workarounds * :ghpull:`176`: MRG: Py3 compat * :ghpull:`178`: RF: This function is superseded by read_bvals_bvecs. 
* :ghpull:`170`: Westin stats * :ghpull:`174`: RF: use $PYTHON variable for python invocation * :ghpull:`172`: DOC: Updated index.rst and refactored example segment_quickbundles.py * :ghpull:`169`: RF: refactor pyx / c file stamping for packaging * :ghpull:`168`: DOC: more updates to release notes * :ghpull:`167`: Merge maint * :ghpull:`166`: BF: pyc and created trk files were in eg archive * :ghpull:`160`: NF: add script to build dmgs from buildbot mpkgs * :ghpull:`164`: Calculation for mode of a tensor * :ghpull:`163`: Remove dti tensor * :ghpull:`161`: DOC: typo in the probabilistic tracking example. * :ghpull:`162`: DOC: update release notes * :ghpull:`159`: Rename install test scripts dipy-0.13.0/doc/release0.8.rst000066400000000000000000000561021317371701200157130ustar00rootroot00000000000000.. _release0.8: =================================== Release notes for DIPY version 0.8 =================================== GitHub stats for 2013/12/24 - 2014/12/26 (tag: 0.7.0) The following 19 authors contributed 1176 commits. * Andrew Lawrence * Ariel Rokem * Bago Amirbekian * Demian Wassermann * Eleftherios Garyfallidis * Gabriel Girard * Gregory R. Lee * Jean-Christophe Houde * Kesshi jordan * Marc-Alexandre Cote * Matthew Brett * Matthias Ekman * Matthieu Dumont * Mauro Zucchelli * Maxime Descoteaux * Michael Paquette * Omar Ocegueda * Samuel St-Jean * Stefan van der Walt We closed a total of 388 issues, 155 pull requests and 233 regular issues; this is the full list (generated with the script :file:`tools/github_stats.py`): Pull Requests (155): * :ghpull:`544`: Refactor propspeed - updated * :ghpull:`543`: MRG: update to plot_2d fixes and tests * :ghpull:`537`: NF: add requirements.txt file * :ghpull:`534`: BF: removed ftmp variable * :ghpull:`536`: Update Changelog * :ghpull:`535`: Happy New Year PR! * :ghpull:`531`: BF: extend pip timeout to reduce install failures * :ghpull:`527`: Remove npymath library from cython extensions * :ghpull:`528`: MRG: move conditional compiling to C * :ghpull:`530`: BF: work round ugly MSVC manifest bug * :ghpull:`529`: MRG: a couple of small cleanup fixes * :ghpull:`526`: Readme.rst and info.py update about the license * :ghpull:`525`: Added shore gpl warning in the readme * :ghpull:`524`: Replaced DiPy with DIPY in readme.rst and info.py * :ghpull:`523`: RF: copy includes list for extensions * :ghpull:`522`: DOC: Web-site release notes, and some updates on front page. * :ghpull:`521`: Life bots * :ghpull:`520`: Relaxing precision for win32 * :ghpull:`519`: Christmas PR! Correcting typos, linking and language for max odf tracking * :ghpull:`513`: BF + TST: Reinstated eig_from_lo_tri * :ghpull:`508`: Tests for reslicing * :ghpull:`515`: TST: Increasing testing on life. * :ghpull:`516`: TST: Reduce sensitivity on these tests. * :ghpull:`495`: NF - Deterministic Maximum Direction Getter * :ghpull:`514`: Website update * :ghpull:`510`: BF: another fvtk 5 to 6 incompatibility * :ghpull:`509`: DOC: Small fixes in documentation. * :ghpull:`497`: New sphere for ODF reconstruction * :ghpull:`460`: Sparse Fascicle Model * :ghpull:`499`: DOC: Warn about the GPL license of SHORE. * :ghpull:`491`: RF - Make peaks_from_model part of dipy.direction * :ghpull:`501`: TST: Test for both data with and w/0 b0. * :ghpull:`507`: BF - use different sort method to avoid mergsort for older numpy. 
* :ghpull:`504`: Bug fix float overflow in estimate_sigma * :ghpull:`494`: Fix round * :ghpull:`503`: Fixed compatibility issues between vtk 5 and 6 * :ghpull:`498`: DTI `min_signal` * :ghpull:`471`: Use importlib instead of __import__ * :ghpull:`419`: LiFE * :ghpull:`489`: Fix diffeomorphic registration test failures * :ghpull:`484`: Clear tabs from examples for website * :ghpull:`490`: DOC: corrected typos in the tracking PR * :ghpull:`341`: Traco Redesign * :ghpull:`483`: NF: Find the closest vertex on a sphere for an input vector. * :ghpull:`488`: BF: fix travis version setting * :ghpull:`485`: RF: deleted unused files * :ghpull:`482`: Skipping tests for different versions of Scipy for optimize.py * :ghpull:`480`: Enhance SLR to allow for series of registrations * :ghpull:`479`: Report on coverage for old scipy. * :ghpull:`481`: BF - make examples was confusing files with similar names, fixed * :ghpull:`476`: Fix optimize defaults for older scipy versions for L-BFGS-B * :ghpull:`478`: TST: Increase the timeout on the Travis pip install * :ghpull:`477`: MAINT+TST: update minimum nibabel dependency * :ghpull:`474`: RF: switch travis tests to use virtualenvs * :ghpull:`473`: TST: Make Travis provide verbose test outputs. * :ghpull:`472`: ENH: GradientTable now calculates qvalues * :ghpull:`469`: Fix evolution save win32 * :ghpull:`463`: DOC: update RESTORE tutorial to use new noise estimation technique * :ghpull:`466`: BF: cannot quote command for Windows * :ghpull:`465`: BF: increased SCIPY version definition flag to 0.12 * :ghpull:`462`: BF: fix writing history to file in Python 3 * :ghpull:`433`: Added local variance estimation * :ghpull:`458`: DOC: docstring fixes in dipy/align/crosscorr.pyx * :ghpull:`448`: BF: fix link to npy_math function * :ghpull:`447`: BF: supposed fix for the gh-439, but still unable to reproduce OP. * :ghpull:`443`: Fix buildbots errors introduced with the registration module * :ghpull:`456`: MRG: relax threshold for failing test + cleanup * :ghpull:`454`: DOC: fix docstring for compile-time checker * :ghpull:`453`: BF: refactor conditional compiling again * :ghpull:`446`: Streamline-based Linear Registration * :ghpull:`445`: NF: generate config.pxi file with Cython DEF vars * :ghpull:`440`: DOC - add info on how to change default tempdir (multiprocessing). * :ghpull:`431`: Change the writeable flag back to its original state when finished. * :ghpull:`408`: Symmetric diffeomorphic non-linear registration * :ghpull:`438`: Missing a blank line in examples/tracking_quick_start.py * :ghpull:`405`: fixed frozen windows executable issue * :ghpull:`418`: RF: move script running code into own module * :ghpull:`437`: Update Cython download URL * :ghpull:`435`: BF: replaced non-ascii character in dipy.reconst.dti line 956 * :ghpull:`434`: DOC: References for the DTI ODF calculation. * :ghpull:`430`: Revert "Support read-only numpy array." * :ghpull:`427`: Support read-only numpy array. * :ghpull:`421`: Fix nans in gfa * :ghpull:`422`: BF: Use the short version to verify scipy version. * :ghpull:`415`: RF - move around some of the predict stuff * :ghpull:`420`: Rename README.txt to README.rst * :ghpull:`413`: Faster spherical harmonics * :ghpull:`416`: Removed memory_leak unittest in test_strealine.py * :ghpull:`417`: Fix streamlinespeed tests * :ghpull:`411`: Fix memory leak in cython functions length and set_number_of_points * :ghpull:`409`: minor corrections to pipe function * :ghpull:`396`: TST : this is not exactly equal on some platforms. 
* :ghpull:`407`: BF: fixed problem with NANs in odfdeconv * :ghpull:`406`: Revert "Merge pull request #346 from omarocegueda/syn_registration" * :ghpull:`402`: Fix AE test error in test_peak_directions_thorough * :ghpull:`403`: Added mask shape check in tenfit * :ghpull:`346`: Symmetric diffeomorphic non-linear registration * :ghpull:`401`: BF: fix skiptest invocation for missing mpl * :ghpull:`340`: CSD fit issue * :ghpull:`397`: BF: fix import statement for get_cmap * :ghpull:`393`: RF: update Cython dependency * :ghpull:`382`: Cythonized version of streamlines' resample() and length() functions. * :ghpull:`386`: DOC: Small fix in the xval example. * :ghpull:`335`: Xval * :ghpull:`352`: Fix utils docs and affine * :ghpull:`384`: odf_sh_sharpening function fix and new test * :ghpull:`374`: MRG: bumpy numpy requirement to 1.5 / compat fixes * :ghpull:`380`: DOC: Update a few Dipy links to link to the correct repo * :ghpull:`378`: Fvtk cleanup * :ghpull:`379`: fixed typos in shm.py * :ghpull:`339`: FVTK small improvement: Arbitrary matplotlib colormaps can be used to color spherical functions * :ghpull:`373`: Fixed discrepancies between doc and code * :ghpull:`371`: RF: don't use -fopenmp flag if it doesn't work * :ghpull:`372`: BF: set integer type for crossplatform compilation * :ghpull:`337`: Piesno * :ghpull:`370`: Tone down the front page a bit. * :ghpull:`364`: Add the mode param for border management. * :ghpull:`368`: New banner for website * :ghpull:`367`: MRG: refactor API generation for sharing * :ghpull:`363`: RF: make cvxopt optional for tests * :ghpull:`362`: Changes to fix issue #361: matrix sizing in tracking.utils.connectivity_matrix * :ghpull:`360`: Added missing $ sign * :ghpull:`355`: DOC: Updated API change document to add target function change * :ghpull:`357`: Changed the logo to full black as the one that I sent as suggestion for HBM and ISMRM * :ghpull:`356`: Auto-generate API docs * :ghpull:`349`: Added api changes file to track breaks of backwards compatibility * :ghpull:`348`: Website update * :ghpull:`347`: DOC: Updating citations * :ghpull:`345`: TST: Make travis look at test coverage. * :ghpull:`338`: Add positivity constraint on the propagator * :ghpull:`334`: Fix vec2vec * :ghpull:`324`: Constrained optimisation for SHORE to set E(0)=1 when the CVXOPT package is available * :ghpull:`320`: Denoising images using non-local means * :ghpull:`331`: DOC: correct number of seeds in streamline_tools example * :ghpull:`326`: Fix brain extraction example * :ghpull:`327`: add small and big delta * :ghpull:`323`: Shore pdf grid speed improvement * :ghpull:`319`: DOC: Updated the highlights to promote the release and the upcoming paper * :ghpull:`318`: Corrected some rendering problems with the installation instructions * :ghpull:`317`: BF: more problems with path quoting in windows * :ghpull:`316`: MRG: more fixes for windows script tests * :ghpull:`315`: BF: EuDX odf_vertices param has no default value * :ghpull:`305`: DOC: Some more details in installation instructions. * :ghpull:`314`: BF - callable response does not work * :ghpull:`311`: Bf seeds from mask * :ghpull:`309`: MRG: Windows test fixes * :ghpull:`308`: typos + pep stuf * :ghpull:`303`: BF: try and fix nibabel setup requirement * :ghpull:`304`: Update README.txt * :ghpull:`302`: Time for 0.8.0.dev! * :ghpull:`299`: BF: Put back utils.length. 
* :ghpull:`301`: Updated info.py and copyright year * :ghpull:`300`: Bf fetcher bug on windows * :ghpull:`298`: TST - rework tests so that we do not need to download any data * :ghpull:`290`: DOC: Started generating 0.7 release notes. Issues (233): * :ghissue:`544`: Refactor propspeed - updated * :ghissue:`540`: MRG: refactor propspeed * :ghissue:`542`: TST: Testing regtools * :ghissue:`543`: MRG: update to plot_2d fixes and tests * :ghissue:`541`: BUG: plot_2d_diffeomorphic_map * :ghissue:`439`: ValueError in RESTORE * :ghissue:`538`: WIP: TEST: relaxed precision * :ghissue:`449`: local variable 'ftmp' referenced before assignment * :ghissue:`537`: NF: add requirements.txt file * :ghissue:`534`: BF: removed ftmp variable * :ghissue:`536`: Update Changelog * :ghissue:`535`: Happy New Year PR! * :ghissue:`512`: reconst.dti.eig_from_lo_tri * :ghissue:`467`: Optimize failure on Windows * :ghissue:`464`: Diffeomorphic registration test failures on PPC * :ghissue:`531`: BF: extend pip timeout to reduce install failures * :ghissue:`527`: Remove npymath library from cython extensions * :ghissue:`528`: MRG: move conditional compiling to C * :ghissue:`530`: BF: work round ugly MSVC manifest bug * :ghissue:`529`: MRG: a couple of small cleanup fixes * :ghissue:`526`: Readme.rst and info.py update about the license * :ghissue:`525`: Added shore gpl warning in the readme * :ghissue:`524`: Replaced DiPy with DIPY in readme.rst and info.py * :ghissue:`523`: RF: copy includes list for extensions * :ghissue:`522`: DOC: Web-site release notes, and some updates on front page. * :ghissue:`521`: Life bots * :ghissue:`520`: Relaxing precision for win32 * :ghissue:`519`: Christmas PR! Correcting typos, linking and language for max odf tracking * :ghissue:`513`: BF + TST: Reinstated eig_from_lo_tri * :ghissue:`508`: Tests for reslicing * :ghissue:`515`: TST: Increasing testing on life. * :ghissue:`516`: TST: Reduce sensitivity on these tests. * :ghissue:`495`: NF - Deterministic Maximum Direction Getter * :ghissue:`514`: Website update * :ghissue:`510`: BF: another fvtk 5 to 6 incompatibility * :ghissue:`511`: Error estimating tensors on hcp dataset * :ghissue:`509`: DOC: Small fixes in documentation. * :ghissue:`497`: New sphere for ODF reconstruction * :ghissue:`460`: Sparse Fascicle Model * :ghissue:`499`: DOC: Warn about the GPL license of SHORE. * :ghissue:`491`: RF - Make peaks_from_model part of dipy.direction * :ghissue:`501`: TST: Test for both data with and w/0 b0. * :ghissue:`507`: BF - use different sort method to avoid mergsort for older numpy. * :ghissue:`505`: stable/wheezy debian -- ar.argsort(kind='mergesort') causes TypeError: requested sort not available for type ( * :ghissue:`506`: RF: Use integer datatype for unique_rows sorting. * :ghissue:`504`: Bug fix float overflow in estimate_sigma * :ghissue:`399`: Multiprocessing runtime error in Windows 64 bit * :ghissue:`383`: typo in multi tensor fit example * :ghissue:`350`: typo in SNR example * :ghissue:`424`: test more python versions with travis * :ghissue:`493`: BF - older C compliers do not have round in math.h, uisng dpy_math instead * :ghissue:`494`: Fix round * :ghissue:`503`: Fixed compatibility issues between vtk 5 and 6 * :ghissue:`500`: SHORE hyp2F1 * :ghissue:`502`: Fix record vtk6 * :ghissue:`498`: DTI `min_signal` * :ghissue:`496`: Revert "BF: supposed fix for the gh-439, but still unable to reproduce O... 
* :ghissue:`492`: TST - new DTI test to help develop min_signal handling * :ghissue:`471`: Use importlib instead of __import__ * :ghissue:`419`: LiFE * :ghissue:`489`: Fix diffeomorphic registration test failures * :ghissue:`484`: Clear tabs from examples for website * :ghissue:`490`: DOC: corrected typos in the tracking PR * :ghissue:`341`: Traco Redesign * :ghissue:`410`: Faster spherical harmonics implemenation * :ghissue:`483`: NF: Find the closest vertex on a sphere for an input vector. * :ghissue:`487`: Travis Problem * :ghissue:`488`: BF: fix travis version setting * :ghissue:`485`: RF: deleted unused files * :ghissue:`486`: cvxopt is gpl licensed * :ghissue:`482`: Skipping tests for different versions of Scipy for optimize.py * :ghissue:`480`: Enhance SLR to allow for series of registrations * :ghissue:`479`: Report on coverage for old scipy. * :ghissue:`481`: BF - make examples was confusing files with similar names, fixed * :ghissue:`428`: WIP: refactor travis building * :ghissue:`429`: WIP: Refactor travising * :ghissue:`476`: Fix optimize defaults for older scipy versions for L-BFGS-B * :ghissue:`478`: TST: Increase the timeout on the Travis pip install * :ghissue:`477`: MAINT+TST: update minimum nibabel dependency * :ghissue:`475`: Does the optimizer still need `tmp_files`? * :ghissue:`474`: RF: switch travis tests to use virtualenvs * :ghissue:`473`: TST: Make Travis provide verbose test outputs. * :ghissue:`470`: Enhance SLR with applying series of transformations and fix optimize bug for parameter missing in old scipy versions * :ghissue:`472`: ENH: GradientTable now calculates qvalues * :ghissue:`469`: Fix evolution save win32 * :ghissue:`463`: DOC: update RESTORE tutorial to use new noise estimation technique * :ghissue:`466`: BF: cannot quote command for Windows * :ghissue:`461`: Buildbot failures with missing 'nit' key in dipy.core.optimize * :ghissue:`465`: BF: increased SCIPY version definition flag to 0.12 * :ghissue:`462`: BF: fix writing history to file in Python 3 * :ghissue:`433`: Added local variance estimation * :ghissue:`432`: auto estimate the standard deviation globally for nlmeans * :ghissue:`451`: Warning for DTI normalization * :ghissue:`458`: DOC: docstring fixes in dipy/align/crosscorr.pyx * :ghissue:`448`: BF: fix link to npy_math function * :ghissue:`447`: BF: supposed fix for the gh-439, but still unable to reproduce OP. * :ghissue:`443`: Fix buildbots errors introduced with the registration module * :ghissue:`456`: MRG: relax threshold for failing test + cleanup * :ghissue:`455`: Test failure on `master` * :ghissue:`454`: DOC: fix docstring for compile-time checker * :ghissue:`450`: Find if replacing matrix44 from streamlinear with compose_matrix from dipy.core.geometry is a good idea * :ghissue:`453`: BF: refactor conditional compiling again * :ghissue:`446`: Streamline-based Linear Registration * :ghissue:`452`: Replace raise by auto normalization when creating a gradient table with un-normalized bvecs. * :ghissue:`398`: assert AE < 2. failure in test_peak_directions_thorough * :ghissue:`444`: heads up - MKL error in parallel mode * :ghissue:`445`: NF: generate config.pxi file with Cython DEF vars * :ghissue:`440`: DOC - add info on how to change default tempdir (multiprocessing). * :ghissue:`431`: Change the writeable flag back to its original state when finished. 
* :ghissue:`408`: Symmetric diffeomorphic non-linear registration * :ghissue:`333`: Bundle alignment * :ghissue:`438`: Missing a blank line in examples/tracking_quick_start.py * :ghissue:`426`: nlmeans_3d breaks with mask=None * :ghissue:`405`: fixed frozen windows executable issue * :ghissue:`418`: RF: move script running code into own module * :ghissue:`437`: Update Cython download URL * :ghissue:`435`: BF: replaced non-ascii character in dipy.reconst.dti line 956 * :ghissue:`434`: DOC: References for the DTI ODF calculation. * :ghissue:`425`: NF added class to save streamlines in vtk format * :ghissue:`430`: Revert "Support read-only numpy array." * :ghissue:`427`: Support read-only numpy array. * :ghissue:`421`: Fix nans in gfa * :ghissue:`422`: BF: Use the short version to verify scipy version. * :ghissue:`415`: RF - move around some of the predict stuff * :ghissue:`420`: Rename README.txt to README.rst * :ghissue:`413`: Faster spherical harmonics * :ghissue:`416`: Removed memory_leak unittest in test_strealine.py * :ghissue:`417`: Fix streamlinespeed tests * :ghissue:`411`: Fix memory leak in cython functions length and set_number_of_points * :ghissue:`412`: Use simple multiplication instead exponentiation and pow * :ghissue:`409`: minor corrections to pipe function * :ghissue:`396`: TST : this is not exactly equal on some platforms. * :ghissue:`407`: BF: fixed problem with NANs in odfdeconv * :ghissue:`406`: Revert "Merge pull request #346 from omarocegueda/syn_registration" * :ghissue:`402`: Fix AE test error in test_peak_directions_thorough * :ghissue:`403`: Added mask shape check in tenfit * :ghissue:`346`: Symmetric diffeomorphic non-linear registration * :ghissue:`401`: BF: fix skiptest invocation for missing mpl * :ghissue:`340`: CSD fit issue * :ghissue:`397`: BF: fix import statement for get_cmap * :ghissue:`393`: RF: update Cython dependency * :ghissue:`391`: memory usage: 16GB wasn't sufficient * :ghissue:`382`: Cythonized version of streamlines' resample() and length() functions. * :ghissue:`386`: DOC: Small fix in the xval example. * :ghissue:`385`: cross_validation example doesn't render properly * :ghissue:`335`: Xval * :ghissue:`352`: Fix utils docs and affine * :ghissue:`384`: odf_sh_sharpening function fix and new test * :ghissue:`374`: MRG: bumpy numpy requirement to 1.5 / compat fixes * :ghissue:`381`: Bago fix utils docs and affine * :ghissue:`380`: DOC: Update a few Dipy links to link to the correct repo * :ghissue:`378`: Fvtk cleanup * :ghissue:`379`: fixed typos in shm.py * :ghissue:`376`: BF: Adjust the dimensionality of the peak_values, if provided. * :ghissue:`377`: Demianw fvtk colormap * :ghissue:`339`: FVTK small improvement: Arbitrary matplotlib colormaps can be used to color spherical functions * :ghissue:`373`: Fixed discrepancies between doc and code * :ghissue:`371`: RF: don't use -fopenmp flag if it doesn't work * :ghissue:`372`: BF: set integer type for crossplatform compilation * :ghissue:`337`: Piesno * :ghissue:`370`: Tone down the front page a bit. * :ghissue:`364`: Add the mode param for border management. 
* :ghissue:`368`: New banner for website * :ghissue:`367`: MRG: refactor API generation for sharing * :ghissue:`359`: cvxopt dependency * :ghissue:`363`: RF: make cvxopt optional for tests * :ghissue:`361`: Matrix size wrong for tracking.utils.connectivity_matrix * :ghissue:`362`: Changes to fix issue #361: matrix sizing in tracking.utils.connectivity_matrix * :ghissue:`360`: Added missing $ sign * :ghissue:`358`: typo in doc * :ghissue:`355`: DOC: Updated API change document to add target function change * :ghissue:`357`: Changed the logo to full black as the one that I sent as suggestion for HBM and ISMRM * :ghissue:`356`: Auto-generate API docs * :ghissue:`349`: Added api changes file to track breaks of backwards compatibility * :ghissue:`348`: Website update * :ghissue:`347`: DOC: Updating citations * :ghissue:`345`: TST: Make travis look at test coverage. * :ghissue:`338`: Add positivity constraint on the propagator * :ghissue:`334`: Fix vec2vec * :ghissue:`343`: Please Ignore this PR! * :ghissue:`324`: Constrained optimisation for SHORE to set E(0)=1 when the CVXOPT package is available * :ghissue:`277`: WIP: PIESNO framework for estimating the underlying std of the gaussian distribution * :ghissue:`336`: Demianw shore e0 constrained * :ghissue:`235`: WIP: Cross-validation * :ghissue:`329`: WIP: Fix vec2vec * :ghissue:`320`: Denoising images using non-local means * :ghissue:`331`: DOC: correct number of seeds in streamline_tools example * :ghissue:`330`: DOC: number of seeds per voxel, inconsistent documentation? * :ghissue:`326`: Fix brain extraction example * :ghissue:`327`: add small and big delta * :ghissue:`323`: Shore pdf grid speed improvement * :ghissue:`319`: DOC: Updated the highlights to promote the release and the upcoming paper * :ghissue:`318`: Corrected some rendering problems with the installation instructions * :ghissue:`317`: BF: more problems with path quoting in windows * :ghissue:`316`: MRG: more fixes for windows script tests * :ghissue:`315`: BF: EuDX odf_vertices param has no default value * :ghissue:`312`: Sphere and default used through the code * :ghissue:`305`: DOC: Some more details in installation instructions. * :ghissue:`314`: BF - callable response does not work * :ghissue:`16`: quickie: 'from raw data to tractographies' documentation implies dipy can't do anything with nonisotropic voxel sizes * :ghissue:`311`: Bf seeds from mask * :ghissue:`307`: Streamline_tools example stops working when I change density from 1 to 2 * :ghissue:`241`: Wrong normalization in peaks_from_model * :ghissue:`248`: Clarify dsi example * :ghissue:`220`: Add ndindex to peaks_from_model * :ghissue:`253`: Parallel peaksFromModel timing out on buildbot * :ghissue:`256`: writing data to /tmp peaks_from_model * :ghissue:`278`: tenmodel.bvec, not existing anymore? * :ghissue:`282`: fvtk documentation is incomprehensible * :ghissue:`228`: buildbot error in mask.py * :ghissue:`197`: DOC: some docstrings are not rendered correctly * :ghissue:`181`: OPT: Change dipy.core.sphere_stats.random_uniform_on_sphere * :ghissue:`177`: Extension test in dipy_fit_tensor seems brittle * :ghissue:`171`: Fix auto_attrs * :ghissue:`31`: Plotting in test suite * :ghissue:`42`: RuntimeWarning in dti.py * :ghissue:`43`: Problems with edges and faces in create_half_unit_sphere * :ghissue:`53`: Is ravel_multi_index a new thing? 
* :ghissue:`64`: Fix examples that rely on old API and removed data-sets * :ghissue:`67`: viz.projections.sph_projection is broken * :ghissue:`92`: dti.fa division by 0 warning in tests * :ghissue:`306`: Tests fail after windows 32 bit installation and running import dipy; dipy.test() * :ghissue:`310`: Windows test failure for tracking test_rmi * :ghissue:`309`: MRG: Windows test fixes * :ghissue:`308`: typos + pep stuf * :ghissue:`303`: BF: try and fix nibabel setup requirement * :ghissue:`304`: Update README.txt * :ghissue:`302`: Time for 0.8.0.dev! * :ghissue:`299`: BF: Put back utils.length. * :ghissue:`301`: Updated info.py and copyright year * :ghissue:`300`: Bf fetcher bug on windows * :ghissue:`298`: TST - rework tests so that we do not need to download any data * :ghissue:`290`: DOC: Started generating 0.7 release notes. dipy-0.13.0/doc/release0.9.rst000066400000000000000000000122701317371701200157120ustar00rootroot00000000000000.. _release0.9: =================================== Release notes for DIPY version 0.9 =================================== GitHub stats for 2015/01/06 - 2015/03/18 (tag: 0.8.0) The following 12 authors contributed 235 commits. * Ariel Rokem * Bago Amirbekian * Chantal Tax * Eleftherios Garyfallidis * Gabriel Girard * Marc-Alexandre Côté * Matthew Brett * Maxime Descoteaux * Omar Ocegueda * Qiyuan Tian * Samuel St-Jean * Stefan van der Walt We closed a total of 80 issues, 35 pull requests and 45 regular issues; this is the full list (generated with the script :file:`tools/github_stats.py`): Pull Requests (35): * :ghpull:`594`: DOC + PEP8: Mostly just line-wrapping. * :ghpull:`575`: Speeding up LiFE * :ghpull:`595`: BF: csd predict multi * :ghpull:`599`: BF: use dpy_rint instead of round for Windows * :ghpull:`603`: Fix precision error in test_center_of_mass * :ghpull:`601`: BF: Some versions (<0.18) of numpy don't have nanmean. * :ghpull:`598`: Fixed undetected compilation errors prior to Cython 0.22 * :ghpull:`596`: DOC + PEP8: Clean up a few typos, PEP8 things, etc. * :ghpull:`593`: DOC: fixed some typos and added a notes section to make the document mor... * :ghpull:`588`: Nf csd calibration * :ghpull:`565`: Adding New Tissue Classifiers * :ghpull:`589`: DOC: minor typographic corrections * :ghpull:`584`: DOC: explain the changes made to the integration lenght in GQI2. * :ghpull:`568`: Quickbundles2 * :ghpull:`559`: SFM for multi b-value data * :ghpull:`586`: BF: all_tensor_evecs should rotate from eye(3) to e0. * :ghpull:`574`: Affine registration PR1: Transforms. * :ghpull:`581`: BF: Normalization of GQI2 `gqi_vector`. * :ghpull:`580`: docstring for tensor fit was wrong * :ghpull:`579`: RF: Compatibility with scipy 0.11 * :ghpull:`577`: BF: update cython signatures with except values * :ghpull:`553`: RF: use cholesky to solve csd * :ghpull:`552`: Small refactor of viz.regtools * :ghpull:`569`: DOC: How to install vtk using conda. * :ghpull:`571`: Bf cart2sphere * :ghpull:`557`: NF: geodesic anisotropy * :ghpull:`566`: DOC: Some small fixes to the documentation of SFM. * :ghpull:`563`: RF: Cleanup functions that refer to some data that no longer exists here... * :ghpull:`564`: fixed typo * :ghpull:`561`: Added option to return the number of voxels fitting the fa threshold * :ghpull:`554`: DOC: Link to @francopestilli's matlab implementation of LiFE. * :ghpull:`556`: RF: change config variable to C define * :ghpull:`550`: Added non-local means in Changelog * :ghpull:`551`: Website update * :ghpull:`549`: DOC: Update download link. 
Issues (45):

* :ghissue:`594`: DOC + PEP8: Mostly just line-wrapping.
* :ghissue:`575`: Speeding up LiFE
* :ghissue:`595`: BF: csd predict multi
* :ghissue:`599`: BF: use dpy_rint instead of round for Windows
* :ghissue:`603`: Fix precision error in test_center_of_mass
* :ghissue:`602`: Precision error in test_feature_center_of_mass on 32-bit Linux
* :ghissue:`601`: BF: Some versions (<0.18) of numpy don't have nanmean.
* :ghissue:`598`: Fixed undetected compilation errors prior to Cython 0.22
* :ghissue:`597`: tracking module not building on cython 0.22
* :ghissue:`596`: DOC + PEP8: Clean up a few typos, PEP8 things, etc.
* :ghissue:`404`: A better way to create a response function for CSD
* :ghissue:`593`: DOC: fixed some typos and added a notes section to make the document mor...
* :ghissue:`588`: Nf csd calibration
* :ghissue:`565`: Adding New Tissue Classifiers
* :ghissue:`589`: DOC: minor typographic corrections
* :ghissue:`584`: DOC: explain the changes made to the integration lenght in GQI2.
* :ghissue:`568`: Quickbundles2
* :ghissue:`559`: SFM for multi b-value data
* :ghissue:`586`: BF: all_tensor_evecs should rotate from eye(3) to e0.
* :ghissue:`585`: NF: Initial file strcuture skeleton for amico implementation
* :ghissue:`574`: Affine registration PR1: Transforms.
* :ghissue:`581`: BF: Normalization of GQI2 `gqi_vector`.
* :ghissue:`580`: docstring for tensor fit was wrong
* :ghissue:`579`: RF: Compatibility with scipy 0.11
* :ghissue:`577`: BF: update cython signatures with except values
* :ghissue:`553`: RF: use cholesky to solve csd
* :ghissue:`552`: Small refactor of viz.regtools
* :ghissue:`569`: DOC: How to install vtk using conda.
* :ghissue:`571`: Bf cart2sphere
* :ghissue:`557`: NF: geodesic anisotropy
* :ghissue:`567`: NF - added function to fetch/read stanford pve maps
* :ghissue:`566`: DOC: Some small fixes to the documentation of SFM.
* :ghissue:`414`: NF - added anatomically-constrained tractography (ACT) tissue classifier
* :ghissue:`560`: dipy.data: three_shells_voxels is not there
* :ghissue:`563`: RF: Cleanup functions that refer to some data that no longer exists here...
* :ghissue:`564`: fixed typo
* :ghissue:`561`: Added option to return the number of voxels fitting the fa threshold
* :ghissue:`554`: DOC: Link to @francopestilli's matlab implementation of LiFE.
* :ghissue:`556`: RF: change config variable to C define
* :ghissue:`555`: Use chatroom for dev communications
* :ghissue:`354`: Test failures of 0.7.1 on wheezy and squeeze 32bit
* :ghissue:`532`: SPARC buildbot fail in multiprocessing test
* :ghissue:`550`: Added non-local means in Changelog
* :ghissue:`551`: Website update
* :ghissue:`549`: DOC: Update download link.
dipy-0.13.0/doc/sphinxext/000077500000000000000000000000001317371701200153415ustar00rootroot00000000000000dipy-0.13.0/doc/sphinxext/docscrape.py000066400000000000000000000422351317371701200176640ustar00rootroot00000000000000"""Extract reference documentation from the NumPy source tree.

"""
from __future__ import division, absolute_import, print_function

import inspect
import textwrap
import re
import pydoc
from warnings import warn
import collections
import sys


class Reader(object):
    """A line-based string reader.

    """
    def __init__(self, data):
        """
        Parameters
        ----------
        data : str
           String with lines separated by '\n'.

        """
        if isinstance(data, list):
            self._str = data
        else:
            self._str = data.split('\n')  # store string as list of lines

        self.reset()

    def __getitem__(self, n):
        return self._str[n]

    def reset(self):
        self._l = 0  # current line nr

    def read(self):
        if not self.eof():
            out = self[self._l]
            self._l += 1
            return out
        else:
            return ''

    def seek_next_non_empty_line(self):
        for l in self[self._l:]:
            if l.strip():
                break
            else:
                self._l += 1

    def eof(self):
        return self._l >= len(self._str)

    def read_to_condition(self, condition_func):
        start = self._l
        for line in self[start:]:
            if condition_func(line):
                return self[start:self._l]
            self._l += 1
            if self.eof():
                return self[start:self._l+1]
        return []

    def read_to_next_empty_line(self):
        self.seek_next_non_empty_line()

        def is_empty(line):
            return not line.strip()

        return self.read_to_condition(is_empty)

    def read_to_next_unindented_line(self):
        def is_unindented(line):
            return (line.strip() and (len(line.lstrip()) == len(line)))
        return self.read_to_condition(is_unindented)

    def peek(self, n=0):
        if self._l + n < len(self._str):
            return self[self._l + n]
        else:
            return ''

    def is_empty(self):
        return not ''.join(self._str).strip()


class NumpyDocString(collections.Mapping):

    def __init__(self, docstring, config={}):
        docstring = textwrap.dedent(docstring).split('\n')

        self._doc = Reader(docstring)
        self._parsed_data = {
            'Signature': '',
            'Summary': [''],
            'Extended Summary': [],
            'Parameters': [],
            'Returns': [],
            'Yields': [],
            'Raises': [],
            'Warns': [],
            'Other Parameters': [],
            'Attributes': [],
            'Methods': [],
            'See Also': [],
            'Notes': [],
            'Warnings': [],
            'References': '',
            'Examples': '',
            'index': {}
        }

        self._parse()

    def __getitem__(self, key):
        return self._parsed_data[key]

    def __setitem__(self, key, val):
        if key not in self._parsed_data:
            warn("Unknown section %s" % key)
        else:
            self._parsed_data[key] = val

    def __iter__(self):
        return iter(self._parsed_data)

    def __len__(self):
        return len(self._parsed_data)

    def _is_at_section(self):
        self._doc.seek_next_non_empty_line()

        if self._doc.eof():
            return False

        l1 = self._doc.peek().strip()  # e.g. Parameters

        if l1.startswith('.. index::'):
            return True

        l2 = self._doc.peek(1).strip()  # ---------- or ==========
        return l2.startswith('-'*len(l1)) or l2.startswith('='*len(l1))

    def _strip(self, doc):
        i = 0
        j = 0
        for i, line in enumerate(doc):
            if line.strip():
                break

        for j, line in enumerate(doc[::-1]):
            if line.strip():
                break

        return doc[i:len(doc)-j]

    def _read_to_next_section(self):
        section = self._doc.read_to_next_empty_line()

        while not self._is_at_section() and not self._doc.eof():
            if not self._doc.peek(-1).strip():  # previous line was empty
                section += ['']

            section += self._doc.read_to_next_empty_line()

        return section

    def _read_sections(self):
        while not self._doc.eof():
            data = self._read_to_next_section()
            name = data[0].strip()

            if name.startswith('..'):  # index section
                yield name, data[1:]
            elif len(data) < 2:
                yield StopIteration
            else:
                yield name, self._strip(data[2:])

    def _parse_param_list(self, content):
        r = Reader(content)
        params = []
        while not r.eof():
            header = r.read().strip()
            if ' : ' in header:
                arg_name, arg_type = header.split(' : ')[:2]
            else:
                arg_name, arg_type = header, ''

            desc = r.read_to_next_unindented_line()
            desc = dedent_lines(desc)

            params.append((arg_name, arg_type, desc))

        return params

    _name_rgx = re.compile(r"^\s*(:(?P<role>\w+):`(?P<name>[a-zA-Z0-9_.-]+)`|"
                           r" (?P<name2>[a-zA-Z0-9_.-]+))\s*", re.X)

    def _parse_see_also(self, content):
        """
        func_name : Descriptive text
            continued text
        another_func_name : Descriptive text
        func_name1, func_name2, :meth:`func_name`, func_name3

        """
        items = []

        def parse_item_name(text):
            """Match ':role:`name`' or 'name'"""
            m = self._name_rgx.match(text)
            if m:
                g = m.groups()
                if g[1] is None:
                    return g[3], None
                else:
                    return g[2], g[1]
            raise ValueError("%s is not a item name" % text)

        def push_item(name, rest):
            if not name:
                return
            name, role = parse_item_name(name)
            items.append((name, list(rest), role))
            del rest[:]

        current_func = None
        rest = []

        for line in content:
            if not line.strip():
                continue

            m = self._name_rgx.match(line)
            if m and line[m.end():].strip().startswith(':'):
                push_item(current_func, rest)
                current_func, line = line[:m.end()], line[m.end():]
                rest = [line.split(':', 1)[1].strip()]
                if not rest[0]:
                    rest = []
            elif not line.startswith(' '):
                push_item(current_func, rest)
                current_func = None
                if ',' in line:
                    for func in line.split(','):
                        if func.strip():
                            push_item(func, [])
                elif line.strip():
                    current_func = line
            elif current_func is not None:
                rest.append(line.strip())
        push_item(current_func, rest)
        return items

    def _parse_index(self, section, content):
        """
        .. index: default
           :refguide: something, else, and more
        """

        def strip_each_in(lst):
            return [s.strip() for s in lst]

        out = {}
        section = section.split('::')
        if len(section) > 1:
            out['default'] = strip_each_in(section[1].split(','))[0]
        for line in content:
            line = line.split(':')
            if len(line) > 2:
                out[line[1]] = strip_each_in(line[2].split(','))
        return out

    def _parse_summary(self):
        """Grab signature (if given) and summary"""
        if self._is_at_section():
            return

        # If several signatures present, take the last one
        while True:
            summary = self._doc.read_to_next_empty_line()
            summary_str = " ".join([s.strip() for s in summary]).strip()
            if re.compile('^([\w., ]+=)?\s*[\w\.]+\(.*\)$').match(summary_str):
                self['Signature'] = summary_str
                if not self._is_at_section():
                    continue
            break

        if summary is not None:
            self['Summary'] = summary

        if not self._is_at_section():
            self['Extended Summary'] = self._read_to_next_section()

    def _parse(self):
        self._doc.reset()
        self._parse_summary()

        sections = list(self._read_sections())
        section_names = set([section for section, content in sections])

        has_returns = 'Returns' in section_names
        has_yields = 'Yields' in section_names
        # We could do more tests, but we are not. Arbitrarily.
        if has_returns and has_yields:
            msg = 'Docstring contains both a Returns and Yields section.'
            raise ValueError(msg)

        for (section, content) in sections:
            if not section.startswith('..'):
                section = (s.capitalize() for s in section.split(' '))
                section = ' '.join(section)
            if section in ('Parameters', 'Returns', 'Yields', 'Raises',
                           'Warns', 'Other Parameters', 'Attributes',
                           'Methods'):
                self[section] = self._parse_param_list(content)
            elif section.startswith('.. index::'):
                self['index'] = self._parse_index(section, content)
            elif section == 'See Also':
                self['See Also'] = self._parse_see_also(content)
            else:
                self[section] = content

    # string conversion routines

    def _str_header(self, name, symbol='-'):
        return [name, len(name)*symbol]

    def _str_indent(self, doc, indent=4):
        out = []
        for line in doc:
            out += [' '*indent + line]
        return out

    def _str_signature(self):
        if self['Signature']:
            return [self['Signature'].replace('*', '\*')] + ['']
        else:
            return ['']

    def _str_summary(self):
        if self['Summary']:
            return self['Summary'] + ['']
        else:
            return []

    def _str_extended_summary(self):
        if self['Extended Summary']:
            return self['Extended Summary'] + ['']
        else:
            return []

    def _str_param_list(self, name):
        out = []
        if self[name]:
            out += self._str_header(name)
            for param, param_type, desc in self[name]:
                if param_type:
                    out += ['%s : %s' % (param, param_type)]
                else:
                    out += [param]
                out += self._str_indent(desc)
            out += ['']
        return out

    def _str_section(self, name):
        out = []
        if self[name]:
            out += self._str_header(name)
            out += self[name]
            out += ['']
        return out

    def _str_see_also(self, func_role):
        if not self['See Also']:
            return []
        out = []
        out += self._str_header("See Also")
        last_had_desc = True
        for func, desc, role in self['See Also']:
            if role:
                link = ':%s:`%s`' % (role, func)
            elif func_role:
                link = ':%s:`%s`' % (func_role, func)
            else:
                link = "`%s`_" % func
            if desc or last_had_desc:
                out += ['']
                out += [link]
            else:
                out[-1] += ", %s" % link
            if desc:
                out += self._str_indent([' '.join(desc)])
                last_had_desc = True
            else:
                last_had_desc = False
        out += ['']
        return out

    def _str_index(self):
        idx = self['index']
        out = []
        out += ['.. index:: %s' % idx.get('default', '')]
        for section, references in idx.items():
            if section == 'default':
                continue
            out += [' :%s: %s' % (section, ', '.join(references))]
        return out

    def __str__(self, func_role=''):
        out = []
        out += self._str_signature()
        out += self._str_summary()
        out += self._str_extended_summary()
        for param_list in ('Parameters', 'Returns', 'Yields',
                           'Other Parameters', 'Raises', 'Warns'):
            out += self._str_param_list(param_list)
        out += self._str_section('Warnings')
        out += self._str_see_also(func_role)
        for s in ('Notes', 'References', 'Examples'):
            out += self._str_section(s)
        for param_list in ('Attributes', 'Methods'):
            out += self._str_param_list(param_list)
        out += self._str_index()
        return '\n'.join(out)


def indent(str, indent=4):
    indent_str = ' '*indent
    if str is None:
        return indent_str
    lines = str.split('\n')
    return '\n'.join(indent_str + l for l in lines)


def dedent_lines(lines):
    """Deindent a list of lines maximally"""
    return textwrap.dedent("\n".join(lines)).split("\n")


def header(text, style='-'):
    return text + '\n' + style*len(text) + '\n'


class FunctionDoc(NumpyDocString):
    def __init__(self, func, role='func', doc=None, config={}):
        self._f = func
        self._role = role  # e.g. "func" or "meth"

        if doc is None:
            if func is None:
                raise ValueError("No function or docstring given")
            doc = inspect.getdoc(func) or ''
        NumpyDocString.__init__(self, doc)

        if not self['Signature'] and func is not None:
            func, func_name = self.get_func()
            try:
                # try to read signature
                if sys.version_info[0] >= 3:
                    argspec = inspect.getfullargspec(func)
                else:
                    argspec = inspect.getargspec(func)
                argspec = inspect.formatargspec(*argspec)
                argspec = argspec.replace('*', '\*')
                signature = '%s%s' % (func_name, argspec)
            except TypeError as e:
                signature = '%s()' % func_name
            self['Signature'] = signature

    def get_func(self):
        func_name = getattr(self._f, '__name__', self.__class__.__name__)
        if inspect.isclass(self._f):
            func = getattr(self._f, '__call__', self._f.__init__)
        else:
            func = self._f
        return func, func_name

    def __str__(self):
        out = ''

        func, func_name = self.get_func()
        signature = self['Signature'].replace('*', '\*')

        roles = {'func': 'function',
                 'meth': 'method'}

        if self._role:
            if self._role not in roles:
                print("Warning: invalid role %s" % self._role)
            out += '.. %s:: %s\n \n\n' % (roles.get(self._role, ''),
                                          func_name)

        out += super(FunctionDoc, self).__str__(func_role=self._role)
        return out


class ClassDoc(NumpyDocString):

    extra_public_methods = ['__call__']

    def __init__(self, cls, doc=None, modulename='', func_doc=FunctionDoc,
                 config={}):
        if not inspect.isclass(cls) and cls is not None:
            raise ValueError("Expected a class or None, but got %r" % cls)
        self._cls = cls

        self.show_inherited_members = config.get(
                    'show_inherited_class_members', True)

        if modulename and not modulename.endswith('.'):
            modulename += '.'
self._mod = modulename if doc is None: if cls is None: raise ValueError("No class or documentation string given") doc = pydoc.getdoc(cls) NumpyDocString.__init__(self, doc) if config.get('show_class_members', True): def splitlines_x(s): if not s: return [] else: return s.splitlines() for field, items in [('Methods', self.methods), ('Attributes', self.properties)]: if not self[field]: doc_list = [] for name in sorted(items): try: doc_item = pydoc.getdoc(getattr(self._cls, name)) doc_list.append((name, '', splitlines_x(doc_item))) except AttributeError: pass # method doesn't exist self[field] = doc_list @property def methods(self): if self._cls is None: return [] return [name for name, func in inspect.getmembers(self._cls) if ((not name.startswith('_') or name in self.extra_public_methods) and isinstance(func, collections.Callable) and self._is_show_member(name))] @property def properties(self): if self._cls is None: return [] return [name for name, func in inspect.getmembers(self._cls) if (not name.startswith('_') and (func is None or isinstance(func, property) or inspect.isgetsetdescriptor(func)) and self._is_show_member(name))] def _is_show_member(self, name): if self.show_inherited_members: return True # show all class members if name not in self._cls.__dict__: return False # class member is inherited, we do not show it return True dipy-0.13.0/doc/sphinxext/docscrape_sphinx.py000066400000000000000000000171171317371701200212560ustar00rootroot00000000000000import re, inspect, textwrap, pydoc import sphinx from docscrape import NumpyDocString, FunctionDoc, ClassDoc class SphinxDocString(NumpyDocString): def __init__(self, docstring, config={}): self.use_plots = config.get('use_plots', False) NumpyDocString.__init__(self, docstring, config=config) # string conversion routines def _str_header(self, name, symbol='`'): return ['.. rubric:: ' + name, ''] def _str_field_list(self, name): return [':' + name + ':'] def _str_indent(self, doc, indent=4): out = [] for line in doc: out += [' '*indent + line] return out def _str_signature(self): return [''] if self['Signature']: return ['``%s``' % self['Signature']] + [''] else: return [''] def _str_summary(self): return self['Summary'] + [''] def _str_extended_summary(self): return self['Extended Summary'] + [''] def _str_param_list(self, name): out = [] if self[name]: out += self._str_field_list(name) out += [''] for param,param_type,desc in self[name]: out += self._str_indent(['**%s** : %s' % (param.strip(), param_type)]) out += [''] out += self._str_indent(desc,8) out += [''] return out @property def _obj(self): if hasattr(self, '_cls'): return self._cls elif hasattr(self, '_f'): return self._f return None def _str_member_list(self, name): """ Generate a member listing, autosummary:: table where possible, and a table where not. """ out = [] if self[name]: out += ['.. rubric:: %s' % name, ''] prefix = getattr(self, '_name', '') if prefix: prefix = '~%s.' % prefix autosum = [] others = [] for param, param_type, desc in self[name]: param = param.strip() if not self._obj or hasattr(self._obj, param): autosum += [" %s%s" % (prefix, param)] else: others.append((param, param_type, desc)) if autosum: out += ['.. 
autosummary::', ' :toctree:', ''] out += autosum if others: maxlen_0 = max([len(x[0]) for x in others]) maxlen_1 = max([len(x[1]) for x in others]) hdr = "="*maxlen_0 + " " + "="*maxlen_1 + " " + "="*10 fmt = '%%%ds %%%ds ' % (maxlen_0, maxlen_1) n_indent = maxlen_0 + maxlen_1 + 4 out += [hdr] for param, param_type, desc in others: out += [fmt % (param.strip(), param_type)] out += self._str_indent(desc, n_indent) out += [hdr] out += [''] return out def _str_section(self, name): out = [] if self[name]: out += self._str_header(name) out += [''] content = textwrap.dedent("\n".join(self[name])).split("\n") out += content out += [''] return out def _str_see_also(self, func_role): out = [] if self['See Also']: see_also = super(SphinxDocString, self)._str_see_also(func_role) out = ['.. seealso::', ''] out += self._str_indent(see_also[2:]) return out def _str_warnings(self): out = [] if self['Warnings']: out = ['.. warning::', ''] out += self._str_indent(self['Warnings']) return out def _str_index(self): idx = self['index'] out = [] if len(idx) == 0: return out out += ['.. index:: %s' % idx.get('default','')] for section, references in idx.iteritems(): if section == 'default': continue elif section == 'refguide': out += [' single: %s' % (', '.join(references))] else: out += [' %s: %s' % (section, ','.join(references))] return out def _str_references(self): out = [] if self['References']: out += self._str_header('References') if isinstance(self['References'], str): self['References'] = [self['References']] out.extend(self['References']) out += [''] # Latex collects all references to a separate bibliography, # so we need to insert links to it if sphinx.__version__ >= "0.6": out += ['.. only:: latex',''] else: out += ['.. latexonly::',''] items = [] for line in self['References']: m = re.match(r'.. \[([a-z0-9._-]+)\]', line, re.I) if m: items.append(m.group(1)) out += [' ' + ", ".join(["[%s]_" % item for item in items]), ''] return out def _str_examples(self): examples_str = "\n".join(self['Examples']) if (self.use_plots and 'import matplotlib' in examples_str and 'plot::' not in examples_str): out = [] out += self._str_header('Examples') out += ['.. 
plot::', ''] out += self._str_indent(self['Examples']) out += [''] return out else: return self._str_section('Examples') def __str__(self, indent=0, func_role="obj"): out = [] out += self._str_signature() out += self._str_index() + [''] out += self._str_summary() out += self._str_extended_summary() for param_list in ('Parameters', 'Returns', 'Other Parameters', 'Raises', 'Warns'): out += self._str_param_list(param_list) out += self._str_warnings() out += self._str_see_also(func_role) out += self._str_section('Notes') out += self._str_references() out += self._str_examples() for param_list in ('Attributes', 'Methods'): out += self._str_member_list(param_list) out = self._str_indent(out,indent) return '\n'.join(out) class SphinxFunctionDoc(SphinxDocString, FunctionDoc): def __init__(self, obj, doc=None, config={}): self.use_plots = config.get('use_plots', False) FunctionDoc.__init__(self, obj, doc=doc, config=config) class SphinxClassDoc(SphinxDocString, ClassDoc): def __init__(self, obj, doc=None, func_doc=None, config={}): self.use_plots = config.get('use_plots', False) ClassDoc.__init__(self, obj, doc=doc, func_doc=None, config=config) class SphinxObjDoc(SphinxDocString): def __init__(self, obj, doc=None, config={}): self._f = obj SphinxDocString.__init__(self, doc, config=config) def get_doc_object(obj, what=None, doc=None, config={}): if what is None: if inspect.isclass(obj): what = 'class' elif inspect.ismodule(obj): what = 'module' elif callable(obj): what = 'function' else: what = 'object' if what == 'class': return SphinxClassDoc(obj, func_doc=SphinxFunctionDoc, doc=doc, config=config) elif what in ('function', 'method'): return SphinxFunctionDoc(obj, doc=doc, config=config) else: if doc is None: doc = pydoc.getdoc(obj) return SphinxObjDoc(obj, doc, config=config) dipy-0.13.0/doc/sphinxext/github.py000066400000000000000000000124031317371701200171750ustar00rootroot00000000000000"""Define text roles for GitHub * ghissue - Issue * ghpull - Pull Request * ghuser - User Adapted from bitbucket example here: https://bitbucket.org/birkenfeld/sphinx-contrib/src/tip/bitbucket/sphinxcontrib/bitbucket.py Authors ------- * Doug Hellmann * Min RK """ # # Original Copyright (c) 2010 Doug Hellmann. All rights reserved. # from docutils import nodes, utils from docutils.parsers.rst.roles import set_classes def make_link_node(rawtext, app, type, slug, options): """Create a link to a github resource. :param rawtext: Text being replaced with link node. :param app: Sphinx application context :param type: Link type (issues, changeset, etc.) :param slug: ID of the thing to link to :param options: Options dictionary passed to role func. """ try: base = app.config.github_project_url if not base: raise AttributeError if not base.endswith('/'): base += '/' except AttributeError as err: raise ValueError('github_project_url configuration value is not set (%s)' % str(err)) ref = base + type + '/' + slug + '/' set_classes(options) prefix = "#" if type == 'pull': prefix = "PR " + prefix node = nodes.reference(rawtext, prefix + utils.unescape(slug), refuri=ref, **options) return node def ghissue_role(name, rawtext, text, lineno, inliner, options={}, content=[]): """Link to a GitHub issue. Returns 2 part tuple containing list of nodes to insert into the document and a list of system messages. Both are allowed to be empty. :param name: The role name used in the document. :param rawtext: The entire markup snippet, with role. :param text: The text marked with the role. 
:param lineno: The line number where rawtext appears in the input. :param inliner: The inliner instance that called us. :param options: Directive options for customization. :param content: The directive content for customization. """ try: issue_num = int(text) if issue_num <= 0: raise ValueError except ValueError: msg = inliner.reporter.error( 'GitHub issue number must be a number greater than or equal to 1; ' '"%s" is invalid.' % text, line=lineno) prb = inliner.problematic(rawtext, rawtext, msg) return [prb], [msg] app = inliner.document.settings.env.app #app.info('issue %r' % text) if 'pull' in name.lower(): category = 'pull' elif 'issue' in name.lower(): category = 'issues' else: msg = inliner.reporter.error( 'GitHub roles include "ghpull" and "ghissue", ' '"%s" is invalid.' % name, line=lineno) prb = inliner.problematic(rawtext, rawtext, msg) return [prb], [msg] node = make_link_node(rawtext, app, category, str(issue_num), options) return [node], [] def ghuser_role(name, rawtext, text, lineno, inliner, options={}, content=[]): """Link to a GitHub user. Returns 2 part tuple containing list of nodes to insert into the document and a list of system messages. Both are allowed to be empty. :param name: The role name used in the document. :param rawtext: The entire markup snippet, with role. :param text: The text marked with the role. :param lineno: The line number where rawtext appears in the input. :param inliner: The inliner instance that called us. :param options: Directive options for customization. :param content: The directive content for customization. """ app = inliner.document.settings.env.app #app.info('user link %r' % text) ref = 'https://www.github.com/' + text node = nodes.reference(rawtext, text, refuri=ref, **options) return [node], [] def ghcommit_role(name, rawtext, text, lineno, inliner, options={}, content=[]): """Link to a GitHub commit. Returns 2 part tuple containing list of nodes to insert into the document and a list of system messages. Both are allowed to be empty. :param name: The role name used in the document. :param rawtext: The entire markup snippet, with role. :param text: The text marked with the role. :param lineno: The line number where rawtext appears in the input. :param inliner: The inliner instance that called us. :param options: Directive options for customization. :param content: The directive content for customization. """ app = inliner.document.settings.env.app #app.info('user link %r' % text) try: base = app.config.github_project_url if not base: raise AttributeError if not base.endswith('/'): base += '/' except AttributeError as err: raise ValueError('github_project_url configuration value is not set (%s)' % str(err)) ref = base + text node = nodes.reference(rawtext, text[:6], refuri=ref, **options) return [node], [] def setup(app): """Install the plugin. :param app: Sphinx application context. """ app.info('Initializing GitHub plugin') app.add_role('ghissue', ghissue_role) app.add_role('ghpull', ghissue_role) app.add_role('ghuser', ghuser_role) app.add_role('ghcommit', ghcommit_role) app.add_config_value('github_project_url', None, 'env') return dipy-0.13.0/doc/sphinxext/math_dollar.py000066400000000000000000000037731317371701200202130ustar00rootroot00000000000000import re def dollars_to_math(source): r""" Replace dollar signs with backticks. More precisely, do a regular expression search. Replace a plain dollar sign ($) by a backtick (`). Replace an escaped dollar sign (\$) by a dollar sign ($). 
Don't change a dollar sign preceded or followed by a backtick (`$ or $`), because of strings like "``$HOME``". Don't make any changes on lines starting with spaces, because those are indented and hence part of a block of code or examples. This also doesn't replaces dollar signs enclosed in curly braces, to avoid nested math environments, such as :: $f(n) = 0 \text{ if $n$ is prime}$ Thus the above line would get changed to `f(n) = 0 \text{ if $n$ is prime}` """ s = "\n".join(source) if s.find("$") == -1: return # This searches for "$blah$" inside a pair of curly braces -- # don't change these, since they're probably coming from a nested # math environment. So for each match, we replace it with a temporary # string, and later on we substitute the original back. global _data _data = {} def repl(matchobj): global _data s = matchobj.group(0) t = "___XXX_REPL_%d___" % len(_data) _data[t] = s return t s = re.sub(r"({[^{}$]*\$[^{}$]*\$[^{}]*})", repl, s) # matches $...$ dollars = re.compile(r"(?= 3: sixu = lambda s: s else: sixu = lambda s: unicode(s, 'unicode_escape') def mangle_docstrings(app, what, name, obj, options, lines, reference_offset=[0]): cfg = {'use_plots': app.config.numpydoc_use_plots, 'show_class_members': app.config.numpydoc_show_class_members, 'show_inherited_class_members': app.config.numpydoc_show_inherited_class_members, 'class_members_toctree': app.config.numpydoc_class_members_toctree} u_NL = sixu('\n') if what == 'module': # Strip top title pattern = '^\\s*[#*=]{4,}\\n[a-z0-9 -]+\\n[#*=]{4,}\\s*' title_re = re.compile(sixu(pattern), re.I | re.S) lines[:] = title_re.sub(sixu(''), u_NL.join(lines)).split(u_NL) else: doc = get_doc_object(obj, what, u_NL.join(lines), config=cfg) if sys.version_info[0] >= 3: doc = str(doc) else: doc = unicode(doc) lines[:] = doc.split(u_NL) if (app.config.numpydoc_edit_link and hasattr(obj, '__name__') and obj.__name__): if hasattr(obj, '__module__'): v = dict(full_name=sixu("%s.%s") % (obj.__module__, obj.__name__)) else: v = dict(full_name=obj.__name__) lines += [sixu(''), sixu('.. htmlonly::'), sixu('')] lines += [sixu(' %s') % x for x in (app.config.numpydoc_edit_link % v).split("\n")] # replace reference numbers so that there are no duplicates references = [] for line in lines: line = line.strip() m = re.match(sixu('^.. \\[([a-z0-9_.-])\\]'), line, re.I) if m: references.append(m.group(1)) # start renaming from the longest string, to avoid overwriting parts references.sort(key=lambda x: -len(x)) if references: for i, line in enumerate(lines): for r in references: if re.match(sixu('^\\d+$'), r): new_r = sixu("R%d") % (reference_offset[0] + int(r)) else: new_r = sixu("%s%d") % (r, reference_offset[0]) lines[i] = lines[i].replace(sixu('[%s]_') % r, sixu('[%s]_') % new_r) lines[i] = lines[i].replace(sixu('.. [%s]') % r, sixu('.. 
[%s]') % new_r) reference_offset[0] += len(references) def mangle_signature(app, what, name, obj, options, sig, retann): # Do not try to inspect classes that don't define `__init__` if (inspect.isclass(obj) and (not hasattr(obj, '__init__') or 'initializes x; see ' in pydoc.getdoc(obj.__init__))): return '', '' if not (isinstance(obj, collections.Callable) or hasattr(obj, '__argspec_is_invalid_')): return if not hasattr(obj, '__doc__'): return doc = SphinxDocString(pydoc.getdoc(obj)) if doc['Signature']: sig = re.sub(sixu("^[^(]*"), sixu(""), doc['Signature']) return sig, sixu('') def setup(app, get_doc_object_=get_doc_object): if not hasattr(app, 'add_config_value'): return # probably called by nose, better bail out global get_doc_object get_doc_object = get_doc_object_ app.connect('autodoc-process-docstring', mangle_docstrings) app.connect('autodoc-process-signature', mangle_signature) app.add_config_value('numpydoc_edit_link', None, False) app.add_config_value('numpydoc_use_plots', None, False) app.add_config_value('numpydoc_show_class_members', True, True) app.add_config_value('numpydoc_show_inherited_class_members', True, True) app.add_config_value('numpydoc_class_members_toctree', True, True) # Extra mangling domains app.add_domain(NumpyPythonDomain) app.add_domain(NumpyCDomain) # ------------------------------------------------------------------------------ # Docstring-mangling domains # ------------------------------------------------------------------------------ from docutils.statemachine import ViewList from sphinx.domains.c import CDomain from sphinx.domains.python import PythonDomain class ManglingDomainBase(object): directive_mangling_map = {} def __init__(self, *a, **kw): super(ManglingDomainBase, self).__init__(*a, **kw) self.wrap_mangling_directives() def wrap_mangling_directives(self): for name, objtype in list(self.directive_mangling_map.items()): self.directives[name] = wrap_mangling_directive( self.directives[name], objtype) class NumpyPythonDomain(ManglingDomainBase, PythonDomain): name = 'np' directive_mangling_map = { 'function': 'function', 'class': 'class', 'exception': 'class', 'method': 'function', 'classmethod': 'function', 'staticmethod': 'function', 'attribute': 'attribute', } indices = [] class NumpyCDomain(ManglingDomainBase, CDomain): name = 'np-c' directive_mangling_map = { 'function': 'function', 'member': 'attribute', 'macro': 'function', 'type': 'class', 'var': 'object', } def wrap_mangling_directive(base_directive, objtype): class directive(base_directive): def run(self): env = self.state.document.settings.env name = None if self.arguments: m = re.match(r'^(.*\s+)?(.*?)(\(.*)?', self.arguments[0]) name = m.group(2).strip() if not name: name = self.arguments[0] lines = list(self.content) mangle_docstrings(env.app, objtype, name, None, None, lines) self.content = ViewList(lines, self.content.parent) return base_directive.run(self) return directive dipy-0.13.0/doc/stateoftheart.rst000066400000000000000000000033301317371701200167150ustar00rootroot00000000000000.. _stateoftheart: ============================ A quick overview of features ============================ Here are just a few of the state-of-the-art :ref:`technologies ` and algorithms which are provided in Dipy_: - Reconstruction algorithms: CSD, DSI, GQI, DTI, DKI, QBI, SHORE and MAPMRI. - Fiber tracking algorithms: deterministic and probabilistic. - Simple interactive visualization of ODFs and streamlines. - Apply different operations on streamlines (selection, resampling, registration). 
- Simplify large datasets of streamlines using QuickBundles clustering. - Reslice datasets with anisotropic voxels to isotropic. - Calculate distances/correspondences between streamlines. - Deal with huge streamline datasets without memory restrictions (using the .dpy file format). - Visualize streamlines in the same space as anatomical images. With the help of some external tools you can also: - Read many different file formats e.g. Trackvis or Nifti (with nibabel). - Examine your datasets interactively (with ipython). For more information on specific algorithms we recommend starting by looking at Dipy's :ref:`gallery ` of examples. For a full list of the features implemented in the most recent release cycle, check out the release notes. .. toctree:: :maxdepth: 1 release0.11 release0.10 release0.9 release0.8 release0.7 release0.6 ================= Systems supported ================= Dipy_ is multiplatform and will run under any standard operating systems such as *Windows*, *Linux* and *Mac OS X*. Every single new code addition is being tested on a number of different buildbots and can be monitored online `here `_. .. include:: links_names.inc dipy-0.13.0/doc/subscribe.rst000066400000000000000000000011001317371701200160120ustar00rootroot00000000000000.. _subscribe: ========= Subscribe ========= Dipy_ is a part of the Nipy_ community and we are happy to share the same e-mail list. This makes sense as we can share ideas with a broader community and boost collaboration across neuroimagers. If you want to post to the list you will first need to subscribe to the `nipy mailing list`_. We suggest beginning the subject of your e-mail with ``[Dipy]``. This will help us respond faster to your questions. Additional help can be found on the Neurostars_ website, or on the `dipy gitter`_ channel. .. include:: links_names.inc dipy-0.13.0/doc/theory/000077500000000000000000000000001317371701200146215ustar00rootroot00000000000000dipy-0.13.0/doc/theory/b_and_q.rst000066400000000000000000000123451317371701200167430ustar00rootroot00000000000000.. _b-and-q: ========================= DIY Stuff about b and q ========================= This is a short note to explain the nature of the ``B_matrix`` found in the Siemens private (CSA) fields of the DICOM headers of a diffusion weighted acquisition. We are trying to explain the relationship between the ``B_matrix`` and the *b value* and the *gradient vector*. The acquisition is made with a planned (requested) $b$-value - say $b_{req} = 1000$, and with a requested gradient direction $\mathbf{g}_{req} = [g_x, g_y, g_z]$ (supposedly a unit vector) and peak amplitude $G$. When the sequence runs the gradient is modulated by an amplitude envelope $\rho(t)$ with $\max |\rho(t)| = 1$ so that the time course of the gradient is $G\rho(t)\mathbf{g}.$ $G$ is measured in units of $T \mathrm{mm}^{-1}.$ This leads to an important temporal weighting parameter of the acquisition: .. math:: R = \int_0^T ( \int_0^t \rho ( \tau ) \, d{ \tau } )^2 \, d{t}. (See Basser, Mattiello and LeBihan, 1994.) Another formulation involves the introduction of k-space. In standard in-plane MR image encoding .. math:: \mathbf{k} = \gamma \int \mathbf{g}(t)dt. For the classical Stejskal and Tanner pulsed gradient spin echo (PGSE) paradigm, where two rectangular pulses of width $\delta$ seconds are spaced with their onsets $\Delta$ seconds apart, $R = \delta^2 (\Delta-\delta/3).$ The units of $R$ are $s^3$. The $b$-matrix has entries
.. math:: b_{ij} = \gamma^2 G^2 g_i g_j R, where $\gamma$ is the gyromagnetic ratio (units $\mathrm{radians}.\mathrm{seconds}^{-1}.T^{-1}$) and $i$ and $j$ are axis directions from $x,y,z$. The units of the B-matrix are $\mathrm{radians}^2 . \mathrm{seconds} . \mathrm{mm}^{-2}.$ .. math:: \mathbf{B} = \gamma^2 G^2 R \mathbf{g} \mathbf{g}^T. The b-value for the acquisition is the trace of $\mathbf{B}$ and is given by .. math:: b = \gamma^2 G^2 R \|\mathbf{g}\|^2 = \gamma^2 G^2 R. ================================ The B matrix and Siemens DICOM ================================ Though the Stejskal and Tanner formula is available for the classic PGSE sequence, a different sequence may be used (e.g. TRSE on Siemens Trio), and anyway the ramps up and down on the gradient field will not be rectangular. The Siemens scanner software calculates the actual values of the $b_{ij}$ by numerical integration of the formula above for $R$. These values are in the form of the 6 'B-matrix' values $[b_{xx}, b_{xy}, b_{xz}, b_{yy}, b_{yz}, b_{zz}]$. In this form they are suitable for use in a least squares estimation of the diffusion tensor via the equations across the set of acquisitions: .. math:: \log(A(\mathbf{q})/A(0)) = -(b_{xx}D_{xx} + 2b_{xy}D_{xy} + 2b_{xz}D_{xz} + \ b_{yy}D_{yy} + 2b_{yz}D_{yz} + b_{zz}D_{zz}) The gradient field typically stays in the one gradient direction; in this case the relationship between $b$, $\mathbf{g}$ and the $b_{ij}$ is as follows. If we fill out the symmetric B-matrix as: .. math:: \mathbf{B} = \begin{pmatrix} b_{xx} & b_{xy} & b_{xz}\\ b_{xy} & b_{yy} & b_{yz}\\ b_{xz} & b_{yz} & b_{zz} \end{pmatrix} then $\mathbf{B}$ is equal to the rank 1 tensor $\gamma^2 G^2 R \mathbf{g} \mathbf{g}^T$. By performing an eigenvalue and eigenvector decomposition of $\mathbf{B}$ we obtain .. math:: \mathbf{B} = \lambda_1\mathbf{v}_1\mathbf{v}_1^T + \lambda_2\mathbf{v}_2\mathbf{v}_2^T + \lambda_3\mathbf{v}_3\mathbf{v}_3^T, where only one of the $\lambda_i$, say $\lambda_1$, is (effectively) non-zero. (Because the gradient is always a multiple of a constant direction, $\mathbf{B}$ is effectively a rank 1 tensor.) Then $\mathbf{g} = \pm\mathbf{v}_1$, and $b = \gamma^2 G^2 R = \lambda_1$. The ``b-vector`` $\mathbf{b}$ is given by: .. math:: \mathbf{b}_{\mathrm{actual}} = \gamma^2 G^2 R \mathbf{g}_{\mathrm{actual}} = \lambda_1 \mathbf{v}_1. Once we have $\mathbf{b}_{actual}$ we can calculate $b_{actual} = \|\mathbf{b}_{actual}\|$ and $\mathbf{g}_{actual} = \mathbf{b}_{actual} / b_{actual}$. Various software packages (e.g. FSL's DFT-DTIFIT) expect to get N x 3 and N x 1 arrays of $\mathbf{g}_{actual}$ (``bvecs``) and $b_{actual}$ values (``bvals``) as their inputs. ======================= ... and what about 'q'? ======================= Callaghan, Eccles and Xia (1988) showed that the signal from the narrow pulse PGSE paradigm measured the Fourier transform of the diffusion displacement propagator. Propagation space is measured in displacement per unit time $(\mathrm{mm}.\mathrm{seconds}^{-1})$. They named the reciprocal space ``q-space`` with units of $\mathrm{seconds}.\mathrm{mm}^{-1}$. .. math:: :label: fourier q = \gamma \delta G /{2\pi} .. math:: b = 4 \pi^2 q^2 \Delta Diffusion spectroscopy measures signal over a wide range of $b$-values (or $q$-values) and diffusion times ($\Delta$) and performs a $q$-space analysis (Fourier transform of the diffusion signal decay).
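To tie the two expressions for $b$ together, here is a small numeric sketch. It is only an illustration (it is not part of dipy, and the gradient strength and pulse timings below are made-up values), comparing the narrow-pulse relation $b = 4 \pi^2 q^2 \Delta$ above with the rectangular-pulse (Stejskal and Tanner) value $b = \gamma^2 G^2 \delta^2 (\Delta - \delta/3)$::

    import numpy as np

    gamma = 2.675e8        # gyromagnetic ratio of the proton, radians . s^-1 . T^-1
    G = 40e-3 * 1e-3       # gradient amplitude: 40 mT/m written as T . mm^-1 (illustrative)
    delta = 0.025          # gradient pulse width, s (illustrative)
    Delta = 0.040          # onset-to-onset pulse spacing, s (illustrative)

    # q as defined in the equation labelled `fourier` above
    q = gamma * delta * G / (2 * np.pi)

    # narrow-pulse approximation
    b_narrow = 4 * np.pi ** 2 * q ** 2 * Delta

    # full rectangular-pulse (Stejskal and Tanner) value
    b_full = gamma ** 2 * G ** 2 * delta ** 2 * (Delta - delta / 3)

    # both b values come out in radians^2 . s . mm^-2 (the B-matrix units above);
    # they differ only through the delta / 3 correction term
    print(q, b_narrow, b_full)
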
There remains a bit of mystery as to how $\mathbf{q}$ (as a vector in $q$-space) is specified for other paradigms. We think that (a) it only matters up to a scale factor, and (b) we can loosely identify $\mathbf{q}$ with $b\mathbf{g}$, where $\mathbf{g}$ is the unit vector in the gradient direction. dipy-0.13.0/doc/theory/bmatrix.rst000066400000000000000000000061471317371701200170270ustar00rootroot00000000000000================================ The B matrix and Siemens DICOM ================================ This is a short note to explain the nature of the ``B_matrix`` found in the Siemens private (CSA) fields of the DICOM headers of a diffusion weighted acquisition. We are trying to explain the relationship between the ``B_matrix`` and the *b value* and the *gradient vector*. The acquisition is made with a planned (requested) b value - say $b_{req} = 1000$, and with a requested gradient direction $\mathbf{g}_{req} = [g_x, g_y, g_z]$ (supposedly a unit vector). Note that here we're using $\mathbf{q}$ in the sense of an approximation to a vector in $q$ space. Other people use $\mathbf{b}$ for the same concept, but we've chosen $\mathbf{q}$ to make the exposition clearer. For some purposes we want the q vector $\mathbf{q}_{actual}$ which is equal to $b_{actual} . \mathbf{g}_{actual}$. We need to be aware that $b_{actual}$ and $\mathbf{g}_{actual}$ may be different from the $b_{req}$ and $\mathbf{g}_{req}$! Though the Stejskal and Tanner formula is available for the classic PGSE sequence, a different sequence may be used (e.g. TRSE on Siemens Trio), and anyway the ramps up and down on the gradient field will not be rectangular. The Siemens scanner software calculates the effective directional diffusion weighting of the acquisition on the basis of the temporal profile of the applied gradient vector field. These are in the form of the 6 ``B_matrix`` values $[b_{xx}, b_{xy}, b_{xz}, b_{yy}, b_{yz}, b_{zz}]$. In this form they are suitable for use in a least squares estimation of the diffusion tensor via the equations across the set of acquisitions: .. math:: \log(A(\mathbf{q})/A(0)) = -(b_{xx}D_{xx} + 2b_{xy}D_{xy} + 2b_{xz}D_{xz} + \ b_{yy}D_{yy} + 2b_{yz}D_{yz} + b_{zz}D_{zz}) The gradient field typically stays in the one gradient direction; in this case the relationship between $\mathbf{q}$ and the $b_{ij}$ is as follows. If we fill out the symmetric B-matrix as: .. math:: \mathbf{B} = \begin{pmatrix} b_{xx} & b_{xy} & b_{xz}\\ b_{xy} & b_{yy} & b_{yz}\\ b_{xz} & b_{yz} & b_{zz} \end{pmatrix} then $\mathbf{B}$ is equal to the rank 1 tensor $b\mathbf{g}\mathbf{g}^T$. One of the ways to recover $b$ and $\mathbf{g}$, and hence $\mathbf{q}$, from $\mathbf{B}$ is to do a singular value decomposition of $\mathbf{B}: \mathbf{B} = \lambda_1\mathbf{v}_1\mathbf{v}_1^T + \lambda_2\mathbf{v}_2\mathbf{v}_2^T + \lambda_3\mathbf{v}_3\mathbf{v}_3^T$, where only one of the $\lambda_i$, say $\lambda_1$, is effectively non-zero. Then $b = \lambda_1$, $\mathbf{g} = \pm\mathbf{v}_1,$ and $\mathbf{q} = \pm\lambda_1\mathbf{v}_1.$ The choice of sign is arbitrary (essentially we have a choice between two possible square roots of the rank 1 tensor $\mathbf{B}$). Once we have $\mathbf{q}_{actual}$ we can calculate $b_{actual} = |\mathbf{q}_{actual}|$ and $\mathbf{g}_{actual} = \mathbf{q}_{actual} / b_{actual}$. Various software packages (e.g. FSL's DFT-DTIFIT) expect to get 3 × N and 1 × N arrays of $\mathbf{g}_{actual}$ and $b_{actual}$ values as their inputs.
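In code, recovering $b$ and $\mathbf{g}$ from the six CSA values boils down to taking the largest eigenvalue and the matching eigenvector of the symmetric matrix $\mathbf{B}$. Here is a minimal numpy sketch of that idea; it is not dipy's own implementation, and the six input values below are invented purely for illustration::

    import numpy as np

    # the six CSA 'B_matrix' values [bxx, bxy, bxz, byy, byz, bzz] (made-up example)
    bxx, bxy, bxz, byy, byz, bzz = 999.0, 2.0, -1.0, 0.6, 0.1, 0.4

    B = np.array([[bxx, bxy, bxz],
                  [bxy, byy, byz],
                  [bxz, byz, bzz]])

    # for a (nearly) rank 1 symmetric matrix the largest eigenvalue is b_actual and
    # its eigenvector is +/- g_actual; eigh returns eigenvalues in ascending order
    vals, vecs = np.linalg.eigh(B)
    b_actual = vals[-1]
    g_actual = vecs[:, -1]          # the sign is arbitrary, as discussed above
    q_actual = b_actual * g_actual  # the 'q vector' in the loose sense used here

    print(b_actual, g_actual)
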
dipy-0.13.0/doc/theory/gqi.rst000066400000000000000000000021041317371701200161300ustar00rootroot00000000000000.. _gqi: ============================== Generalised Q-Sampling Imaging ============================== These notes are to help the user of the DIPY module understand Frank Yeh's Generalised Q-Sampling Imaging (GQI) [reference?]. The starting point is the classical formulation of joint k-space and q-space imaging (Callaghan 8.3.1 p. 438) using the narrow pulse gradient spin echo (PGSE) sequence of Tanner and Stejskal: .. math:: S(\mathbf{k},\mathbf{q}) = \int \rho(\mathbf{r}) \exp [j 2 \pi \mathbf{k} \cdot \mathbf{r}] \int P_{\Delta} (\mathbf{r}|\mathbf{r}',\Delta) \exp [j 2 \pi \mathbf{q} \cdot (\mathbf{r}-\mathbf{r'})] \operatorname{d}\mathbf{r}' \operatorname{d}\mathbf{r}. Here $S$ is the (complex) RF signal measured at spatial wave number $\mathbf{k}$ and magnetic gradient wave number $\mathbf{q}$. $\rho$ is the local spin density (number of protons per unit volume contributing to the RF signal). $\Delta$ is the diffusion time scale of the sequence. $P_{\Delta}$ is the averaged diffusion propagator (transition probability distribution). dipy-0.13.0/doc/theory/index.rst000066400000000000000000000001701317371701200164600ustar00rootroot00000000000000===================== Theory and concepts ===================== Contents: .. toctree:: :maxdepth: 2 spherical dipy-0.13.0/doc/theory/spherical.rst000066400000000000000000000055701317371701200173320ustar00rootroot00000000000000.. _spherical: ======================= Spherical coordinates ======================= There are good discussions of spherical coordinates in `wikipedia spherical coordinate system`_ and `Mathworld spherical coordinate system`_. There is more information in the docstring for the :func:`~dipy.core.geometry.sphere2cart` function. Terms ===== Origin Origin of the sphere P The point represented by spherical coordinates OP The line connecting the origin and P radial distance or radius. The Euclidean length of OP. z axis The vertical of the sphere. If we consider the sphere as a globe, then the z axis runs from south to north. This is the zenith direction of the sphere. Reference plane The plane containing the origin and orthogonal to the z axis (zenith direction) y axis Horizontal axis of the sphere, orthogonal to the z axis, on the reference plane. West to east for a globe. x axis Axis orthogonal to y and z axis, on the reference plane. For a globe, this will be a line from behind the globe through the origin towards us, the viewer. Inclination angle The angle between OP and the z axis. This can also be called the polar angle, or the co-latitude. Azimuth angle or azimuthal angle or longitude. The angle between the projection of OP onto the reference plane and the x axis. The physics convention ====================== The radius is $r$, the inclination angle is $\theta$ and the azimuth angle is $\phi$. Spherical coordinates are specified by the tuple of $(r, \theta, \phi)$ in that order. Here is a good illustration we made from the scripts kindly provided by `Jorge Stolfi`_ on wikipedia. .. _`Jorge Stolfi`: http://commons.wikimedia.org/wiki/User:Jorge_Stolfi .. image:: spherical_coordinates.png The formulae relating Cartesian coordinates $(x, y, z)$ to $r, \theta, \phi$ are: .. math:: r=\sqrt{x^2+y^2+z^2} \theta=\arccos\frac{z}{\sqrt{x^2+y^2+z^2}} \phi = \operatorname{atan2}(y,x) and from $(r, \theta, \phi)$ to $(x, y, z)$: .. math:: x=r \, \sin\theta \, \cos\phi y=r \, \sin\theta \, \sin\phi z=r \, \cos\theta
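As a quick check of the physics-convention formulae above, here is a small self-contained Python sketch. The ``*_demo`` helpers are ad hoc names for this note only, not dipy functions; dipy's own conversion routines live in :mod:`dipy.core.geometry` (see :func:`~dipy.core.geometry.sphere2cart` mentioned above)::

    import numpy as np

    def sphere2cart_demo(r, theta, phi):
        # physics convention: theta is the inclination, phi the azimuth
        x = r * np.sin(theta) * np.cos(phi)
        y = r * np.sin(theta) * np.sin(phi)
        z = r * np.cos(theta)
        return x, y, z

    def cart2sphere_demo(x, y, z):
        r = np.sqrt(x ** 2 + y ** 2 + z ** 2)
        theta = np.arccos(z / r)        # undefined at the origin (r == 0)
        phi = np.arctan2(y, x)
        return r, theta, phi

    # a round trip through an arbitrary point recovers (r, theta, phi)
    print(cart2sphere_demo(*sphere2cart_demo(1.0, np.pi / 4, np.pi / 3)))
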
The mathematics convention ========================== See `wikipedia spherical coordinate system`_ . The mathematics convention reverses the meaning of $\theta$ and $\phi$ so that $\theta$ refers to the azimuthal angle and $\phi$ refers to the inclination angle. Matlab convention ================= Matlab has functions ``sph2cart`` and ``cart2sph``. These use the terms ``theta`` and ``phi``, but with a different meaning again from the standard physics and mathematics conventions. Here ``theta`` is the azimuth angle, as for the mathematics convention, but ``phi`` is the angle between the reference plane and OP. This implies different formulae for the conversions between Cartesian and spherical coordinates that are easy to derive. .. include:: ../links_names.inc
V콿IK%oeq 013(Ij mf̭F X̩6@ pN*0=hf\ Y[Mrc *Sx޸ߚV?06f@SJWKfy'뀭$ \ 4w` ;(C&^Uw!Bqduڵk׮eYf͚5?oiuSV6pǩ ko:k[<0o4%]]yy9>)pܹusυ2qltcj NS;VR Q585u`5W HxE8ܕ prN=T`s$z/k*9Œ5s@jvNdczF9Od3 kjdQoc6LyVRƗ`bTor[59º$6aZSkσ c%N?ML]~U؄B!GVsuɶ[> BZ.goP,M DՔgܘo1˙ǵ"ahm;N(8Z6ZgănvNss/tAǃ3Ιqt$yWs5y]sp 鞯ۻTe;|g78^3?j{k gOfe^?T̞P0]5 us=-Œt-X盫ԥ6;PjkIV#s'f`h| @blǎS6 f=8f˶ݯ;j&Bq8Rno6'epj{(պ懵@R~|s!0-08WCCmgm`c)gmۧ~\A :Fh/Q]u8\DZ#ppxt8N89J~=ۙ\nopƹj zm۠sV۟{9mytszI|uj $A90QD0b'ЈFzNe!y\9m0CdT <g0Y[@bYVK"t01x43!P ;f# MoeCԧakjN59TB!1pJ}2Z dٽNW 6J6a~_8yѓ]g@Ƹ?; 3mܖttIs[;~ Zg.=CAݻυ؟o Ϻ.N֠5~Bs99T_Jj^hƻFs+ 0k݈5瀺Jeb.~afNR̆i`I`.4BrafOj8*07/@9f*Q5 * Y-*Y y:h0#~U؄B!Z`Kە=d,ɂ; rCĤ~D >kԋ=Y^긫٦8gr;=W/'rŸw{Oiՠ8q n+gzNCp:#[Ku댯68k`~NUg*T)T9i*yq19fWPuMlVORU[0j9jJ 8Cm'BT-K:4":FSF lB!Lj-gK@Nboz_$)CΘ{^uց]><:nw3o ܅NF&pGyg8~T'3$OY5w[݇Mрw:Ar:[Z6v>wǀ;ad^/A4V{xRsHc=#̗yD0g_2pu!gfPn0>P5~V1[uXT<}:|PjH3Ģ/ ܢ۠iz5B oȍEgԮ4KLltB!1 l%Z{@)u2QCk7In YN:v.rxDp׻Ap8_9AX|ؙnMo pw#))?:C+At wk9Aw& ֻ;I_%=9A TS`lީrxd> Y۬{͖F 擪7.j&y=*,Ts>pNu6WwdM4Gb.P#X'#Ҹ2Wl׊=vNݩ{ @[~ѺߩCsNk[ rRw&>qDU#6!Bc l?4}I!­Wy}u> u{նG@`1wb^zN~}ҳK>NГ wq`L˝܎ xgn|;fkpN;F\ԳKev+TPjfx8_cy  _H6c5չ`7/8ټPeoi浠Uw@%ͳAEAcFȽ磌]gaO)E@Wꦴjw*!V-\ƒ&1Og6w:QHM!1cƌ3s9s^ {6x\΋Zg,`5t֞ n8KG]rIVg7z(Is^w "*',4w^cz]y`jJ@UvY!o<6Z4`N7?4uj~>51_*5 Uԩ\ny%O'A%U YK!)Y?__辺7TBQxBQ)f̦IsS5<j5jYDU#6!BծvBmwuvuN^z Nm̙onA_3ݎZ؟qGj spgSzӽ:_z{9Tu/f?P6ټvy^VRߩ:0_7nS`|.2'9ɼ,+u@| HPÌ3W:N+DmT:KAk*Y@Tw;6wؿ꾺;$D5Jq0xy6={BT\xR(؄BQeu]: B'8ۜ|N|е1;tkgRNIo^m3Buvh68٩) Z.ݰAB!,{t|/L۠=]hAwlt}7Vc83RCh/4tŹSN:8g֡@u_~ͽ]s]?owNk*9Œ5sxhbnz3\v#fs XMU GGjT2Y `[IZ`^Z_Qjlxng!QxHRjM>vuDz.0n`+!$S+}p>cWM 3i]&DՒ㮎7W(69TB37;#<zر0 PtBQSlkpmE588Zjcmvpkh NM5xՠkmvp{{3Rkpܻ[8]i`h۟5}Kt+KU!0@i/zjfMPQV#k`^d <f'X :kj1"ZX.Tm3T'u$Wqf3P͍LQ1jg\`-r6n΅~;=^SԮ]+;a.>_Ws˙{?:73qFiG]#|s$Ve#TB限z98JTUR`BqLm st˵&%|; "N]wF|Mn( 81+81{ ZzނO 3Ι5wZ3Ľ>?нdh#`^m~omLW)\i0+x30k9O~6`\o3b3׌6Uh,45-*J0mu:u0MT~Im[y;3/qA\FUqBñ d؄8|rvn ԼLH&Jq06n 9/ԩ^raeT߱!8"}iqh 8Ի ; `k7[]G@Si|pNp&88v ܈u O@h :2t8Qp^FtSZt/ӥa 6,#Kgo3I1/l| 5@޾oi@ 4ɾsl~Ke*ԋ_ٰ/9Pj~^U+]*8:Fy'š؄R}2Z낽N#:&Uzv\֠s'JXXgpйcG^6k?p!4P |Bp.w{ۗ3Tkh}&}38ۜ1:+U Fyu6+yUg#IjzBste>TV,Ph3R50@wP90'tT \5j(H(y#7Q~ DUg(C  nl\ %mK,NȘ5?&=aO_ Gf qyVa^ybTBeR !p{`׮~U؄.u ½+7^G mn{_LJQ`Od=J7+` utpyb7Uw{]BV TB}o3Rb=fm[/PPWLs"XC̥֯&m`;;R̆i`56[L2Y=kyu`r̫ܠ2Vo2 ~qxgi֒EP-<š5_uBϬQyE&mR^̖ӡFYP礚I)~Bm׏^2jU 3N>Tq8!m8`y=ý8Gv>XT:O58͜vqn 'k X}} v6Tlgs6{Sk̋Z}|N%X`RLƀZfUAM2.yZjc6y2ƛAZwy-σ1Y-UI`uZ90hE!{ s/7 Yr@ý>Yx}n&5!; oj:7Ōwߩc;kv"$] huf:JTUR`*u،Pk__Hg3[AqgnP踄~B}{.:vJhޯ^Lh^6NTUR`(pB58:5<7GkЪtiwN֠ÝsMt悳))NhN8Cv8o:B3T t6mP"8׵wߜa]UfJU ")k] ';ԗ2OUceMI# 끷\V g*4f?jM !%>Fl u]9gY ΀nG؄8|r}Zz JMw*!āN K_ cl;'48N1~UtՊ9Ŷȵ']zQNN8EN7-c0m^ ΥNz~N烞Z4p.vh Mi3f8cF_g9CoQD0OnO<k8h]j3O ̓F5&qb[Xs-%`^oje`4sn3VZ:v^5T9|0cTZ\I W JwJ:F g҄/} /^\Jiooװ#x}šhv^lkn<{ 5wG}W3 j_\;p%[L"BWy < F{&S\,щ@ S )6?U+]FNgQp=f}ھnRh<*SN7}A=?ZZ꤮7~[L^UCYOm4n1~QoC`ZYV߯hk=f[5*`Ś *H7:\.^u*3I̝2Ҝg~J_h/ϟ J O{h dH&7;NN^8pԠ;e}GA!9;vYl,I)]LXɗ%ޜr;M [wj!aW[tG KфƮD. h,w6QuIMjm-ӚBN-CXbpr)`^bAնvYׁa `޾NPZ |Uf1\5μTjmjZj>Xif5T{F`5O5RU}!s{𥅥Xz C+ze;4@14d~,-dDg %Ǯݩ{"wAl*ݗ}\D[q${zߩGē^3I>T5Hc9BJk1@ 6;&|c 63<ձu|S p(PߟЏb 9{&QVJ"v-YlA,U,don\ ;vʍE I ˒l0{Ba۾;q&ROa/XSu#2X `DXӉJ@N#XD6yo6hH{ Pi7@%JMn(L.u}~BMwU qߩC/uk]6x* JTuR` cv  cfB!đngl˽9|{PbC _kM?x#8]?Fl4WJљw aji iJTuDTKfo͍p=@ !6'7mde䘛g=VQO՜ u~L4Fp'ЀI~GBT2yYp`$Ml0B~B 'ii4n;dU0J K*:6S !. N;Vaҋ`-S/@T ]nx|H{5$B)ԫq {3bߩU-*TB'j޵;.&|lL ]6v^so҃N%(>bKǵ)UCbE7@=/4ұ mjߩUڽ^ףlh QPBϡwXӈB` [6C2{پB !l˜H;^Yy2ַ !P+5Ԋ5;qP0vVz~BT ޯ^վ`+ ~gB}U0UY b\(N!߼5qa͹[!2=Y[bj.+X曁;GD !#7{ L:F"d&Uۍ m[6IT}DTP2X? 
LG9/J!_rtE3yBGۗm "ꄫд}Cl2$H_aM!0o_s~726*>(rM|yB C JTR`B0K'o`!8 &;~U$65ߒn%0ouL;5%t)"2eC5 uߡBsƁځ!VT qȤ&*V~`_WF!đV8c"H+*#cU3p{H_@J75.>"AnWm'S !@e`;.&*pp<!8B.%g7oʈtwIm37aQ<$ŷJjLŅuiL)6ykua#Nۗ̂(SB lRt V}N(4B!Uiό׷Nτԫu]1wx꘵K(mn8&̩vVz^!*NzoZQ~B Fw_-ʊ JGqHMT +?8w4B!Vi9S2ן1lmGK Ԩbo&o@)PSQx+ƻ~B ;EG!sg;.V+*` sc|fൡT~BG>s l).;-BWAWCk aXB@,.˜H:S"59TBtV:0]3`J`q 6Q)k:ơyP]S !swU6XSYOs:34,ʺ}5WXBj¹'z= ~B t@XӈF lR6́8N%bOjIHy `srp9u!iyA#>~B#ǘT9D-3Fq0t^bjYIӈFJ!0jFSvS !ı>[1e븍? >cY>ļx+Ğ^إ@k~4~;B=tM08`7(;@} au" VqߩDu!6Q)݁\`#9}w;BT_zD։:cl;oM70vE\m$:8BAwwXmS8Rߩ67BwB7N#)J!e  9#4&M!nےr?.-y3ҵ kn:%6+V-6GZ!*cܛ ~*D lBT L{XAkZITR`ef 敆VIo_L!n;w 3sgn'E56GO:̚wOHaM!Ľ;F ;@>N%K l&R6 4WvS!Dյk-;a/2Uk\RX|AbjJ3)5xNR#8.n^waJT72MT*sMc K{S !DW33}Fvn^x4nH~މ ȍ1#,S !DM9d& !wt{~V JT7R`JWIX@C$}1b{aʂi+&S!_h%&wC3K;Bj u)a)v=lЦj8-%*@븰Nb; [-Xኢ˺}n(\g7GgBR~k!L૰N-/~ЍtC>M Q%8uz=/7_C Ҳ_qlR 4gԒBj`OXʢ+M;`gi;BFo ߟ%D*ӊdQIa0o ) ^`TwI;޺$:N#+)J%2Šz.@n/ |gdnR@i攚:C}V!Kme 0p`F&?Bv6^xjZsʎ]ϔ <⴫wXo]Jɟ\>;DuzWDv\eo7⮯v>֊:׀ynEuumuoNB6}; d?}[Z\pWkÞ{^.A9;V#Oxk}H/֏_5q93oyt@̨h~ Zuc??E֍xLf3n =eIS>sQlLcd !vݛ @[3;%Œ˖ 4 wNtPsMBW !ğvim@vo~ν ~*棘PLj VW~Օ13o'{Z39׃n PTdPWυ6QRZw9;G!]qS!o^ʿLj:Jj _| (d\DؼacIF*+d?οmGk \孆okudb5y3%jǾ7]xA&S$NzHqS[^vH>Ɨ@Xj01beՌNcMta&6Qu`ԅ _]]yPwZɗN!P2eDmoYݷmd]\2Y;CQ΄DCRaN_ vn9Ǜ[33ݬenйy|Xbc-+#hc}3ӶmN$t{q| 6唉Ӂ_ Akvoa]Æl w}pЏNN/[rGmƶC?o{7k $ cޮ YTޝys4ozj:{mtBw~9[n?qNC3WKv:tisJ#a[S[Nq.0@ΰMO&ٞ}Z\{^ HJ |xLҡܺiW8:xY`Ef|o,끄5^#7܁Lw3%w6QHMTJ;%`/wYf&IQ993{6yo)C­TUTsT\;0F9{}ʧ*qٓv7x|{pn]N^f.j$HޙGp0M o\Y< פMw [t[ZP#bRPc\]B:M'5 ]?שoA3S~:''\W;nȽe`5Y.0tB8N/`< 멍aJTWR`R0d>v%x.H;qSݵ:H̋ [è]qmjEƭڷu[jg;8vmg'Wwk75`q<|jKqRo?V}ڪ%MiAe0`.q nDTP ?J6|F&aa@ukO篹cH7Cb7hHZc|!lBkH{1M{gMT?B=jxбp=4B?>GBBt~'A+a{g^ʷ~v_ y C -޲ܜ*7\ѣRuT8tΫ~5nw*ޯԶ߁/z}2l #Z\s5\W^X~_\ȗ :x ieJWtKs־aEJTW2MTJ˭SIt>s-ۜR!Niϕ*}waÌȵX{_Arr gC BpBN-UcGo[3A?4b'`+栺ҽFQ!J5 54Xso7eF}U_ zca.*{ܿ/|W`[Wfϭ?8;o l| c10Baυn) aPNGD͂tߩQiu2 6b,FI&$nr\h86 */⻨~ǺoHmz,<ީ^~~^9\}vWC`9 /ܻf-\饷S>e[ ݱooF~_Ց>} |1;@89Qz@8k~ՕDbFku+;;ߩs6~}X}`ȝA[kM7ռ|U\tم(L|~aIc 4KL~_>š7֎x:\qRN`)ųnUR[s޽ܴ 7AqTIa~_a{T8O'8N4sF໽M`TA"o-Q)³=TB?N"ltMm!u/.rLve\,4~u-BbOFct;{ZĹGܧ]^ͦ%uBh̯qzu lFڻ k 9}gw'}{ĥnfNQpNK:E?;\\G to[9?7פ~_=o (W.;@8Zu`r.~՝D(2mΉ:TB8U^l_a;6,-/dQ#Mzd7ۧ@MRSTgxg># ;.ڹy4$?+o 8Wy{⥠n[~_Si0Ek+̖|a|1zJ K"zv~=pC|us4Bav \wQIMTJmZޝ˞w*!đPsG֮vǯ?{e9f c 鰒bhrI'=&ֽ%Bh wz!`=~1P8n WӮod}^I#pg.6"|=TKOʠǰvFLzχW֟8<~+e'GgHʷtتFq ܚ{,~4~՝D0rMN;pmAT6-}6̺F]^zmΔnr8ܳa9(Pr_m\xA/3XRor`,|>kqpӦ^.}}k4k<ϭS'zoys[߻ Gub_M!āpp~pB@ q$HMTjLs``؆w!ġ('5W'<\ NJhg7ӴtUCiaMOMܳ>9W|Wn~_S~¾]ӈN lR Szw!(7| ҟXvmwK&+v]PDnJط4ΠsE~~u8bp+>Q2kY=_ Z1!JT]8q#zfyƮ/-B||žaY%ǭϮtKS S\.c:~ԋ c xL;,WL; lJQn[ee} }%ep;7{^Yϸb{oSk29HOQE38 wf5cǧ9S^%a'ckzqG(Lj J!7n/)hY!W,jr^{o5}pf>u>.}UpwrC"$[C9>^4PWGQ6 ֒;3 Q1&*V~0 r!˾ v\EaK|NdF]0h|q`ۧ%5Q;}5㉠8e^@+Z u(5#h\|!*o\4Hr7?^հylftYݡow`?\tp> wwjW/+XgoZuJXߩDu'6QSAb_fF3iYy9:eWcʺ}R pr/<wj!M~!XvuW}i_>7:{exr/TjOxqe?U7BY_ >fOxޛk8 Gyܫ+k~_5ݥl4 JTwR`Z0Zf{ȡOqvkڦ^mxxüP? 
ԫqD B??a=n\}=88 NA>{5-߯Mi&6hǝ yY=q{44" o>gLb5*Bk,`sSN lR , ^[w)8J&jnQW-_o9)ʺ}6ұ}]I#~~ɒ8}ۏ춭孠}nJq ZRVQ;›&āq-'̄wgWc{=p;(tG[A?X[Vdo&}5BTݨlL FN%;)J-(kN%D;6 }7[8寁KjT'4oţe>4l qxe{Ò8u}-wk z===F,ƒ˟u8:K;G 9wFLKpL4<{+{6ivƓ0>{Lɤe{3=uRoOزqS`T!+&|gg۱~j+37;#VZ8 r^WQjEAԆ5{& FcHsQM.Xvxয়o@h^{S5*ZYQ3/[^̳uF_͠BmnTԽ$~[j;K*fϞ6m->űAwZ?v `l*(~Tt*t~eܰ#ʎ7cDk(}ti H{5ppv~zSȀqaoF8JYt_M3 G\pSRZw=**sy T^o*h[^NqEfj W^yO_ez"J~nwߩ8 ڀac|B)UJt:I0鞣JʡYH+XyV`MnCq{9$6Ӣu{}6!M虇BTi;{x=N%??^=Ɩ09i[άj0+bJ6q] N0w*qK pp~pFC?Й~`]I]wjJtx9%'9AG+y +^w4DmϏ3Z=;_]C)f, Wҹ;CfIi{ !{g o[}\\^NN~GH "go춧múZ!6Ŏ+jEJ+&V~`8طU~PYA`}a%Fau¾vӳ7}ֽ{.詉WG\4CE]C?Ot&x*ރ> cu9UlyHΝf|_66Zjl:l\ =vn`'MoПO\`Ɓ3LJ+&@MskX~X_t{J#LRԧ}lR.on }RZ}x;槑ThYcIVXrp{Utx̶qB"21Ƽ?&z&OZ~MϋYLrq:j04 kLTX!6Q% 5,n'TBY}f8'ywiDUBjZ X"7CPk|ZY/Е\쉖SVZ7< hzQk=3~t ݃aD!D`m $Ajjw*!, 97y+|ZQMƆ] ϊ0ƠFwA̅@P5;ӝ6Ο;1xL{6yد_]||ϊUXHVos{k'Z,e'Z{u(lB`]GGB61\m37ᩍ/LQ0ᝩӾ{zvk=-_ةk/Kqqz f&6Q%j*;hѓAw*!,B4+;~4to_qG=z:uԩS,˲, %K\{M;H-\i-`ޫ:!b^܍ kx7ה݁ 2ǂԼǫSE VwCf 's{v&իW^ 77777RRRRRR ǁSgq1bĈp}w~;vرȄ@n/$;C/I6Xtκy-g+0_96SXF:굯 }}o ?B| '^;8HMT)a)}4BYVKc^6v5N%LQ}___3 A~h֬Yf 7qOw?v[Ž>şm>>@ ~~GX-dtײ Ywn~>S9vVT`U[K93\sCc$m{ +G]GUށ5?(93O^/np3\q#Dur_j;́:g56 )T fnnN5*-\',d+YkN;8HMT).f`ɺi8Exlr?M\ SΝcʹ0nܸqƕwqЮ]vA^=d= ESvm*;P/r[ybcccccu֭[׍yʕ+WP( f[hꂅˡhk=k3?s?%e/HUJrAE*:3%3߿^?Wp{uQg=yaa DDDF|8v3ջ̧`#4mzǹdؚ!aUӳomwJ!w߫0e6qIMT)aaN}i8,>ϲ7yThq;z_>84 ~Yxv^VrǡPV/g2{jmˬusnzl6 ruu|<'Dԯ]T*޿!ƒYY̳D+U^l=˟ey+˛!WQυ wFZQS40RSN;s;y0zdv/QQZ~^ ӂ1~JT)a]aaeS6qWyTK}֝]w*q׬񯔫܎0+>˨{ۊm絧=B]?mh !N=,ؗ]one7 : s P+˟t,oϵg5/073`XW dxBzɠNR/ks*#GY6׬2PsT%h%9`ޫ:q=QQ4 xKT"0 s9bJrf&I>~)oL vqoRx+`xGycb|zS30p!t`m"hϽ3'U06k I̒`8uO`Wg`pe'b&yKi@R]F;{$G}1:~b2G;B|5j i MaСCؚn١a78y<.!f,/l/ÎVgol9O⩡D;Z rx;S\R*+l魘~/N1]}y%K+$)ZD{__xig֬|2!-u;ET=.:R@M`08=~kwGK[ɻB3˗돞>`Wi_ǩ(9s5C~k2a\Mct*F:uW*-R3TCr"{Vg2Sd?'W 26M% JMRSzӄ5%K,YD…p~4Vp/vII/Ya.HZ1 1iYq;_պy>MhP5߰ty+ ;Pwz3&*x 0a7u^1o>τܦ9urvuudN~,vj 5u8v8:^zW3bIi+ׂ]&F:\&  y۱?[ӟIf DV+7A)e`֭[n8p@_^{ #K?PmV}nJ}q (΍ ?دEy6$xF(l+N8:sS~Ņw3 ڪ& /x9nWߕ~|0 ~Q+5ԻmXA0f2vu6ȝVMM{"*:r29X>76?j 5吩 6mSb${#r6!Ta((Y,#+4mxYBSL2eq<<Bw9~qtߝ9'jsԯ[n-IDATWKfg FrQjrcˀBb(.Aq . ^g`'O (.p\lU=CBi`WuPyCջ*ׄۦUR/[aϲ=N:J`J2Y*fpMb޴jG~[10awVʻeh3P>]\r` '昆GLt4iҤI[nݺuk[n[\822ls^ie3Pf3s-Bb`!Mv||cb5P_on!ZUYl^' EL, 6g]{^9' }>6,U}So`u:K(8c9<= {ڞ wj (=Hk|2bl$L9US>'Ix]`Wcpqv<]oרQFرcwFȑ#G  o ǫQtͺ|ڼ&ؼ Z2d#>ج=0Ė&dP]ΫҲBvl @b+?akA`ǚvvΣo-nBOQf2_yAДJLf) I>^%)pkCIRo v+r/|m)' vUU ?Mӎǀ *+6iI^-\nly@¶)Tn܉_A0G||0X!dt"ءDt_4ar_:OO ۵iۺdTٓeׯNW?8z?uST o!KCfHH+N0ӜfZBW4[+U{ NuE+᤺GpZdυ;5s}/8a\1Sͅ7Œ뺜p lS>_FH>Yގ"gvqo¥綿g``plㄺ\LBY1#E] Csunf3<8;߳?oRLg2bl$G}f\R}[E=\Ĭ,k<`4ȟ-[lٲ%lݺu֭{ݻwÏG~c-c_L; ߿?l߾}p}w}pNC8ON~G8v6^ DW@qXo~*5 Q2eŶxy*9ּ?]gp6t |b'}tSv91!ѷ֘f`p*;m! B`Wu2a7?kƮZ>>|vO G%"#}msIn1B !\Xk"B< Jwe}y202uL{e> Xel9S{ 를J~PoN7<m{Ϡx b +(R=i'^9*)^K/K|$E qc 4\$.O*!{ڹ ,( O88A|[;gf93yn go5008D3*EڥO~{FC{1>x?>~0qkK_࿉.9ҹL]i颡r gpMbrs#*U\LLwK7۲*+NʨC֓?ԅSsV'Y{ /. jiF@ԂV'm`;x'o+':n o>mn(r7(ю]BY`ALtPLHz`zPgy;t,<o7!P%pv:*W&l < Fޕ{ϔ 4[;xԥ7 v뇱ҧߎ ~k\$+K[k`Wc_ IDETh00&-/W@Ba\Y-pđ@& 23 m ťjY1]yicYmBoYҳςz-H+r@YPL輄1VT}֛l6![ gPp?`rXm ^뇈Mk#-U*kJkI0בZn v5u 2ؔiӎ^ +.VA~/UWȓ},8+Z8B߉Xksz _ﻰPb 43M!}xVf Z.癯\Wq Iay=5˅U=2 řD1QeTh[IF#?3ZF . W0!eZN:fpMc޼IJ?)79+(I ѷaʔIZણ]\ XFH`e_ i,CM[csR102)ʋkޠ>Q;*\76elV\X;lfiŵ _ }ނW [^oc-l9UG8]kYU 9UW(!:wxaKgAÝ?h`5[} 6JGZl2Y! 
[binary PNG image data omitted]
dipy-0.13.0/doc/theory/spherical_coordinates.svg000066400000000000000000000654521317371701200217160ustar00rootroot00000000000000[SVG figure: diagram of the spherical coordinate system, labelling the X, Y and Z axes, the radius r, the angles θ and φ, and the point (r,θ,φ).]
dipy-0.13.0/doc/tools/000077500000000000000000000000001317371701200144475ustar00rootroot00000000000000
dipy-0.13.0/doc/tools/LICENSE.txt000066400000000000000000000003661317371701200162770ustar00rootroot00000000000000These files were obtained from
https://www.mail-archive.com/sphinx-dev@googlegroups.com/msg02472.html
and were released under a BSD/MIT license by Fernando Perez, Matthew Brett
and the PyMVPA folks. Further cleanups by the scikit-image crew.
dipy-0.13.0/doc/tools/apigen.py000066400000000000000000000432541317371701200162740ustar00rootroot00000000000000"""
Attempt to generate templates for module reference with Sphinx

To include extension modules, first identify them as valid in the
``_uri2path`` method, then handle them in the
``_parse_module_with_import`` script.

Notes
-----
This parsing is based on import and introspection of modules.
Previously functions and classes were found by parsing the text of .py
files.

Extension modules should be discovered and included as well.

This is a modified version of a script originally shipped with the
PyMVPA project, then adapted for use first in NIPY and then in skimage.
PyMVPA is an MIT-licensed project.
"""

# Stdlib imports
import os
import re

from inspect import getmodule
from importlib import import_module
from types import BuiltinFunctionType, FunctionType
from inspect import ismethod

# suppress print statements (warnings for empty files)
DEBUG = True


class ApiDocWriter(object):
    ''' Class for automatic detection and parsing of API docs
    to Sphinx-parsable reST format'''

    # only separating first two levels
    rst_section_levels = ['*', '=', '-', '~', '^']

    def __init__(self,
                 package_name,
                 rst_extension='.txt',
                 package_skip_patterns=None,
                 module_skip_patterns=None,
                 other_defines=True
                 ):
        ''' Initialize package for parsing

        Parameters
        ----------
        package_name : string
            Name of the top-level package.
*package_name* must be the name of an importable package rst_extension : string, optional Extension for reST files, default '.rst' package_skip_patterns : None or sequence of {strings, regexps} Sequence of strings giving URIs of packages to be excluded Operates on the package path, starting at (including) the first dot in the package path, after *package_name* - so, if *package_name* is ``sphinx``, then ``sphinx.util`` will result in ``.util`` being passed for searching by these regexps. If is None, gives default. Default is: ['\.tests$'] module_skip_patterns : None or sequence Sequence of strings giving URIs of modules to be excluded Operates on the module name including preceding URI path, back to the first dot after *package_name*. For example ``sphinx.util.console`` results in the string to search of ``.util.console`` If is None, gives default. Default is: ['\.setup$', '\._'] other_defines : {True, False}, optional Whether to include classes and functions that are imported in a particular module but not defined there. ''' if package_skip_patterns is None: package_skip_patterns = ['\\.tests$'] if module_skip_patterns is None: module_skip_patterns = ['\\.setup$', '\\._'] self.package_name = package_name self.rst_extension = rst_extension self.package_skip_patterns = package_skip_patterns self.module_skip_patterns = module_skip_patterns self.other_defines = other_defines def get_package_name(self): return self._package_name def set_package_name(self, package_name): ''' Set package_name >>> docwriter = ApiDocWriter('sphinx') >>> import sphinx >>> docwriter.root_path == sphinx.__path__[0] True >>> docwriter.package_name = 'docutils' >>> import docutils >>> docwriter.root_path == docutils.__path__[0] True ''' # It's also possible to imagine caching the module parsing here self._package_name = package_name root_module = self._import(package_name) self.root_path = root_module.__path__[-1] self.written_modules = None package_name = property(get_package_name, set_package_name, None, 'get/set package_name') def _import(self, name): ''' Import namespace package ''' mod = __import__(name) components = name.split('.') for comp in components[1:]: mod = getattr(mod, comp) return mod def _get_object_name(self, line): ''' Get second token in line >>> docwriter = ApiDocWriter('sphinx') >>> docwriter._get_object_name(" def func(): ") 'func' >>> docwriter._get_object_name(" class Klass(object): ") 'Klass' >>> docwriter._get_object_name(" class Klass: ") 'Klass' ''' name = line.split()[1].split('(')[0].strip() # in case we have classes which are not derived from object # ie. old style classes return name.rstrip(':') def _uri2path(self, uri): ''' Convert uri to absolute filepath Parameters ---------- uri : string URI of python module to return path for Returns ------- path : None or string Returns None if there is no valid path for this URI Otherwise returns absolute file system path for URI Examples -------- >>> docwriter = ApiDocWriter('sphinx') >>> import sphinx >>> modpath = sphinx.__path__[0] >>> res = docwriter._uri2path('sphinx.builder') >>> res == os.path.join(modpath, 'builder.py') True >>> res = docwriter._uri2path('sphinx') >>> res == os.path.join(modpath, '__init__.py') True >>> docwriter._uri2path('sphinx.does_not_exist') ''' if uri == self.package_name: return os.path.join(self.root_path, '__init__.py') path = uri.replace(self.package_name + '.', '') path = path.replace('.', os.path.sep) path = os.path.join(self.root_path, path) # XXX maybe check for extensions as well? 
if os.path.exists(path + '.py'): # file path += '.py' elif os.path.exists(os.path.join(path, '__init__.py')): path = os.path.join(path, '__init__.py') else: return None return path def _path2uri(self, dirpath): ''' Convert directory path to uri ''' package_dir = self.package_name.replace('.', os.path.sep) relpath = dirpath.replace(self.root_path, package_dir) if relpath.startswith(os.path.sep): relpath = relpath[1:] return relpath.replace(os.path.sep, '.') def _parse_module(self, uri): ''' Parse module defined in *uri* ''' filename = self._uri2path(uri) if filename is None: print(filename, 'erk') # nothing that we could handle here. return ([], []) f = open(filename, 'rt') functions, classes = self._parse_lines(f) f.close() return functions, classes def _parse_module_with_import(self, uri): """Look for functions and classes in an importable module. Parameters ---------- uri : str The name of the module to be parsed. This module needs to be importable. Returns ------- functions : list of str A list of (public) function names in the module. classes : list of str A list of (public) class names in the module. """ mod = import_module(uri) # find all public objects in the module. obj_strs = [obj for obj in dir(mod) if not obj.startswith('_')] functions = [] classes = [] for obj_str in obj_strs: # find the actual object from its string representation if obj_str not in mod.__dict__: continue obj = mod.__dict__[obj_str] # Check if function / class defined in module if not self.other_defines and not getmodule(obj) == mod: continue # figure out if obj is a function or class if (hasattr(obj, 'func_name') or isinstance(obj, BuiltinFunctionType) or ismethod(obj) or isinstance(obj, FunctionType)): functions.append(obj_str) else: try: issubclass(obj, object) classes.append(obj_str) except TypeError: # not a function or class pass return functions, classes def _parse_lines(self, linesource): ''' Parse lines of text for functions and classes ''' functions = [] classes = [] for line in linesource: if line.startswith('def ') and line.count('('): # exclude private stuff name = self._get_object_name(line) if not name.startswith('_'): functions.append(name) elif line.startswith('class '): # exclude private stuff name = self._get_object_name(line) if not name.startswith('_'): classes.append(name) else: pass functions.sort() classes.sort() return functions, classes def generate_api_doc(self, uri): '''Make autodoc documentation template string for a module Parameters ---------- uri : string python location of module - e.g 'sphinx.builder' Returns ------- head : string Module name, table of contents. body : string Function and class docstrings. ''' # get the names of all classes and functions functions, classes = self._parse_module_with_import(uri) if not len(functions) and not len(classes) and DEBUG: print('WARNING: Empty -', uri) # dbg # Make a shorter version of the uri that omits the package name for # titles uri_short = re.sub(r'^%s\.' % self.package_name, '', uri) head = '.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n' body = '' # Set the chapter title to read 'module' for all modules except for the # main packages if '.' in uri_short: title = 'Module: :mod:`' + uri_short + '`' head += title + '\n' + self.rst_section_levels[2] * len(title) else: title = ':mod:`' + uri_short + '`' head += title + '\n' + self.rst_section_levels[1] * len(title) head += '\n.. automodule:: ' + uri + '\n' head += '\n.. currentmodule:: ' + uri + '\n' body += '\n.. 
currentmodule:: ' + uri + '\n\n' for c in classes: body += '\n:class:`' + c + '`\n' \ + self.rst_section_levels[3] * \ (len(c) + 9) + '\n\n' body += '\n.. autoclass:: ' + c + '\n' # must NOT exclude from index to keep cross-refs working body += ' :members:\n' \ ' :undoc-members:\n' \ ' :show-inheritance:\n' \ '\n' \ ' .. automethod:: __init__\n\n' head += '.. autosummary::\n\n' for f in classes + functions: head += ' ' + f + '\n' head += '\n' for f in functions: # must NOT exclude from index to keep cross-refs working body += f + '\n' body += self.rst_section_levels[3] * len(f) + '\n' body += '\n.. autofunction:: ' + f + '\n\n' return head, body def _survives_exclude(self, matchstr, match_type): ''' Returns True if *matchstr* does not match patterns ``self.package_name`` removed from front of string if present Examples -------- >>> dw = ApiDocWriter('sphinx') >>> dw._survives_exclude('sphinx.okpkg', 'package') True >>> dw.package_skip_patterns.append('^\\.badpkg$') >>> dw._survives_exclude('sphinx.badpkg', 'package') False >>> dw._survives_exclude('sphinx.badpkg', 'module') True >>> dw._survives_exclude('sphinx.badmod', 'module') True >>> dw.module_skip_patterns.append('^\\.badmod$') >>> dw._survives_exclude('sphinx.badmod', 'module') False ''' if match_type == 'module': patterns = self.module_skip_patterns elif match_type == 'package': patterns = self.package_skip_patterns else: raise ValueError('Cannot interpret match type "%s"' % match_type) # Match to URI without package name L = len(self.package_name) if matchstr[:L] == self.package_name: matchstr = matchstr[L:] for pat in patterns: try: pat.search except AttributeError: pat = re.compile(pat) if pat.search(matchstr): return False return True def discover_modules(self): ''' Return module sequence discovered from ``self.package_name`` Parameters ---------- None Returns ------- mods : sequence Sequence of module names within ``self.package_name`` Examples -------- >>> dw = ApiDocWriter('sphinx') >>> mods = dw.discover_modules() >>> 'sphinx.util' in mods True >>> dw.package_skip_patterns.append('\.util$') >>> 'sphinx.util' in dw.discover_modules() False >>> ''' modules = [self.package_name] # raw directory parsing for dirpath, dirnames, filenames in os.walk(self.root_path): # Check directory names for packages root_uri = self._path2uri(os.path.join(self.root_path, dirpath)) # Normally, we'd only iterate over dirnames, but since # dipy does not import a whole bunch of modules we'll # include those here as well (the *.py filenames). 
filenames = [f[:-3] for f in filenames if f.endswith('.py') and not f.startswith('__init__')] for filename in filenames: package_uri = '/'.join((dirpath, filename)) for subpkg_name in dirnames + filenames: package_uri = '.'.join((root_uri, subpkg_name)) package_path = self._uri2path(package_uri) if (package_path and self._survives_exclude(package_uri, 'package')): modules.append(package_uri) return sorted(modules) def write_modules_api(self, modules, outdir): # upper-level modules main_module = modules[0].split('.')[0] ulms = ['.'.join(m.split('.')[:2]) if m.count('.') >= 1 else m.split('.')[0] for m in modules] from collections import OrderedDict module_by_ulm = OrderedDict() for v, k in zip(modules, ulms): if k in module_by_ulm: module_by_ulm[k].append(v) else: module_by_ulm[k] = [v] written_modules = [] for ulm, mods in module_by_ulm.items(): print("Generating docs for %s:" % ulm) document_head = [] document_body = [] for m in mods: print(" -> " + m) head, body = self.generate_api_doc(m) document_head.append(head) document_body.append(body) out_module = ulm + self.rst_extension outfile = os.path.join(outdir, out_module) fileobj = open(outfile, 'wt') fileobj.writelines(document_head + document_body) fileobj.close() written_modules.append(out_module) self.written_modules = written_modules def write_api_docs(self, outdir): """Generate API reST files. Parameters ---------- outdir : string Directory name in which to store files We create automatic filenames for each module Returns ------- None Notes ----- Sets self.written_modules to list of written modules """ if not os.path.exists(outdir): os.mkdir(outdir) # compose list of modules modules = self.discover_modules() self.write_modules_api(modules, outdir) def write_index(self, outdir, froot='gen', relative_to=None): """Make a reST API index file from written files Parameters ---------- path : string Filename to write index to outdir : string Directory to which to write generated index file froot : string, optional root (filename without extension) of filename to write to Defaults to 'gen'. We add ``self.rst_extension``. relative_to : string path to which written filenames are relative. This component of the written file path will be removed from outdir, in the generated index. Default is None, meaning, leave path as it is. """ if self.written_modules is None: raise ValueError('No modules written') # Get full filename path path = os.path.join(outdir, froot + self.rst_extension) # Path written into index is relative to rootpath if relative_to is not None: relpath = (outdir + os.path.sep).replace( relative_to + os.path.sep, '') else: relpath = outdir idx = open(path, 'wt') w = idx.write w('.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n') title = "API Reference" w(title + "\n") w("=" * len(title) + "\n\n") w('.. toctree::\n\n') for f in self.written_modules: w(' %s\n' % os.path.join(relpath, f)) idx.close() dipy-0.13.0/doc/tools/build_modref_templates.py000077500000000000000000000053301317371701200215360ustar00rootroot00000000000000#!/usr/bin/env python """Script to auto-generate our API docs. 
""" from __future__ import print_function, division # stdlib imports import sys import re from os.path import join as pjoin # local imports from apigen import ApiDocWriter # version comparison from distutils.version import LooseVersion as V # ***************************************************************************** def abort(error): print('*WARNING* API documentation not generated: %s' % error) exit() if __name__ == '__main__': package = sys.argv[1] outdir = sys.argv[2] try: other_defines = sys.argv[3] except IndexError: other_defines = True else: other_defines = other_defines in ('True', 'true', '1') # Check that the package is available. If not, the API documentation is not # (re)generated and existing API documentation sources will be used. try: __import__(package) except ImportError as e: abort("Can not import " + package) module = sys.modules[package] # Check that the source version is equal to the installed # version. If the versions mismatch the API documentation sources # are not (re)generated. This avoids automatic generation of documentation # for older or newer versions if such versions are installed on the system. installed_version = V(module.__version__) info_file = pjoin('..', package, 'info.py') info_lines = open(info_file).readlines() source_version = '.'.join([v.split('=')[1].strip(" '\n.") for v in info_lines if re.match( '^_version_(major|minor|micro|extra)', v )]) print('***', source_version) if source_version != installed_version: abort("Installed version does not match source version") docwriter = ApiDocWriter(package, rst_extension='.rst', other_defines=other_defines) docwriter.package_skip_patterns += [r'\.fixes$', r'\.externals$', r'\.tracking\.interfaces.*$', r'\.tracking\.gui_tools.*$', r'.*test.*$', r'^\.utils.*', r'\.boots\.resampling.*$', r'\.fixes.*$', r'\.info.*$', r'\.pkg_info.*$', ] docwriter.write_api_docs(outdir) docwriter.write_index(outdir, 'index', relative_to=outdir) print('%d files written' % len(docwriter.written_modules)) dipy-0.13.0/doc/tools/docgen_cmd.py000077500000000000000000000127411317371701200171130ustar00rootroot00000000000000#!/usr/bin/env python """ Script to generate documentation for command line utilities """ from os.path import join as pjoin from os import listdir import re from subprocess import Popen, PIPE, CalledProcessError import sys import importlib import inspect # version comparison from distutils.version import LooseVersion as V def sh3(cmd): """ Execute command in a subshell, return stdout, stderr If anything appears in stderr, print it out to sys.stderr https://github.com/scikit-image/scikit-image/blob/master/doc/gh-pages.py Copyright (C) 2011, the scikit-image team All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of skimage nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True) out, err = p.communicate() retcode = p.returncode if retcode: raise CalledProcessError(retcode, cmd) else: return out.rstrip(), err.rstrip() def abort(error): print('*WARNING* Command line API documentation not generated: %s' % error) exit() def get_rst_string(module_name, help_string): """ Generate rst text for module """ dashes = "========================\n" rst_text = "" rst_text += dashes rst_text += module_name + "\n" rst_text += dashes + "\n" rst_text += help_string return rst_text if __name__ == '__main__': # package name: Eg: dipy package = sys.argv[1] # directory in which the generated rst files will be saved outdir = sys.argv[2] try: __import__(package) except ImportError as e: abort("Cannot import " + package) module = sys.modules[package] # Check that the source version is equal to the installed # version. If the versions mismatch the API documentation sources # are not (re)generated. This avoids automatic generation of documentation # for older or newer versions if such versions are installed on the system. installed_version = V(module.__version__) info_file = pjoin('..', package, 'info.py') info_lines = open(info_file).readlines() source_version = '.'.join( [v.split('=')[1].strip(" '\n.") for v in info_lines if re.match('^_version_(major|minor|micro|extra)', v)]) print('***', source_version) if source_version != installed_version: abort("Installed version does not match source version") # generate docs command_list = [] workflows_folder = pjoin('..', 'dipy', 'workflows') workflow_class = module = importlib.import_module( "dipy.workflows.workflow") for f in listdir(workflows_folder): fpath = pjoin(workflows_folder, f) module_name = inspect.getmodulename(fpath) if module_name is not None: module = importlib.import_module( "dipy.workflows." + module_name) members = inspect.getmembers(module) for member_name, member_obj in members: if(inspect.isclass(member_obj)): if (issubclass(member_obj, workflow_class.Workflow) and not member_obj == workflow_class.Workflow): # member_obj is a workflow print("Generating docs for: ", member_name) if hasattr(member_obj, 'run'): help_string = inspect.getdoc(member_obj.run) doc_string = get_rst_string(member_name, help_string) out_f = member_name + ".rst" output_file = open(pjoin(outdir, out_f), "w") output_file.write(doc_string) output_file.close() command_list.append(out_f) print("Done") # generate index.rst print("Generating index.rst") index = open(pjoin(outdir, "index.rst"), "w") index.write("Command Line Utilities Reference\n") index.write("================================\n\n") index.write(".. 
toctree::\n\n") for cmd in command_list: index.write(" " + cmd) index.write("\n") index.close() print("Done") dipy-0.13.0/doc/upload-gh-pages.sh000077500000000000000000000015671317371701200166340ustar00rootroot00000000000000#!/bin/bash # Upload website to gh-pages USAGE="$0 []" HTML_DIR=$1 if [ -z "$HTML_DIR" ]; then echo $USAGE exit 1 fi if [ ! -e "$HTML_DIR/index.html" ]; then echo "$HTML_DIR does not contain an index.html" exit 1 fi if [ -d "$HTML_DIR/.git" ]; then echo "$HTML_DIR already contains a .git directory" exit 1 fi PROJECT=$2 if [ -z "$PROJECT" ]; then echo $USAGE exit 1 fi ORGANIZATION=$3 if [ -z "$ORGANIZATION" ]; then ORGANIZATION=nipy fi upstream_repo="https://github.com/$ORGANIZATION/$PROJECT" cd $HTML_DIR git init git checkout -b gh-pages git add * # A nojekyll file is needed to tell github that this is *not* a jekyll site: touch .nojekyll git add .nojekyll git commit -a -m "Documentation build - no history" git remote add origin $upstream_repo git push origin gh-pages --force rm -rf .git # Yes dipy-0.13.0/doc/upload_docs.py000066400000000000000000000063011317371701200161550ustar00rootroot00000000000000#!/usr/bin/env python # Script to upload docs to gh-pages branch of dipy_web that will be # automatically detected by the dipy website. import os import re import sys from os import chdir as cd from subprocess import check_call def sh(cmd): """Execute command in a subshell, return status code.""" print("--------------------------------------------------") print("Executing: %s" % (cmd, )) print("--------------------------------------------------") return check_call(cmd, shell=True) # paths docs_repo_path = "_build/docs_repo" docs_repo_url = "git@github.com:nipy/dipy_web.git" if __name__ == '__main__': # get current directory startdir = os.getcwd() # find the source version info_file = '../dipy/info.py' info_lines = open(info_file).readlines() source_version = '.'.join( [v.split('=')[1].strip(" '\n.") for v in info_lines if re.match( '^_version_(major|minor|micro|extra)', v)]) print("Source version: ", source_version) # check for dev tag if(source_version.split(".")[-1] == "dev"): dev = True print("Development Version detected") else: dev = False # pull current docs_repo if not os.path.exists(docs_repo_path): print("docs_repo not found, pulling from git..") sh("git clone %s %s" % (docs_repo_url, docs_repo_path)) cd(docs_repo_path) print("Moved to " + os.getcwd()) try: sh("git checkout gh-pages") except: while(1): print("\nLooks like gh-pages branch does not exist!") print("Do you want to create a new one? 
(y/n)") choice = str(input()).lower() if choice == 'y': sh("git checkout -b gh-pages") sh("rm -rf *") sh("git add .") sh("git commit -m 'cleaning gh-pages branch'") sh("git push origin gh-pages") break if choice == 'n': print("Please manually create a new gh-pages branch and try again.") sys.exit(0) else: print("Please enter valid choice ..") sh("git pull origin gh-pages") # check if docs for current version exists if (os.path.exists(source_version)) and (dev is not True): print("docs for current version already exists") else: if(dev is True): print("Re-building docs for development version") else: print("Building docs for a release") # build docs and copy to docs_repo cd(startdir) # remove old html and doctree files try: sh("rm -rf _build/json _build/doctrees") except: pass # generate new doc and copy to docs_repo sh("make api") sh("make rstexamples") sh("make json") sh("cp -r _build/json %s/" % (docs_repo_path,)) cd(docs_repo_path) if dev is True: try: sh("rm -r %s" % (source_version,)) except: pass sh("mv json %s" % (source_version,)) sh("git add .") sh("git commit -m \"Add docs for %s\"" % (source_version,)) sh("git push origin gh-pages") dipy-0.13.0/fake_pyrex/000077500000000000000000000000001317371701200146775ustar00rootroot00000000000000dipy-0.13.0/fake_pyrex/Pyrex/000077500000000000000000000000001317371701200160065ustar00rootroot00000000000000dipy-0.13.0/fake_pyrex/Pyrex/Distutils/000077500000000000000000000000001317371701200177725ustar00rootroot00000000000000dipy-0.13.0/fake_pyrex/Pyrex/Distutils/__init__.py000066400000000000000000000000771317371701200221070ustar00rootroot00000000000000# to work around bug in setuptools monkeypatching of distutils dipy-0.13.0/fake_pyrex/Pyrex/Distutils/build_ext.py000066400000000000000000000000321317371701200223160ustar00rootroot00000000000000build_ext = "placeholder" dipy-0.13.0/fake_pyrex/Pyrex/__init__.py000066400000000000000000000000771317371701200201230ustar00rootroot00000000000000# to work around bug in setuptools monkeypatching of distutils dipy-0.13.0/requirements.txt000066400000000000000000000001631317371701200160260ustar00rootroot00000000000000# Check against .travis.yml file and dipy/info.py cython>=0.25.1 numpy>=1.7.1 scipy>=0.9 nibabel>=2.1.0 h5py>=2.4.0dipy-0.13.0/scratch/000077500000000000000000000000001317371701200141715ustar00rootroot00000000000000dipy-0.13.0/scratch/coordmap_example.py000066400000000000000000000033561317371701200200710ustar00rootroot00000000000000import numpy as np from dipy.tracking.vox2track import track_counts from dipy.tracking.utils import density_map import nibabel as nib from nibabel.trackvis import write, empty_header grid = np.mgrid[1.1:1.8:3j,1.1:1.8:3j,.5:5] grid = np.rollaxis(grid, 0, 4) streamlines = [] for ii in grid: for jj in ii: streamlines.append(jj) #Treat these streamlines as if they are in trackvis format and generate counts counts_trackvis = density_map(streamlines, (4,4,5), (1,1,1)) #Treat these streamlines as if they are in nifti format and generate counts counts_nifti = track_counts(streamlines, (4,4,5), (1,1,1), return_elements=False) print("saving trk files and track_count volumes") aff = np.eye(4) aff[0, 0] = -1 img = nib.Nifti1Image(counts_trackvis.astype('int16'), aff) nib.save(img, 'counts_trackvis.nii.gz') img = nib.Nifti1Image(counts_nifti.astype('int16'), aff) nib.save(img, 'counts_nifti.nii.gz') hdr = empty_header() hdr['voxel_size'] = (1,1,1) hdr['voxel_order'] = 'las' hdr['vox_to_ras'] = aff hdr['dim'] = counts_nifti.shape #Treat these streamlines like they are in trackvis 
format and save them streamlines_trackvis = ((ii,None,None) for ii in streamlines) write('slAsTrackvis.trk', streamlines_trackvis, hdr) #Move these streamlines from nifti to trackvis format and save them streamlines_nifti = ((ii+.5,None,None) for ii in streamlines) write('slAsNifti.trk', streamlines_nifti, hdr) """ Trackvis: A------------ | C | | | ----B-------- | | | | ------------- | | | | ------------D A = [0, 0] B = [1, 1] C = [.5, .5] D = [3, 3] Nifti: A------------ | C | | | ----B-------- | | | | ------------- | | | | ------------D A = [-.5, -.5] B = [.5, .5] C = [0, 0] D = [2.5, 2.5] """ dipy-0.13.0/scratch/learning_old.py000066400000000000000000000745151317371701200172140ustar00rootroot00000000000000''' Learning algorithms for tractography''' import numpy as np from dipy.core import track_metrics as tm import dipy.core.track_performance as pf from scipy import ndimage as nd import itertools import time import numpy.linalg as npla def larch(tracks, split_thrs=[50.**2,20.**2,10.**2], ret_atracks=False, info=False): ''' LocAl Rapid Clusters for tractograpHy Parameters ---------- tracks : sequence of tracks as arrays, shape (N1,3) .. (Nm,3) split_thrs : sequence of 3 floats with the squared distances approx_tracks: bool if True return an approximation of the initial tracks info: bool print some information Returns -------- C : dict a tree graph containing the clusters atracks : sequence of approximated tracks the approximation preserves initial shape. ''' ''' t1=time.clock() print 'Reducing to 3-point approximate tracks...' tracks3=[tm.downsample(t,3) for t in tracks] t2=time.clock() print 'Done in ', t2-t1, 'secs' print 'Reducing to n-point approximate tracks...' atracks=[pf.approx_polygon_track(t) for t in tracks] t3=time.clock() print 'Done in ', t3-t2, 'secs' print('Starting larch_preprocessing...') C=pf.larch_preproc(tracks3,split_thrs,info) t4=time.clock() print 'Done in ', t4-t3, 'secs' print('Finding most similar tracks in every cluster ...') for c in C: local_tracks=[atracks[i] for i in C[c]['indices']] #identify the most similar track in the cluster C[c] and return the index of #the track and the distances of this track with all other tracks msi,distances=pf.most_similar_track_mam(local_tracks,metric='avg') C[c]['repz']=atracks[C[c]['indices'][msi]] C[c]['repz_dists']=distances print 'Done in ', time.clock()-t4 if ret_atracks: return C,atracks else: return C ''' return def detect_corresponding_tracks(indices,tracks1,tracks2): ''' Detect corresponding tracks from 1 to 2 Parameters ---------- indices : sequence of indices of tracks1 that are to be detected in tracks2 tracks1 : sequence of tracks as arrays, shape (N1,3) .. (Nm,3) tracks2 : sequence of tracks as arrays, shape (M1,3) .. (Mm,3) Returns ------- track2track : array of int showing the correspondance ''' li=len(indices) track2track=np.zeros((li,3)) cnt=0 for i in indices: rt=[pf.zhang_distances(tracks1[i],t,'avg') for t in tracks2] rt=np.array(rt) track2track[cnt-1]=np.array([cnt,i,rt.argmin()]) cnt+=1 return track2track.astype(int) def detect_corresponding_tracks_extended(indices,tracks1,indices2,tracks2): ''' Detect corresponding tracks from 1 to 2 Parameters: ---------------- indices: sequence of indices of tracks1 that are to be detected in tracks2 tracks1: sequence of tracks as arrays, shape (N1,3) .. (Nm,3) indices2: sequence of indices of tracks2 in the initial brain tracks2: sequence of tracks as arrays, shape (M1,3) .. 
(Mm,3) Returns: ----------- track2track: array of int showing the correspondance ''' li=len(indices) track2track=np.zeros((li,3)) cnt=0 for i in indices: rt=[pf.zhang_distances(tracks1[i],t,'avg') for t in tracks2] rt=np.array(rt) track2track[cnt-1]=np.array([cnt,i,indices2[rt.argmin()]]) cnt+=1 return track2track.astype(int) def rm_far_ends(ref,tracks,dist=25): ''' rm tracks with far endpoints Parameters ---------- ref : array, shape (N,3) xyz points of the reference track tracks : sequence of tracks as arrays, shape (N1,3) .. (Nm,3) dist : float endpoint distance threshold Returns ------- tracksr : sequence reduced tracks indices : sequence indices of tracks ''' indices=[i for (i,t) in enumerate(tracks) if tm.max_end_distances(t,ref) <= dist] tracksr=[tracks[i] for i in indices] return tracksr,indices def rm_far_tracks(ref,tracks,dist=25,down=False): ''' Remove tracks which are far away using as a distance metric the average euclidean distance of the following three points start point, midpoint and end point. Parameters ---------- ref : array, shape (N,3) xyz points of the reference track tracks : sequence of tracks as arrays, shape (N1,3) .. (Nm,3) dist : float average distance threshold down: bool {True, False} if down = True then ref and tracks are already downsampled if down = False then downsample them Returns ------- tracksr : sequence reduced tracks indices : sequence indices of tracks ''' if down==False: tracksd=[tm.downsample(t,3) for t in tracks] refd=tm.downsample(ref,3) indices=[i for (i,t) in enumerate(tracksd) if np.mean(np.sqrt(np.sum((t-refd)**2,axis=1))) <= dist] tracksr=[tracks[i] for i in indices] return tracksr, indices if down==True: indices=[i for (i,t) in enumerate(tracks) if np.mean(np.sqrt(np.sum((t-ref)**2,axis=1))) <= dist] tracksr=[tracks[i] for i in indices] return tracksr,indices def missing_tracks(indices1,indices2): ''' Missing tracks in bundle1 but not bundle2 Parameters: ------------------ indices1: sequence of indices of tracks in bundle1 indices2: sequence of indices of tracks in bundle2 Returns: ----------- indices: sequence of indices of tracks in bundle1 absent from bundle2 Example: ------------- >>> tracksar,indar=rm_far_tracks(ref,tracksa,dist=20) >>> fornix_ind=G[5]['indices'] >>> len(missing_tracks(fornix_ind, indar)) = 5 >>> tracksar,indar=rm_far_tracks(ref,tracksa,dist=25) >>> fornix_ind=G[5]['indices'] >>> len(missing_tracks(fornix_ind, indar)) = 0 ''' return list(set(indices1).difference(set(indices2))) def skeletal_tracks(tracks,rand_selected=1000,ball_radius=5,neighb_no=50): ''' Filter out unnescessary tracks and keep only a few good ones. Aka the balls along a track method. Parameters: ---------------- tracks: sequence of tracks rand_selected: int number of initially selected fibers ball_radius: float balls along tracks radii neighb_no: int lowest threshold for the number of tracks included Returns: ----------- reps: sequence of indices of representative aka skeletal tracks. They should be <= rand_selected ''' trackno=len(tracks) #select 1000 random tracks random_indices=(trackno*np.random.rand(rand_selected)).astype(int) tracks3points=[tm.downsample(t,3) for t in tracks] #store representative tracks representative=[] representative_indices=[] #store indices of already visited tracks i.e. 
which already have a representative track visited=[] import time t1=time.clock() # for every index of the possible representative tracks for (i,t) in enumerate(random_indices): #if track is not already classified if i not in visited: print(i,t) #rm far tracks tracksr,indices=rm_far_tracks(tracks3points[t],tracks3points,dist=25,down=True) cnt_neighb=0 just_visited=[] #for every possible neighbour track tr with index tri for tri in indices: cnt_intersected_balls=0 #for every point of the possible representative track for p in tracks[t]: #if you intersect the sphere surrounding the point of the random track increase a counter if tm.inside_sphere(tracks[tri],p,ball_radius): cnt_intersected_balls+=1 #if all spheres are covered then accept this track as your neighbour if cnt_intersected_balls ==len(tracks[t]): cnt_neighb+=1 just_visited.append(tri) #if the number of possible neighbours is above threshold then accept track[t] as a representative fiber if cnt_neighb>=neighb_no: representative.append(t) visited=visited+just_visited print 'Time:',time.clock()-t1 return representative def detect_corpus_callosum(tracks,plane=91,ysize=217,zsize=181,width=1.0,use_atlas=0,use_preselected_tracks=0,ball_radius=5): ''' Detect corpus callosum in a mni registered dataset of shape (181,217,181) Parameters: ---------------- tracks: sequence of tracks Returns: ---------- cc_indices: sequence with the indices of the corpus_callosum tracks left_indices: sequence with the indices of the rest of the brain ''' cc=[] #for every track for (i,t) in enumerate(tracks): #for every index of any point in the track for pi in range(len(t)-1): #if track segment is cutting the plane (assuming the plane is at the x-axis X=plane) if (t[pi][0] <= plane and t[pi+1][0] >= plane) or (t[pi+1][0] <= plane and t[pi][0] >= plane) : v=t[pi+1]-t[pi] k=(plane-t[pi][0])/v[0] hit=k*v+t[pi] #report the index of the track and the point of intersection with the plane cc.append((i,hit)) #indices cc_i=[c[0] for c in cc] print 'Number of tracks cutting plane Before',len(cc_i) #hit points cc_p=np.array([c[1] for c in cc]) p_neighb=len(cc_p)*[0] cnt=0 #imaging processing from now on im=np.zeros((ysize,zsize)) im2=np.zeros((ysize,zsize)) im_track={} cnt=0 for p in cc_p: p1=int(round(p[1])) p2=int(round(p[2])) im[p1,p2]=1 im2[p1,p2]=im2[p1,p2]+1 try: im_track[(p1,p2)]=im_track[(p1,p2)]+[cc_i[cnt]] except: im_track[(p1,p2)]=[cc_i[cnt]] cnt+=1 #create a cross structure cross=np.array([[0,1,0],[1,1,1],[0,1,0]]) im=(255*im).astype('uint8') im2=(np.interp(im2,[0,im2.max()],[0,255])).astype('uint8') #erosion img=nd.binary_erosion(im,structure=cross) #and another one erosion #img=nd.binary_erosion(img,structure=cross) #im2g=nd.grey_erosion(im2,structure=cross) #im2g2=nd.grey_erosion(im2g,structure=cross) indg2=np.where(im2==im2.max()) p1max=indg2[0][0] p2max=indg2[1][0] #label objects imgl=nd.label(img) no_labels=imgl[1] imgl=imgl[0] #find the biggest objects the second biggest should be the cc the biggest should be the background ''' find_big=np.zeros(no_labels) for i in range(no_labels): ind=np.where(imgl==i) find_big[i]=len(ind[0]) print find_big find_bigi=np.argsort(find_big) ''' cc_label=imgl[p1max,p2max] imgl2=np.zeros((ysize,zsize)) #cc is found and copied to a new image here #imgl2[imgl==int(find_bigi[-2])]=1 imgl2[imgl==int(cc_label)]=1 imgl2=imgl2.astype('uint8') #now do another dilation to recover some cc shape from the previous erosion imgl2d=nd.binary_dilation(imgl2,structure=cross) #and another one 
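# (a second dilation, mirroring the second erosion that is commented out above, is left disabled below; enabling it would thicken the recovered corpus callosum mask further)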
#imgl2d=nd.binary_dilation(imgl2d,structure=cross) imgl2d=imgl2d.astype('uint8') #get the tracks back cc_indices=[] indcc=np.where(imgl2d>0) for i in range(len(indcc[0])): p1=indcc[0][i] p2=indcc[1][i] cc_indices=cc_indices+im_track[(p1,p2)] print 'After', len(cc_indices) #export also the rest of the brain indices=range(len(tracks)) left=set(indices).difference(set(cc_indices)) left_indices=[l for l in left] #return im,im2,imgl2d,cc_indices,left_indices return cc_indices,left_indices def track_indices_for_a_value_in_atlas(atlas,value,tes,tracks): ind=np.where(atlas==value) indices=set([]) for i in range(len(ind[0])): try: tmp=tes[(ind[0][i], ind[1][i], ind[2][i])] indices=indices.union(set(tmp)) except: pass #bundle=[tracks[i] for i in list(indices)] #return bundle,list(indices) return list(indices) def relabel_by_atlas_value_and_mam(atlas_tracks,atlas,tes,tracks,tracksd,zhang_thr): emi=emi_atlas() brain_relabeled={} for e in range(1,9): #from emi: print emi[e]['bundle_name'] indices=emi[e]['init_ref']+emi[e]['selected_ref']+emi[e]['apr_ref'] tmp=detect_corresponding_tracks(indices,atlas_tracks,tracks) corresponding_indices=tmp[:,2] corresponding_indices=list(set(corresponding_indices)) value_indices=[] for value in emi[e]['value']: value_indices+=track_indices_for_a_value_in_atlas(atlas,value,tes,tracks) value_indices=list(set(value_indices)) print 'len corr_ind',len(corresponding_indices) #check if value_indices do not have anything in common with corresponding_indices and expand if list(set(value_indices).intersection(set(corresponding_indices)))==[]: #value_indices=corresponding_indices print 'len corr_ind',len(corresponding_indices) for ci in corresponding_indices: print 'koukou',ci ref=tracksd[ci] brain_rf, ind_fr = rm_far_tracks(ref,tracksd,dist=10,down=True) value_indices+=ind_fr value_indices=list(set(value_indices)) print 'len vi',len(value_indices) value_indices_new=[] #reduce value_indices which are far from every corresponding fiber for vi in value_indices: dist=[] for ci in corresponding_indices: dist.append(pf.zhang_distances(tracks[vi],tracks[ci],'avg')) for d in dist: if d <= zhang_thr[e-1]: value_indices_new.append(vi) value_indices=list(set(value_indices_new)) #store value indices brain_relabeled[e]={} brain_relabeled[e]['value_indices']=value_indices brain_relabeled[e]['corresponding_indices']=corresponding_indices brain_relabeled[e]['color']=emi[e]['color'] brain_relabeled[e]['bundle_name']=emi[e]['bundle_name'][0] return brain_relabeled def threshold_hitdata(hitdata, divergence_threshold=0.25, fibre_weight=0.8): ''' [1] Removes hits in hitdata which have divergence above threshold. [2] Removes fibres in hitdata whose fraction of remaining hits is below the required weight. 
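Here hitdata is the per-segment output of cut_plane: one array of (x, y, z, r, f) rows for every segment of the reference fibre, where r is the radial coefficient of divergence of the hit and f is the index of the fibre that produced it.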
Parameters: ---------------- ref: array, shape (N,5) xyzrf hit data from cut_planes divergence_threshold: float if radial coefficient of divergence is above this then drop the hit fibre_weight: float the number of remaing hits on a fibre as a fraction of len(trackdata), which is the maximum number possible Returns: ----------- reduced_hitdata: array, shape (M, 5) light_weight_fibres: list of integer track indices ''' # first pass: remove hits with r>divergence_threshold firstpass = [[[x,y,z,r,f] for (x,y,z,r,f) in plane if r<=divergence_threshold] for plane in hitdata] # second pass: find fibres hit weights fibrecounts = {} for l in [[f,r] for (x,y,z,r,f) in itertools.chain(*firstpass)]: f = l[0].astype('int') try: fibrecounts[f] += 1 except: fibrecounts[f] = 1 weight_thresh = len(hitdata)*fibre_weight heavy_weight_fibres = [f for f in fibrecounts.keys() if fibrecounts[f]>=weight_thresh] # third pass reduced_hitdata = [np.array([[x,y,z,r,f] for (x,y,z,r,f) in plane if fibrecounts[f.astype('int')] >= weight_thresh]) for plane in firstpass] return reduced_hitdata, heavy_weight_fibres def neck_finder(hitdata, ref): ''' To identify regions of concentration of fibres related by hitdata to a reference fibre ''' #typically len(hitdata) = len(ref)-2 at present, though it should ideally be # len(ref)-1 which is the number of segments in ref # We will assume that hitdata[i] relates to the segment from ref[i] to ref[i+1] #xyz=[] #rcd=[] #fibres=[] weighted_mean_rcd = [] unweighted_mean_rcd = [] weighted_mean_dist = [] unweighted_mean_dist = [] hitcount = [] for (p, plane) in enumerate(hitdata): xyz = plane[:,:3] rcd =plane[:,3] fibres = plane[:,4] hitcount +=[len(plane)] radial_distances=np.sqrt(np.diag(np.inner(xyz-ref[p],xyz-ref[p]))) unweighted_mean_rcd += [np.average(1-rcd)] weighted_mean_rcd += [np.average(1-rcd, weights=np.exp(-radial_distances))] unweighted_mean_dist += [np.average(np.exp(-radial_distances))] weighted_mean_dist += [np.average(np.exp(-radial_distances), weights=1-rcd)] return np.array(hitcount), np.array(unweighted_mean_rcd), np.array(weighted_mean_rcd), \ np.array(unweighted_mean_dist), np.array(weighted_mean_dist) def max_concentration(plane_hits,ref): ''' calculates the log determinant of the concentration matrix for the hits in planehits ''' dispersions = [np.prod(np.sort(npla.eigvals(np.cov(p[:,0:3].T)))[1:2]) for p in plane_hits] index = np.argmin(dispersions) log_max_concentration = -np.log2(dispersions[index]) centre = ref[index+1] return index, centre, log_max_concentration def refconc(brain, ref, divergence_threshold=0.3, fibre_weight=0.7): ''' given a reference fibre locates the parallel fibres in brain (tracks) with threshold_hitdata applied to cut_planes output then follows with concentration to locate the locus of a neck ''' hitdata = pf.cut_plane(brain, ref) reduced_hitdata, heavy_weight_fibres = threshold_hitdata(hitdata, divergence_threshold, fibre_weight) #index, centre, log_max_concentration = max_concentration(reduced_hitdata, ref) index=None centre=None log_max_concentration=None return heavy_weight_fibres, index, centre def bundle_from_refs(brain,braind, refs, divergence_threshold=0.3, fibre_weight=0.7,far_thresh=25,zhang_thresh=15, end_thresh=10): ''' ''' bundle = set([]) centres = [] indices = [] for ref in refs: refd=tm.downsample(ref,3) brain_rf, ind_fr = rm_far_tracks(refd,braind,dist=far_thresh,down=True) brain_rf=[brain[i] for i in ind_fr] #brain_rf,ind_fr = rm_far_tracks(ref,brain,dist=far_thresh,down=False) heavy_weight_fibres, index, centre = 
refconc(brain_rf, ref, divergence_threshold, fibre_weight) heavy_weight_fibres_z = [i for i in heavy_weight_fibres if pf.zhang_distances(ref,brain_rf[i],'avg')end_thresh] hwfind = set([ind_fr[i] for i in heavy_weight_fibres_z]) bundle = bundle.union(hwfind) bundle_med = [] for i in bundle: minmaxdist = 0. for ref in refs: minmaxdist=min(minmaxdist,tm.max_end_distances(brain[i],ref)) if minmaxdist<=end_thresh: bundle_med.append(i) #centres.append(centre) #indices.append(index) #return list(bundle), centres, indices return bundle_med class FACT_Delta(): ''' Generates tracks with termination criteria defined by a delta function [1]_ and it has similarities with FACT algorithm [2]_. Can be used with any reconstruction method as DTI,DSI,QBI,GQI which can calculate an orientation distribution function and find the local peaks of that function. For example a single tensor model can give you only one peak a dual tensor model 2 peaks and quantitative anisotropy method as used in GQI can give you 3,4,5 or even more peaks. The parameters of the delta function are checking thresholds for the direction propagation magnitude and the angle of propagation. A specific number of seeds is defined randomly and then the tracks are generated for that seed if the delta function returns true. Trilinear interpolation is being used for defining the weights of the propagation. References ---------- .. [1] Yeh. et al. Generalized Q-Sampling Imaging, TMI 2010. .. [2] Mori et al. Three-dimensional tracking of axonal projections in the brain by magnetic resonance imaging. Ann. Neurol. 1999. ''' def __init__(self,qa,ind,seeds_no=1000,odf_vertices=None,qa_thr=0.0239,step_sz=0.5,ang_thr=60.): ''' Parameters ---------- qa: array, shape(x,y,z,Np), magnitude of the peak (QA) or shape(x,y,z) a scalar volume like FA. ind: array, shape(x,y,z,Np), indices of orientations of the QA peaks found at odf_vertices used in QA or, shape(x,y,z), ind seeds_no: number of random seeds odf_vertices: sphere points which define a discrete representation of orientations for the peaks, the same for all voxels qa_thr: float, threshold for QA(typical 0.023) or FA(typical 0.2) step_sz: float, propagation step ang_thr: float, if turning angle is smaller than this threshold then tracking stops. Returns ------- tracks: sequence of arrays ''' if len(qa.shape)==3: qa.shape=qa.shape+(1,) ind.shape=ind.shape+(1,) #store number of maximum peacks self.Np=qa.shape[-1] x,y,z,g=qa.shape tlist=[] if odf_vertices==None: eds=np.load(os.path.join(os.path.dirname(__file__),'matrices',\ 'evenly_distributed_sphere_362.npz')) odf_vertices=eds['vertices'] self.seed_list=[] for i in range(seeds_no): rx=(x-1)*np.random.rand() ry=(y-1)*np.random.rand() rz=(z-1)*np.random.rand() seed=np.array([rx,ry,rz]) #print 'init seed', seed #self.seed_list.append(seed.copy()) track=self.propagation(seed.copy(),qa,ind,odf_vertices,qa_thr,ang_thr,step_sz) if track == None: pass else: self.seed_list.append(seed.copy()) tlist.append(track) self.tracks=tlist def trilinear_interpolation(self,X): ''' Parameters ---------- X: array, shape(3,), a point Returns -------- W: array, shape(8,2) weights, think of them like the 8 subvolumes of a unit cube surrounding the seed. 
IN: array, shape(8,2), the corners of the unit cube ''' Xf=np.floor(X) #d holds the distance from the (floor) corner of the voxel d=X-Xf #nd holds the distance from the opposite corner nd = 1-d #filling the weights W=np.array([[ nd[0] * nd[1] * nd[2] ], [ d[0] * nd[1] * nd[2] ], [ nd[0] * d[1] * nd[2] ], [ nd[0] * nd[1] * d[2] ], [ d[0] * d[1] * nd[2] ], [ nd[0] * d[1] * d[2] ], [ d[0] * nd[1] * d[2] ], [ d[0] * d[1] * d[2] ]]) IN=np.array([[ Xf[0] , Xf[1] , Xf[2] ], [ Xf[0]+1 , Xf[1] , Xf[2] ], [ Xf[0] , Xf[1]+1, Xf[2] ], [ Xf[0] , Xf[1] , Xf[2]+1 ], [ Xf[0]+1 , Xf[1]+1, Xf[2] ], [ Xf[0] , Xf[1]+1, Xf[2]+1 ], [ Xf[0]+1 , Xf[1] , Xf[2]+1 ], [ Xf[0]+1 , Xf[1]+1, Xf[2]+1 ]]) return W,IN.astype(np.int) def nearest_direction(self,dx,qa,ind,odf_vertices,qa_thr=0.0245,ang_thr=60.): ''' Give the nearest direction to a point Parameters ---------- dx: array, shape(3,), as float, moving direction of the current tracking qa: array, shape(Np,), float, quantitative anisotropy matrix, where Np the number of peaks, found using self.Np ind: array, shape(Np,), float, index of the track orientation odf_vertices: array, shape(N,3), float, odf sampling directions qa_thr: float, threshold for QA, we want everything higher than this threshold ang_thr: float, theshold, we only select fiber orientation with this range Returns -------- delta: bool, delta funtion, if 1 we give it weighting if it is 0 we don't give any weighting direction: array, shape(3,), the fiber orientation to be consider in the interpolation ''' max_dot=0 max_doti=0 angl = np.cos((np.pi*ang_thr)/180.) if qa[0] <= qa_thr: return False, np.array([0,0,0]) for i in range(self.Np): if qa[i]<= qa_thr: break curr_dot = np.abs(np.dot(dx, odf_vertices[ind[i]])) if curr_dot > max_dot: max_dot = curr_dot max_doti = i if max_dot < angl : return False, np.array([0,0,0]) if np.dot(dx,odf_vertices[ind[max_doti]]) < 0: return True, - odf_vertices[ind[max_doti]] else: return True, odf_vertices[ind[max_doti]] def propagation_direction(self,point,dx,qa,ind,odf_vertices,qa_thr,ang_thr): ''' Find where you are moving next ''' total_w = 0 # total weighting new_direction = np.array([0,0,0]) w,index=self.trilinear_interpolation(point) #print w[0],w[1],w[2],w[3],w[4],w[5],w[6],w[7] #print index #check if you are outside of the volume for i in range(3): if index[7][i] >= qa.shape[i] or index[0][i] < 0: return False, np.array([0,0,0]) #calculate qa & ind of each of the 8 corners for m in range(8): x,y,z = index[m] qa_tmp = qa[x,y,z] ind_tmp = ind[x,y,z] #print qa_tmp[0]#,qa_tmp[1],qa_tmp[2],qa_tmp[3],qa_tmp[4] delta,direction = self.nearest_direction(dx,qa_tmp,ind_tmp,odf_vertices,qa_thr,ang_thr) #print delta, direction if not delta: continue total_w += w[m] new_direction = new_direction + w[m][0]*direction if total_w < .5: # termination criteria return False, np.array([0,0,0]) return True, new_direction/np.sqrt(np.sum(new_direction**2)) def initial_direction(self,seed,qa,ind,odf_vertices,qa_thr): ''' First direction that we get from a seeding point ''' #very tricky/cool addition/flooring that helps create a valid #neighborhood (grid) for the trilinear interpolation to run smoothly #seed+=0.5 point=np.floor(seed+.5) x,y,z = point qa_tmp=qa[x,y,z,0]#maximum qa ind_tmp=ind[x,y,z,0]#corresponing orientation indices for max qa if qa_tmp < qa_thr: return False, np.array([0,0,0]) else: return True, odf_vertices[ind_tmp] def propagation(self,seed,qa,ind,odf_vertices,qa_thr,ang_thr,step_sz): ''' Parameters ---------- seed: array, shape(3,), point where the tracking starts qa: 
array, shape(Np,), float, quantitative anisotropy matrix, where Np the number of peaks, found using self.Np ind: array, shape(Np,), float, index of the track orientation Returns ------- d: bool, delta function result idirection: array, shape(3,), index of the direction of the propagation ''' point_bak=seed.copy() point=seed.copy() #d is the delta function d,idirection=self.initial_direction(seed,qa,ind,odf_vertices,qa_thr) #print('FD',idirection[0],idirection[1],idirection[2]) #print d if not d: return None dx = idirection #point = seed-0.5 track = [] track.append(point.copy()) #track towards one direction while d: d,dx = self.propagation_direction(point,dx,qa,ind,\ odf_vertices,qa_thr,ang_thr) if not d: break point = point + step_sz*dx track.append(point) d = True dx = - idirection point=point_bak.copy() #point = seed #track towards the opposite direction while d: d,dx = self.propagation_direction(point,dx,qa,ind,\ odf_vertices,qa_thr,ang_thr) if not d: break point = point + step_sz*dx track.insert(0,point.copy()) return np.array(track) dipy-0.13.0/scratch/odf.py000066400000000000000000000052271317371701200153210ustar00rootroot00000000000000import numpy as np from enthought.mayavi import mlab import Image def disp_odf(sph_map, theta_res=64, phi_res=32, colormap='RGB', colors=256): pi = np.pi sin = np.sin cos = np.cos theta, phi = np.mgrid[0:2*pi:theta_res*1j, 0:pi:phi_res*1j] x = sin(phi)*cos(theta) y = sin(phi)*sin(theta) z = cos(phi) nvox = np.prod(sph_map.shape) x_cen, y_cen, z_cen = _3grid(sph_map.shape) odf_values = sph_map.evaluate_at(theta, phi) max_value = odf_values.max() mlab.figure() for ii in range(nvox): odf_ii = odf_values.reshape(nvox, theta_res, phi_res)[ii,:,:] odf_ii /= max_value * 2 if colormap == 'RGB': rgb = np.r_['-1,3,0', x*odf_ii, y*odf_ii, z*odf_ii] rgb = np.abs(rgb*255/rgb.max()).astype('uint8') odf_im = Image.fromarray(rgb, mode='RGB') odf_im = odf_im.convert('P', palette=Image.ADAPTIVE, colors=colors) lut = np.empty((colors,4),'uint8') lut[:,3] = 255 lut[:,0:3] = np.reshape(odf_im.getpalette(),(colors,3)) oo = mlab.mesh(x*odf_ii + x_cen.flat[ii], y*odf_ii + y_cen.flat[ii], z*odf_ii + z_cen.flat[ii], scalars=np.int16(odf_im)) oo.module_manager.scalar_lut_manager.lut.table=lut else: oo = mlab.mesh(x*odf_ii + x_cen.flat[ii], y*odf_ii + y_cen.flat[ii], z*odf_ii + z_cen.flat[ii], scalars=odf_ii, colormap=colormap) def _3grid(shape): if len(shape) > 3: raise ValueError('cannot display 4d image') elif len(shape) < 3: d = [1, 1, 1] d[0:len(shape)] = shape else: d = shape return np.mgrid[0:d[0], 0:d[1], 0:d[2]] if __name__ == '__main__': import dipy.core.qball as qball from dipy.io.bvectxt import read_bvec_file filename='/Users/bagrata/HARDI/E1322S8I1.nii.gz' grad_table_filename='/Users/bagrata/HARDI/E1322S8I1.bvec' from nipy import load_image, save_image grad_table, b_values = read_bvec_file(grad_table_filename) img = load_image(filename) print 'input dimensions: ' print img.ndim print 'image size: ' print img.shape print 'image affine: ' print img.affine print 'images has pixels with size: ' print np.dot(img.affine, np.eye(img.ndim+1)).diagonal()[0:3] data = np.asarray(img) theta, phi = np.mgrid[0:2*np.pi:64*1j, 0:np.pi:32*1j] odf_i = qball.ODF(data[188:192,188:192,22:24,:],4,grad_table,b_values) disp_odf(odf_i[0:1,0:2,0:2]) dipy-0.13.0/scratch/profile_dti.py000066400000000000000000000006301317371701200170420ustar00rootroot00000000000000""" To use: import profile_dti as p import dipy.reconst.dti as dti lprun -f dti.restore_fit_tensor -f p.tm.fit_method p.func() """ 
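# If line_profiler / the %lprun magic is not available, a coarser per-function
# profile of the same fit can be obtained with the standard library instead --
# a minimal sketch, assuming this module has been imported as `p` (which also
# loads the Stanford HARDI data) exactly as in the docstring above:
#
#     import cProfile, pstats
#     cProfile.run('p.func()', 'dti_profile')
#     pstats.Stats('dti_profile').sort_stats('cumulative').print_stats(10)
#
# or, from the shell: python -m cProfile -s cumulative profile_dti.py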
import dipy.core.gradients as grad import dipy.data as dpd import dipy.reconst.dti as dti img, gtab = dpd.read_stanford_hardi() dd = img.get_data() tm = dti.TensorModel(gtab) tf = tm.fit(dd) def func(): tf.odf(dpd.default_sphere) if __name__=="__main__": func() dipy-0.13.0/scratch/restore_dti_simulations.py000066400000000000000000000017571317371701200215270ustar00rootroot00000000000000 import numpy as np import nibabel as nib import dipy.reconst.dti as dti import dipy.data as dpd import dipy.core.gradients as grad b0 = 1000. bvecs, bval = dpd.read_bvec_file(dpd.get_data('55dir_grad.bvec')) gtab = grad.gradient_table(bval, bvecs) B = bval[1] D = np.array([1., 1., 1., 0., 0., 1., -np.log(b0) * B]) / B evals = np.array([2., 1., 0.]) / B md = evals.mean() tensor = dti.from_lower_triangular(D) X = dti.design_matrix(bvecs, bval) data = np.exp(np.dot(X,D)) data.shape = (-1,) + data.shape dti_wls = dti.TensorModel(gtab) fit_wls = dti_wls.fit(data) fa1 = fit_wls.fa noisy_data = np.copy(data) noisy_data[..., -1] = 1.0 fit_wls_noisy = dti_wls.fit(noisy_data) fa2 = fit_wls_noisy.fa dti_restore = dti.TensorModel(gtab, fit_method='RESTORE', sigma=67.) fit_restore_noisy = dti_restore.fit(noisy_data) fa3 = fit_restore_noisy.fa print("FA for noiseless data: %s"%fa1) print("FA for noise-introduced data: %s"%fa2) print("FA for noise-introduced data, analyzed with RESTORE: %s"%fa3) dipy-0.13.0/scratch/sphplot.py000066400000000000000000000017071317371701200162410ustar00rootroot00000000000000import numpy as np from dipy.viz import fos import dipy.core.geometry as geometry import matplotlib.pyplot as mplp def plot_sphere(v,key): r = fos.ren() fos.add(r,fos.point(v,fos.green, point_radius= 0.01)) fos.show(r, title=key, size=(1000,1000)) def plot_lambert(v,key,centre=np.array([0,0])): lamb = geometry.lambert_equal_area_projection_cart(*v.T).T (y1,y2) = lamb radius = np.sum(lamb**2,axis=0) < 1 #print inner #print y1[inner] #print y1[-inner] fig = mplp.figure(facecolor='w') current = fig.add_subplot(111) current.patch.set_color('k') current.plot(y1[radius],y2[radius],'.g') current.plot(y1[-radius],y2[-radius],'.r') current.plot([0.],[0.],'ob') #current.patches.Circle(*centre, radius=50, color='w', fill=True, alpha=0.7) current.axes.set_aspect(aspect = 'equal', adjustable = 'box') current.title.set_text(key) fig.show() fig.waitforbuttonpress() mplp.close() dipy-0.13.0/scratch/twoD.py000066400000000000000000000012101317371701200154520ustar00rootroot00000000000000import pylab as pl import numpy as np def imshow(array, cmap='gray',interpolation='nearest', alpha=1.0, vmin=None, vmax=None, origin=None, extent=None): """ Wrapper for pylab.imshow that displays array values as well coordinate values with mouse over. 
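For example, imshow(np.random.rand(20, 30)) displays the (transposed) array and the figure's status bar then reports x, y and the value under the cursor.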
""" pl.imshow(array.T, cmap=cmap, interpolation=interpolation, alpha=alpha, vmin=vmin, vmax=vmax, origin=origin, extent=extent) ax = pl.gca() ax.format_coord = __report_pixel def __report_pixel(x, y): x = np.round(x) y = np.round(y) v = pl.gca().get_images()[0].get_array()[y, x] return "x = %d y = %d v = %5.3f" % (x, y, v) dipy-0.13.0/scratch/very_scratch/000077500000000000000000000000001317371701200166655ustar00rootroot00000000000000dipy-0.13.0/scratch/very_scratch/bingham.py000066400000000000000000000126031317371701200206460ustar00rootroot00000000000000import sympy from scipy.integrate import quad, dblquad from scipy.optimize import fmin_powell import numpy as np import scipy as sc ''' def integrand(t,n,x): return np.exp(-x*t) / t**n def expint(n,x): return quad(integrand, 1, np.Inf, args=(n, x))[0] vec_expint = np.vectorize(expint) print vec_expint(3,np.arange(1.0,4.0,0.5)) ''' #array([ 0.1097, 0.0567, 0.0301, 0.0163, 0.0089, 0.0049]) ''' print sc.special.expn(3,np.arange(1.0,4.0,0.5)) ''' #array([ 0.1097, 0.0567, 0.0301, 0.0163, 0.0089, 0.0049]) ''' result = quad(lambda x: expint(3, x), 0, np.inf) print result ''' #(0.33333333324560266, 2.8548934485373678e-09) ''' I3 = 1.0/3.0 print I3 #0.333333333333 ''' def bingham_kernel(k1,k2,theta,phi): return np.exp(((k1*np.cos(phi)**2+k2*np.sin(phi)**2)*np.sin(theta)**2)/4*np.pi) def d(k1,k2): #print (k1,k2) return dblquad(lambda theta, phi: bingham_kernel(k1,k2,theta,phi), 0, np.pi, lambda phi: 0, lambda phi: 2*np.pi)[0] print d(-6.999, -3.345) #K1,K2,t1,t2,ph,th=sympy.symbols('K1,K2,t1,t2,ph,th') N = 100 def F((k1,k2),(t1,t2,N)): val = -N*4*np.pi - N*np.log(d(k1,k2)) + k1*t1 + k2*t2 print (-val,k1,k2) return -val min = fmin_powell(F,(-1,-1), ((-3.345, -6.999, 1000),)) print min #d = sympy.integrate(sympy.exp((k1*sympy.cos(phi)**2+k2*sympy.sin(phi)**2)*sympy.sin(theta)**2)/(4*sympy.pi),(phi,0,2*sympy.pi),(theta,0,sympy.pi)) ''' def I(n): return dblquad(lambda t, x: np.exp(-x*t)/t**n, 0, np.Inf, lambda x: 1, lambda x: np.Inf) print I(4) #(0.25000000000435768, 1.0518245707751597e-09) print I(3) #(0.33333333325010883, 2.8604069919261191e-09) print I(2) #(0.49999999999857514, 1.8855523253868967e-09) k1,k2,phi,theta=sympy.symbols('k1,k2,phi,theta') d = sympy.integrate(sympy.exp((k1*sympy.cos(phi)**2+k2*sympy.sin(phi)**2)*sympy.sin(theta)**2)/(4*sympy.pi),(phi,0,2*sympy.pi),(theta,0,sympy.pi)) from scipy.integrate import quad from math import pi d = sympy.integrate(sympy.exp((k1*sympy.cos(phi)**2+k2*sympy.sin(phi)**2)*sympy.sin(theta)**2)/(4*sympy.pi),(phi,0,2*sympy.pi),(theta,0,sympy.pi)) ''' ''' Table C.3: Maximum likelihood estimators of k1,k2 in the Bingham distribution for given eigenvalues w1,w2. Data from Mardia and Zemroch (1977). 
Upper (lower) number is k1(k2) w1 0.02 0.04 0.06 0.08 0.10 0.12 0.14 0.16 0.18 0.20 0.22 0.24 0.26 0.28 0.30 0.32 w2 0.02 -25.55 -25.55 0.04 -25.56 -13.11 -13.09 -13.11 0.06 -25.58 -13.14 -9.043 -8.996 -9.019 -9.043 0.08 -25.6 -13.16 -9.065 -7.035 -6.977 -6.999 -7.020 -7.035 0.10 -25.62 -13.18 -9.080 -7.042 -5.797 -5.760 -5.777 -5.791 -5.798 -5.797 0.12 -25.63 -13.19 -9.087 -7.041 -5.789 -4.917 -4.923 -4.934 -4.941 -4.941 -4.933 -4.917 0.14 -25.64 -13.20 -9.087 -7.033 -5.773 -4.896 -4.231 -4.295 -4.301 -4.301 -4.294 -4.279 -4.258 -4.231 0.16 -25.65 -13.20 -9.081 -7.019 -5.752 -4.868 -4.198 -3.659 -3.796 -3.796 -3.790 -3.777 -3.756 -3.729 -3.697 -3.659 0.18 -25.65 -13.19 -9.068 -6.999 -5.726 -4.836 -4.160 -3.616 -3.160 -3.381 -3.375 -3.363 -3.345 -3.319 -3.287 -3.249 -3.207 -3.160 0.20 -25.64 -13.18 -9.05 -6.974 -5.694 -4.799 -4.118 -3.570 -3.109 -2.709 -3.025 -3.014 -2.997 -2.973 -2.942 -2.905 -2.863 -2.816 -2.765 -2.709 0.22 -25.63 -13.17 -9.027 -6.944 -5.658 -4.757 -4.071 -3.518 -3.053 -2.649 -2.289 -2.712 -2.695 -2.673 -2.644 -2.609 -2.568 -2.521 -2.470 -2.414 -2.354 -2.289 0.24 -25.61 -23.14 -8.999 -6.910 -5.618 -4.711 -4.021 -3.463 -2.993 -2.584 -2.220 -1.888 -2.431 -2.410 -2.382 -2.349 -2.309 -2.263 -2.212 -2.157 -2.097 -2.032 -1.963 -1.888 0.26 -25.59 -13.12 -8.966 -6.870 -5.573 -4.661 -3.965 -3.403 -2.928 -2.515 -2.146 -1.809 -1.497 -2.175 -2.149 -2.117 -2.078 -2.034 -1.984 -1.929 -1.869 -1.805 -1.735 -1.661 -1.582 -1.497 0.28 -25.57 -13.09 -8.928 -6.827 -5.523 -4.606 -3.906 -3.338 -2.859 -2.441 -2.066 -1.724 -1.406 -1.106 -1.939 -1.908 -1.871 -1.828 -1.779 -1.725 -1.665 -1.601 -1.532 -1.458 -1.378 -1.294 -1.203 -1.106 0.30 -25.54 -13.05 -8.886 -6.778 -5.469 -4.547 -3.842 -3.269 -2.785 -2.361 -1.981 -1.634 -1.309 -1.002 -0.708 -1.718 -1.682 -1.641 -1.596 -1.540 -1.481 -1.417 -1.348 -1.274 -1.195 -1.110 -1.020 -0.923 -0.819 -0.708 0.32 -25.50 -13.01 -8.839 -6.725 -5.411 -4.484 -3.773 -3.195 -2.706 -2.277 -1.891 -1.537 -1.206 -0.891 -0.588 -0.292 -1.510 -1.470 -1.423 -1.371 -1.313 -1.250 -1.181 -1.108 -1.028 -0.944 -0.853 -0.756 -0.653 -0.541 -0.421 -0.292 0.34 -25.46 -12.96 -8.788 -6.668 -5.348 -4.415 -3.699 -3.116 -2.621 -2.186 -1.794 -1.433 -1.094 -0.771 -0.459 -0.152 -1.312 -1.267 -1.216 -1.159 -1.096 -1.028 -0.955 -0.876 -0.791 -0.701 -0.604 -0.500 -0.389 -0.269 -0.140 0.000 0.36 -25.42 -12.91 -8.731 -6.606 -5.280 -4.342 -3.620 -3.032 -2.531 -2.089 -1.690 -1.322 -0.974 -0.642 -1.123 -1.073 -1.017 -9.555 -0.887 -0.814 -0.736 -0.651 -0.561 -0.464 -0.360 -0.249 -0.129 0.000 0.38 -25.37 -12.86 -8.670 -6.539 -5.207 -4.263 -3.536 -2.941 -2.434 -1.986 -1.579 -1.202 -0.940 -0.885 -0.824 -0.757 -0.684 -0.606 -0.522 -0.432 -0.335 -0.231 -0.120 0.000 0.40 -25.31 -12.80 -8.604 -6.466 -5.126 -4.179 -3.446 -2.845 -2.330 -1.874 -0.762 -0.702 -0.636 -0.564 -0.486 -0.402 -0.312 -0.215 -0.111 -0.000 0.42 -25.5 -12.73 -8.532 -6.388 -5.045 -4.089 -3.349 -2.741 -0.589 -0.523 -0.452 -0.374 -0.290 -0.200 -0.104 0.000 0.44 -25.19 -12.66 -8.454 -6.305 -4.955 -3.992 -0.418 -0.347 -0.270 -0.186 -0.097 0.000 0.46 -25.12 -12.58 -8.371 -6.215 -0.250 -0.173 -0.090 0.000 Taken from http://magician.ucsd.edu/Essentials/WebBookse115.html#x136-237000C.2a ''' dipy-0.13.0/scratch/very_scratch/check_flipping.py000066400000000000000000000027701317371701200222120ustar00rootroot00000000000000import numpy as np from dipy.viz import fos from dipy.core import track_performance as pf tracks=[np.array([[0,0,0],[1,0,0,],[2,0,0]]), np.array([[3,0,0],[3.5,1,0],[4,2,0]]), np.array([[3.2,0,0],[3.7,1,0],[4.4,2,0]]), 
np.array([[3.4,0,0],[3.9,1,0],[4.6,2,0]]), np.array([[0,0.2,0],[1,0.2,0],[2,0.2,0]]), np.array([[2,0.2,0],[1,0.2,0],[0,0.2,0]]), np.array([[0,0,0],[0,1,0],[0,2,0]]), np.array([[0.2,0,0],[0.2,1,0],[0.2,2,0]]), np.array([[-0.2,0,0],[-0.2,1,0],[-0.2,2,0]]), np.array([[0,1.5,0],[1,1.5,0,],[6,1.5,0]]), np.array([[0,1.8,0],[1,1.8,0,],[6,1.8,0]]), np.array([[0,0,0],[2,2,0],[4,4,0]])] tracks=[t.astype(np.float32) for t in tracks] C=pf.larch_3split(tracks,None,0.5) r=fos.ren() fos.add(r,fos.line(tracks,fos.red)) #fos.show(r) for c in C: color=np.random.rand(3) for i in C[c]['indices']: fos.add(r,fos.line(tracks[i]+np.array([8.,0.,0.]),color)) fos.add(r,fos.line(tracks[i]+np.array([16.,0.,0.]),color)) fos.add(r,fos.line(C[c]['rep3']/C[c]['N']+np.array([16.,0.,0.]),fos.white)) fos.show(r) ''' print len(C) C=pf.larch_3merge(C,0.5) print len(C) for c in C: color=np.random.rand(3) for i in C[c]['indices']: fos.add(r,fos.line(tracks[i]+np.array([14.,0.,0.]),color)) #fos.show(r) for c in C: fos.add(r,fos.line(C[c]['rep3']/C[c]['N']+np.array([14.,0.,0.]),fos.white)) fos.show(r) ''' dipy-0.13.0/scratch/very_scratch/dcm2FAasnii.py000066400000000000000000000006261317371701200213230ustar00rootroot00000000000000import numpy as np import nibabel as ni from nibabel.dicom import dicomreaders as dcm from dipy.core import stensor as sten dname='/home/eg309/Data/Eleftherios/Series_003_CBU_DTI_64D_iso_1000' faname='/tmp/FA.nii' data,affine,bvals,gradients=dcm.read_mosaic_dwi_dir(dname) stl=sten.STensorL(bvals,gradients) stl.fit(data) stl.tensors FA=stl.fa img=ni.Nifti1Image(FA,affine) ni.save(img,faname) dipy-0.13.0/scratch/very_scratch/dcm2S0asnii.py000066400000000000000000000021751317371701200213200ustar00rootroot00000000000000import numpy as np import nibabel as ni from nibabel.dicom import dicomreaders as dcm import dipy.core.generalized_q_sampling as gq dname='/home/eg01/Data_Backup/Data/Frank_Eleftherios/frank/20100511_m030y_cbu100624/08_ep2d_advdiff_101dir_DSI' #dname ='/home/eg309/Data/Eleftherios/Series_003_CBU_DTI_64D_iso_1000' S0name='/tmp/S0.nii' #smallname='/tmp/small_volume2.5_steam_4000.nii' smallname='/tmp/small_64D.nii' smallname_grad = '/tmp/small_64D.gradients' smallname_bvals = '/tmp/small_64D.bvals' #read diffusion dicoms data,affine,bvals,gradients=dcm.read_mosaic_dir(dname) print data.shape #calculate QA #gqs = gq.GeneralizedQSampling(data,bvals,gradients) #gqs.QA[0] #S0 = data[:,:,:,0] ''' #save the structural volume #img=ni.Nifti1Image(S0,affine) #ni.save(img,S0name) #save the small roi volume #small= data[35:55,55:75,20:30,:] small= data[54:64,54:64,30:40,:] naffine = np.dot(affine, np.array([[1,0,0,54],[0,1,0,54],[0,0,1,30],[0,0,0,1]])) imgsmall=ni.Nifti1Image(small,naffine) ni.save(imgsmall,smallname) #save b-values and b-vecs np.save(smallname_grad,gradients) np.save(smallname_bvals,bvals) ''' dipy-0.13.0/scratch/very_scratch/diffusion_sphere_stats.py000066400000000000000000000432421317371701200240160ustar00rootroot00000000000000import nibabel import os import numpy as np import dipy as dp #import dipy.core.generalized_q_sampling as dgqs import dipy.reconst.gqi as dgqs import dipy.reconst.dti as ddti import dipy.reconst.recspeed as rp import dipy.io.pickles as pkl import scipy as sp from matplotlib.mlab import find #import dipy.core.sphere_plots as splots import dipy.core.sphere_stats as sphats import dipy.core.geometry as geometry import get_vertices as gv #old SimData files ''' results_SNR030_1fibre results_SNR030_1fibre+iso results_SNR030_2fibres_15deg results_SNR030_2fibres_30deg 
results_SNR030_2fibres_60deg results_SNR030_2fibres_90deg results_SNR030_2fibres+iso_15deg results_SNR030_2fibres+iso_30deg results_SNR030_2fibres+iso_60deg results_SNR030_2fibres+iso_90deg results_SNR030_isotropic ''' #fname='/home/ian/Data/SimData/results_SNR030_1fibre' ''' file has one row for every voxel, every voxel is repeating 1000 times with the same noise level , then we have 100 different directions. 1000 * 100 is the number of all rows. The 100 conditions are given by 10 polar angles (in degrees) 0, 20, 40, 60, 80, 80, 60, 40, 20 and 0, and each of these with longitude angle 0, 40, 80, 120, 160, 200, 240, 280, 320, 360. ''' #new complete SimVoxels files simdata = ['fibres_2_SNR_80_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_60_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_40_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_40_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_20_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_100_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_20_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_40_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_60_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_100_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_60_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_80_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_100_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_100_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_80_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_60_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_40_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_80_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_20_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_60_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_100_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_100_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_20_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_1_SNR_20_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_40_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_20_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_80_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_80_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_20_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_60_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_100_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_80_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_60_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_20_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_100_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_20_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_80_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_80_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_100_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_40_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_60_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_40_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_60_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 
'fibres_2_SNR_40_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_60_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_80_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_1_SNR_40_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_100_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_40_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_20_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00'] simdir = '/home/ian/Data/SimVoxels/' def gq_tn_calc_save(): for simfile in simdata: dataname = simfile print dataname sim_data=np.loadtxt(simdir+dataname) marta_table_fname='/home/ian/Data/SimData/Dir_and_bvals_DSI_marta.txt' b_vals_dirs=np.loadtxt(marta_table_fname) bvals=b_vals_dirs[:,0]*1000 gradients=b_vals_dirs[:,1:] gq = dgqs.GeneralizedQSampling(sim_data,bvals,gradients) gqfile = simdir+'gq/'+dataname+'.pkl' pkl.save_pickle(gqfile,gq) ''' gq.IN gq.__doc__ gq.glob_norm_param gq.QA gq.__init__ gq.odf gq.__class__ gq.__module__ gq.q2odf_params ''' tn = ddti.Tensor(sim_data,bvals,gradients) tnfile = simdir+'tn/'+dataname+'.pkl' pkl.save_pickle(tnfile,tn) ''' tn.ADC tn.__init__ tn._getevals tn.B tn.__module__ tn._getevecs tn.D tn.__new__ tn._getndim tn.FA tn.__reduce__ tn._getshape tn.IN tn.__reduce_ex__ tn._setevals tn.MD tn.__repr__ tn._setevecs tn.__class__ tn.__setattr__ tn.adc tn.__delattr__ tn.__sizeof__ tn.evals tn.__dict__ tn.__str__ tn.evecs tn.__doc__ tn.__subclasshook__ tn.fa tn.__format__ tn.__weakref__ tn.md tn.__getattribute__ tn._evals tn.ndim tn.__getitem__ tn._evecs tn.shape tn.__hash__ tn._getD ''' ''' file has one row for every voxel, every voxel is repeating 1000 times with the same noise level , then we have 100 different directions. 100 * 1000 is the number of all rows. At the moment this module is hardwired to the use of the EDS362 spherical mesh. I am assumung (needs testing) that directions 181 to 361 are the antipodal partners of directions 0 to 180. So when counting the number of different vertices that occur as maximal directions we wll map the indices modulo 181. 
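For example, maximal-direction indices 5 and 186 both reduce to 5 under np.remainder(indices, 181), so an antipodal pair of vertices is counted as one distinct direction.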
''' def analyze_maxima(indices, max_dirs, subsets): '''This calculates the eigenstats for each of the replicated batches of the simulation data ''' results = [] for direction in subsets: batch = max_dirs[direction,:,:] index_variety = np.array([len(set(np.remainder(indices[direction,:],181)))]) #normed_centroid, polar_centroid, centre, b1 = sphats.eigenstats(batch) centre, b1 = sphats.eigenstats(batch) # make azimuth be in range (0,360) rather than (-180,180) centre[1] += 360*(centre[1] < 0) #results.append(np.concatenate((normed_centroid, polar_centroid, centre, b1, index_variety))) results.append(np.concatenate((centre, b1, index_variety))) return results #dt_first_directions = tn.evecs[:,:,0].reshape((100,1000,3)) # these are the principal directions for the full set of simulations #gq_tn_calc_save() #eds=np.load(os.path.join(os.path.dirname(dp.__file__),'core','matrices','evenly_distributed_sphere_362.npz')) from dipy.data import get_sphere odf_vertices,odf_faces=get_sphere('symmetric362') #odf_vertices=eds['vertices'] def run_comparisons(sample_data=35): for simfile in [simdata[sample_data]]: dataname = simfile print dataname sim_data=np.loadtxt(simdir+dataname) gqfile = simdir+'gq/'+dataname+'.pkl' gq = pkl.load_pickle(gqfile) tnfile = simdir+'tn/'+dataname+'.pkl' tn = pkl.load_pickle(tnfile) dt_first_directions_in=odf_vertices[tn.IN] dt_indices = tn.IN.reshape((100,1000)) dt_results = analyze_maxima(dt_indices, dt_first_directions_in.reshape((100,1000,3)),range(10,90)) gq_indices = np.array(gq.IN[:,0],dtype='int').reshape((100,1000)) gq_first_directions_in=odf_vertices[np.array(gq.IN[:,0],dtype='int')] #print gq_first_directions_in.shape gq_results = analyze_maxima(gq_indices, gq_first_directions_in.reshape((100,1000,3)),range(10,90)) #for gqi see example dicoms_2_tracks gq.IN[:,0] np.set_printoptions(precision=3, suppress=True, linewidth=200, threshold=5000) out = open('/home/ian/Data/SimVoxels/Out/'+'***_'+dataname,'w') #print np.vstack(dt_results).shape, np.vstack(gq_results).shape results = np.hstack((np.vstack(dt_results), np.vstack(gq_results))) #print results.shape #results = np.vstack(dt_results) print >> out, results[:,:] out.close() #up = dt_batch[:,2]>= 0 #splots.plot_sphere(dt_batch[up], 'batch '+str(direction)) #splots.plot_lambert(dt_batch[up],'batch '+str(direction), centre) #spread = gq.q2odf_params e,v = np.linalg.eigh(np.dot(spread,spread.transpose())) effective_dimension = len(find(np.cumsum(e) > 0.05*np.sum(e))) #95% #rotated = np.dot(dt_batch,evecs) #rot_evals, rot_evecs = np.linalg.eig(np.dot(rotated.T,rotated)/rotated.shape[0]) #eval_order = np.argsort(rot_evals) #rotated = rotated[:,eval_order] #up = rotated[:,2]>= 0 #splot.plot_sphere(rotated[up],'first1000') #splot.plot_lambert(rotated[up],'batch '+str(direction)) def run_gq_sims(sample_data=[35,23,46,39,40,10,37,27,21,20]): results = [] out = open('/home/ian/Data/SimVoxels/Out/'+'npa+fa','w') for j in range(len(sample_data)): sample = sample_data[j] simfile = simdata[sample] dataname = simfile print dataname sim_data=np.loadtxt(simdir+dataname) marta_table_fname='/home/ian/Data/SimData/Dir_and_bvals_DSI_marta.txt' b_vals_dirs=np.loadtxt(marta_table_fname) bvals=b_vals_dirs[:,0]*1000 gradients=b_vals_dirs[:,1:] for j in np.vstack((np.arange(100)*1000,np.arange(100)*1000+1)).T.ravel(): # 0,1,1000,1001,2000,2001,... 
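# i.e. rows are ordered as direction*1000 + replication, so this picks only the first two of the 1000 noise replications for each of the 100 directions -- 200 voxels in total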
s = sim_data[j,:] gqs = dp.GeneralizedQSampling(s.reshape((1,102)),bvals,gradients,Lambda=3.5) tn = dp.Tensor(s.reshape((1,102)),bvals,gradients,fit_method='LS') t0, t1, t2, npa = gqs.npa(s, width = 5) print >> out, dataname, j, npa, tn.fa()[0] ''' for (i,o) in enumerate(gqs.odf(s)): print i,o for (i,o) in enumerate(gqs.odf_vertices): print i,o ''' #o = gqs.odf(s) #v = gqs.odf_vertices #pole = v[t0[0]] #eqv = dgqs.equatorial_zone_vertices(v, pole, 5) #print 'Number of equatorial vertices: ', len(eqv) #print np.max(o[eqv]),np.min(o[eqv]) #cos_e_pole = [np.dot(pole.T, v[i]) for i in eqv] #print np.min(cos1), np.max(cos1) #print 'equatorial max in equatorial vertices:', t1[0] in eqv #x = np.cross(v[t0[0]],v[t1[0]]) #x = x/np.sqrt(np.sum(x**2)) #print x #ptchv = dgqs.patch_vertices(v, x, 5) #print len(ptchv) #eqp = eqv[np.argmin([np.abs(np.dot(v[t1[0]].T,v[p])) for p in eqv])] #print (eqp, o[eqp]) #print t2[0] in ptchv, t2[0] in eqv #print np.dot(pole.T, v[t1[0]]), np.dot(pole.T, v[t2[0]]) #print ptchv[np.argmin([o[v] for v in ptchv])] #gq_indices = np.array(gq.IN[:,0],dtype='int').reshape((100,1000)) #gq_first_directions_in=odf_vertices[np.array(gq.IN[:,0],dtype='int')] #print gq_first_directions_in.shape #gq_results = analyze_maxima(gq_indices, gq_first_directions_in.reshape((100,1000,3)),range(100)) #for gqi see example dicoms_2_tracks gq.IN[:,0] #np.set_printoptions(precision=6, suppress=True, linewidth=200, threshold=5000) #out = open('/home/ian/Data/SimVoxels/Out/'+'+++_'+dataname,'w') #results = np.hstack((np.vstack(dt_results), np.vstack(gq_results))) #results = np.vstack(dt_results) #print >> out, results[:,:] out.close() def run_small_data(): #smalldir = '/home/ian/Devel/dipy/dipy/data/' smalldir = '/home/eg309/Devel/dipy/dipy/data/' # from os.path import join as opj # bvals=np.load(opj(os.path.dirname(__file__), \ # 'data','small_64D.bvals.npy')) bvals=np.load(smalldir+'small_64D.bvals.npy') # gradients=np.load(opj(os.path.dirname(__file__), \ # 'data','small_64D.gradients.npy')) gradients=np.load(smalldir+'small_64D.gradients.npy') # img =ni.load(os.path.join(os.path.dirname(__file__),\ # 'data','small_64D.nii')) img=nibabel.load(smalldir+'small_64D.nii') small_data=img.get_data() print 'real_data', small_data.shape gqsmall = dgqs.GeneralizedQSampling(small_data,bvals,gradients) tnsmall = ddti.Tensor(small_data,bvals,gradients) x,y,z,a,b=tnsmall.evecs.shape evecs=tnsmall.evecs xyz=x*y*z evecs = evecs.reshape(xyz,3,3) #vs = np.sign(evecs[:,2,:]) #print vs.shape #print np.hstack((vs,vs,vs)).reshape(1000,3,3).shape #evecs = np.hstack((vs,vs,vs)).reshape(1000,3,3) #print evecs.shape evals=tnsmall.evals evals = evals.reshape(xyz,3) #print evals.shape #print('GQS in %d' %(t2-t1)) ''' eds=np.load(opj(os.path.dirname(__file__),\ '..','matrices',\ 'evenly_distributed_sphere_362.npz')) ''' from dipy.data import get_sphere odf_vertices,odf_faces=get_sphere('symmetric362') #odf_vertices=eds['vertices'] #odf_faces=eds['faces'] #Yeh et.al, IEEE TMI, 2010 #calculate the odf using GQI scaling=np.sqrt(bvals*0.01506) # 0.01506 = 6*D where D is the free #water diffusion coefficient #l_values sqrt(6 D tau) D free water #diffusion coefficiet and tau included in the b-value tmp=np.tile(scaling,(3,1)) b_vector=gradients.T*tmp Lambda = 1.2 # smoothing parameter - diffusion sampling length q2odf_params=np.sinc(np.dot(b_vector.T, odf_vertices.T) * Lambda/np.pi) #implements equation no. 9 from Yeh et.al. 
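# Once q2odf_params (shape: number of gradient directions x number of odf
# vertices) is formed, the GQI odf of any voxel is a single dot product of its
# signal with that matrix -- exactly what Q2odf below does. A minimal
# self-contained sketch with made-up sizes; the _demo_* names are illustrative
# only and not part of this module:
import numpy as np
_demo_signal = np.ones(65)                                    # one voxel, 65 diffusion volumes
_demo_params = np.sinc(np.random.uniform(-1, 1, (65, 362)))   # stand-in for q2odf_params
_demo_odf = np.dot(_demo_signal, _demo_params)                # shape (362,), one value per vertex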
S=small_data.copy() x,y,z,g=S.shape S=S.reshape(x*y*z,g) QA = np.zeros((x*y*z,5)) IN = np.zeros((x*y*z,5)) FA = tnsmall.fa().reshape(x*y*z) fwd = 0 #Calculate Quantitative Anisotropy and find the peaks and the indices #for every voxel summary = {} summary['vertices'] = odf_vertices v = odf_vertices.shape[0] summary['faces'] = odf_faces f = odf_faces.shape[0] for (i,s) in enumerate(S): #print 'Volume %d' % i istr = str(i) summary[istr] = {} t0, t1, t2, npa = gqsmall.npa(s, width = 5) summary[istr]['triple']=(t0,t1,t2) summary[istr]['npa']=npa odf = Q2odf(s,q2odf_params) peaks,inds=rp.peak_finding(odf,odf_faces) fwd=max(np.max(odf),fwd) #peaks = peaks - np.min(odf) n_peaks=min(len(peaks),5) peak_heights = [odf[i] for i in inds[:n_peaks]] #QA[i][:l] = peaks[:n_peaks] IN[i][:n_peaks] = inds[:n_peaks] summary[istr]['odf'] = odf summary[istr]['peaks'] = peaks summary[istr]['inds'] = inds summary[istr]['evecs'] = evecs[i,:,:] summary[istr]['evals'] = evals[i,:] summary[istr]['n_peaks'] = n_peaks summary[istr]['peak_heights'] = peak_heights # summary[istr]['fa'] = tnsmall.fa()[0] summary[istr]['fa'] = FA[i] ''' QA/=fwd QA=QA.reshape(x,y,z,5) IN=IN.reshape(x,y,z,5) ''' peaks_1 = [i for i in range(1000) if summary[str(i)]['n_peaks']==1] peaks_2 = [i for i in range(1000) if summary[str(i)]['n_peaks']==2] peaks_3 = [i for i in range(1000) if summary[str(i)]['n_peaks']==3] #peaks_2 = [i for i in range(1000) if len(summary[str(i)]['inds'])==2] #peaks_3 = [i for i in range(1000) if len(summary[str(i)]['inds'])==3] print '#voxels with 1, 2, 3 peaks', len(peaks_1),len(peaks_2),len(peaks_3) return FA, summary def Q2odf(s,q2odf_params): ''' construct odf for a voxel ''' odf=np.dot(s,q2odf_params) return odf #run_comparisons() #run_gq_sims() FA, summary = run_small_data() peaks_1 = [i for i in range(1000) if summary[str(i)]['n_peaks']==1] peaks_2 = [i for i in range(1000) if summary[str(i)]['n_peaks']==2] peaks_3 = [i for i in range(1000) if summary[str(i)]['n_peaks']==3] fa_npa_1 = [[summary[str(i)]['fa'], summary[str(i)]['npa'], summary[str(i)]['peak_heights']] for i in peaks_1] fa_npa_2 = [[summary[str(i)]['fa'], summary[str(i)]['npa'], summary[str(i)]['peak_heights']] for i in peaks_2] fa_npa_3 = [[summary[str(i)]['fa'], summary[str(i)]['npa'], summary[str(i)]['peak_heights']] for i in peaks_3] dipy-0.13.0/scratch/very_scratch/eddy_currents.py000066400000000000000000000024021317371701200221070ustar00rootroot00000000000000import numpy as np import dipy as dp import nibabel as ni dname = '/home/eg01/Data_Backup/Data/Eleftherios/CBU090133_METHODS/20090227_145404/Series_003_CBU_DTI_64D_iso_1000' #dname = '/home/eg01/Data_Backup/Data/Frank_Eleftherios/frank/20100511_m030y_cbu100624/08_ep2d_advdiff_101dir_DSI' data,affine,bvals,gradients=dp.load_dcm_dir(dname) ''' rot=np.array([[1,0,0,0], [0,np.cos(np.pi/2),-np.sin(np.pi/2),0], [0,np.sin(np.pi/2), np.cos(np.pi/2),0], [0,0,0,1]]) from scipy.ndimage import affine_transform as aff naffine=np.dot(affine,rot) ''' data[:,:,:,1] source=ni.Nifti1Image(data[:,:,:,1],affine) target=ni.Nifti1Image(data[:,:,:,0],affine) #similarity 'cc', 'cr', 'crl1', 'mi', je', 'ce', 'nmi', 'smi'. 
'cr' similarity='cr' #interp 'pv', 'tri' interp = 'tri' #subsampling None or sequence (3,) subsampling=None #search 'affine', 'rigid', 'similarity' or ['rigid','affine'] search='affine' #optimizer 'simplex', 'powell', 'steepest', 'cg', 'bfgs' or #sequence of optimizers optimizer= 'powell' T=dp.volume_register(source,target,similarity,\ interp,subsampling,search,) sourceT=dp.volume_transform(source, T.inv(), reference=target) s=source.get_data() t=target.get_data() sT=sourceT.get_data() dipy-0.13.0/scratch/very_scratch/ellipse.py000066400000000000000000000012131317371701200206710ustar00rootroot00000000000000import sympy import numpy as np import scipy as sc from numpy.random import random_sample as random def random_uniform_in_disc(): # returns a tuple which is uniform in the disc theta = 2*np.pi*random() r2 = random() r = np.sqrt(r2) return np.array((r*np.sin(theta),r*np.cos(theta))) def random_uniform_in_ellipse(a=1,b=1): x = a*random_uniform_in_disc()[0] y = b*np.sqrt(1-(x/a)**2)*(1-2*random()) return np.array((x,y)) import matplotlib.pyplot as plt fig = plt.figure() ax = fig.add_subplot(111) sample = np.array([random_uniform_in_ellipse(a=2,b=1) for i in np.arange(10000)]) ax.scatter(*sample.T) plt.show() dipy-0.13.0/scratch/very_scratch/gen_iter.py000066400000000000000000000017301317371701200210340ustar00rootroot00000000000000class Reverse: "Iterator for looping over a sequence backwards" def __init__(self, data): self.data = data self.index = len(data) def __iter__(self): return self def next(self): if self.index == 0: raise StopIteration self.index = self.index - 1 return self.data[self.index] class ReverseGen: 'Iterator class using generator' def __init__(self, data): self.data = data def __iter__(self): for index in range(len(self.data)-1, -1, -1): yield self.data[index] rev = Reverse('golf') iter(rev) print('class') for char in rev: print char def reverse(data): for index in range(len(data)-1, -1, -1): yield data[index] print('generator') for char in reverse('golf'): print char print('class generator') revgen = ReverseGen('golf') iter(rev) for char in revgen: print char dipy-0.13.0/scratch/very_scratch/get_vertices.py000066400000000000000000000053071317371701200217270ustar00rootroot00000000000000sphere_dic = {'fy362': {'filepath' : '/home/ian/Devel/dipy/dipy/data/evenly_distributed_sphere_362.npz', 'object': 'npz', 'vertices': 'vertices', 'omit': 0, 'hemi': False}, 'fy642': {'filepath' : '/home/ian/Devel/dipy/dipy/data/evenly_distributed_sphere_642.npz', 'object': 'npz', 'vertices': 'odf_vertices', 'omit': 0, 'hemi': False}, 'siem64': {'filepath':'/home/ian/Devel/dipy/dipy/data/small_64D.gradients.npy', 'object': 'npy', 'omit': 1, 'hemi': True}, 'create2': {}, 'create3': {}, 'create4': {}, 'create5': {}, 'create6': {}, 'create7': {}, 'create8': {}, 'create9': {}, 'marta200': {'filepath': '/home/ian/Data/Spheres/200.npy', 'object': 'npy', 'omit': 0, 'hemi': True}, 'dsi102': {'filepath': '/home/ian/Data/Frank_Eleftherios/frank/20100511_m030y_cbu100624/08_ep2d_advdiff_101dir_DSI', 'object': 'dicom', 'omit': 1, 'hemi': True}} import numpy as np from dipy.core.triangle_subdivide import create_unit_sphere #from dipy.io import dicomreaders as dcm def get_vertex_set(key): if key[:6] == 'create': number = eval(key[6:]) vertices, edges, faces = create_unit_sphere(number) omit = 0 return vertices else: entry = sphere_dic[key] if entry.has_key('omit'): omit = entry['omit'] else: omit = 0 filepath = entry['filepath'] if entry['object'] == 'npz': filearray = np.load(filepath) vertices = 
filearray[entry['vertices']] elif sphere_dic[key]['object'] == 'npy': vertices = np.load(filepath) elif entry['object'] == 'dicom': data,affine,bvals,gradients=dcm.read_mosaic_dir(filepath) #print (bvals.shape, gradients.shape) grad3 = np.vstack((bvals,bvals,bvals)).transpose() #print grad3.shape #vertices = grad3*gradients vertices = gradients if omit > 0: vertices = vertices[omit:,:] if entry['hemi']: vertices = np.vstack([vertices, -vertices]) return vertices[omit:,:] print sphere_dic.keys() #vertices = get_vertex_set('create5') #vertices = get_vertex_set('siem64') #vertices = get_vertex_set('dsi102') vertices = get_vertex_set('fy362') gradients = get_vertex_set('siem64') gradients = gradients[:gradients.shape[0]/2] print gradients.shape from dipy.viz import fvtk sph=-np.sinc(np.dot(gradients[1],vertices.T)) r=fvtk.ren() #sph = np.arange(vertices.shape[0]) print sph.shape cols=fvtk.colors(sph,'jet') fvtk.add(r,fvtk.point(vertices,cols,point_radius=.1,theta=10,phi=10)) fvtk.show(r) dipy-0.13.0/scratch/very_scratch/gqsampling_stats.py000066400000000000000000000316511317371701200226250ustar00rootroot00000000000000import os import numpy as np from nose.tools import assert_true, assert_false, assert_equal, assert_raises from numpy.testing import assert_array_equal, assert_array_almost_equal import time #import dipy.core.reconstruction_performance as rp import dipy.reconst.recspeed as rp from os.path import join as opj import nibabel as ni #import dipy.core.generalized_q_sampling as gq import dipy.reconst.gqi as gq #import dipy.core.track_propagation as tp import dipy.core.dti as dt import dipy.core.meshes as meshes def test_gqiodf(): #read bvals,gradients and data bvals=np.load(opj(os.path.dirname(__file__), \ 'data','small_64D.bvals.npy')) gradients=np.load(opj(os.path.dirname(__file__), \ 'data','small_64D.gradients.npy')) img =ni.load(os.path.join(os.path.dirname(__file__),\ 'data','small_64D.nii')) data=img.get_data() #print(bvals.shape) #print(gradients.shape) #print(data.shape) t1=time.clock() gqs = gq.GeneralizedQSampling(data,bvals,gradients) ten = dt.Tensor(data,bvals,gradients,thresh=50) fa=ten.fa() x,y,z,a,b=ten.evecs.shape evecs=ten.evecs xyz=x*y*z evecs = evecs.reshape(xyz,3,3) #vs = np.sign(evecs[:,2,:]) #print vs.shape #print np.hstack((vs,vs,vs)).reshape(1000,3,3).shape #evecs = np.hstack((vs,vs,vs)).reshape(1000,3,3) #print evecs.shape evals=ten.evals evals = evals.reshape(xyz,3) #print evals.shape t2=time.clock() #print('GQS in %d' %(t2-t1)) eds=np.load(opj(os.path.dirname(__file__),\ '..','matrices',\ 'evenly_distributed_sphere_362.npz')) odf_vertices=eds['vertices'] odf_faces=eds['faces'] #Yeh et.al, IEEE TMI, 2010 #calculate the odf using GQI scaling=np.sqrt(bvals*0.01506) # 0.01506 = 6*D where D is the free #water diffusion coefficient #l_values sqrt(6 D tau) D free water #diffusion coefficiet and tau included in the b-value tmp=np.tile(scaling,(3,1)) b_vector=gradients.T*tmp Lambda = 1.2 # smoothing parameter - diffusion sampling length q2odf_params=np.sinc(np.dot(b_vector.T, odf_vertices.T) * Lambda/np.pi) #implements equation no. 9 from Yeh et.al. 
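# --------------------------------------------------------------------------
# Added note (not part of the original test): further down, test_gqiodf()
# sanity-checks the ODF mesh with Euler's formula.  For a closed triangular
# mesh on a sphere f - e + v = 2, and since every edge is shared by exactly
# two triangles, 2*e = 3*f, which gives the shortcut 2*v - f = 4 used there.
# A standalone check on the smallest such mesh (a tetrahedron):
import numpy as np

def euler_check_sketch():
    faces = np.array([[0, 1, 2], [0, 1, 3], [0, 2, 3], [1, 2, 3]])  # tetrahedron
    v = faces.max() + 1                                             # 4 vertices
    f = len(faces)                                                  # 4 faces
    edges = set()
    for i, j, k in faces:
        edges.update([(min(i, j), max(i, j)),
                      (min(j, k), max(j, k)),
                      (min(i, k), max(i, k))])
    e = len(edges)                                                  # 6 edges
    return f - e + v == 2 and 2 * v - f == 4                        # both hold

# euler_check_sketch() -> True
# --------------------------------------------------------------------------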
S=data.copy() x,y,z,g=S.shape S=S.reshape(x*y*z,g) QA = np.zeros((x*y*z,5)) IN = np.zeros((x*y*z,5)) fwd = 0 #Calculate Quantitative Anisotropy and find the peaks and the indices #for every voxel summary = {} summary['vertices'] = odf_vertices v = odf_vertices.shape[0] summary['faces'] = odf_faces f = odf_faces.shape[0] ''' If e = number_of_edges the Euler formula says f-e+v = 2 for a mesh on a sphere Here, assuming we have a healthy triangulation every face is a triangle, all 3 of whose edges should belong to exactly two faces = so 2*e = 3*f to avoid division we test whether 2*f - 3*f + 2*v == 4 or equivalently 2*v - f == 4 ''' assert_equal(2*v-f, 4,'Direct Euler test fails') assert_true(meshes.euler_characteristic_check(odf_vertices, odf_faces,chi=2),'euler_characteristic_check fails') coarse = meshes.coarseness(odf_faces) print 'coarseness: ', coarse for (i,s) in enumerate(S): #print 'Volume %d' % i istr = str(i) summary[istr] = {} odf = Q2odf(s,q2odf_params) peaks,inds=rp.peak_finding(odf,odf_faces) fwd=max(np.max(odf),fwd) peaks = peaks - np.min(odf) l=min(len(peaks),5) QA[i][:l] = peaks[:l] IN[i][:l] = inds[:l] summary[istr]['odf'] = odf summary[istr]['peaks'] = peaks summary[istr]['inds'] = inds summary[istr]['evecs'] = evecs[i,:,:] summary[istr]['evals'] = evals[i,:] QA/=fwd QA=QA.reshape(x,y,z,5) IN=IN.reshape(x,y,z,5) #print('Old %d secs' %(time.clock() - t2)) # assert_equal((gqs.QA-QA).max(),0.,'Frank QA different than our QA') # assert_equal((gqs.QA.shape),QA.shape, 'Frank QA shape is different') # assert_equal((gqs.QA-QA).max(), 0.) #import dipy.core.track_propagation as tp #tp.FACT_Delta(QA,IN) #return tp.FACT_Delta(QA,IN,seeds_no=10000).tracks peaks_1 = [i for i in range(1000) if len(summary[str(i)]['inds'])==1] peaks_2 = [i for i in range(1000) if len(summary[str(i)]['inds'])==2] peaks_3 = [i for i in range(1000) if len(summary[str(i)]['inds'])==3] # correct numbers of voxels with respectively 1,2,3 ODF/QA peaks assert_array_equal((len(peaks_1),len(peaks_2),len(peaks_3)), (790,196,14), 'error in numbers of QA/ODF peaks') # correct indices of odf directions for voxels 0,10,44 # with respectively 1,2,3 ODF/QA peaks assert_array_equal(summary['0']['inds'],[116], 'wrong peak indices for voxel 0') assert_array_equal(summary['10']['inds'],[105, 78], 'wrong peak indices for voxel 10') assert_array_equal(summary['44']['inds'],[95, 84, 108], 'wrong peak indices for voxel 44') assert_equal(np.argmax(summary['0']['odf']), 116) assert_equal(np.argmax(summary['10']['odf']), 105) assert_equal(np.argmax(summary['44']['odf']), 95) pole_1 = summary['vertices'][116] #print 'pole_1', pole_1 pole_2 = summary['vertices'][105] #print 'pole_2', pole_2 pole_3 = summary['vertices'][95] #print 'pole_3', pole_3 vertices = summary['vertices'] width = 0.02#0.3 #0.05 ''' print 'pole_1 equator contains:', len([i for i,v in enumerate(vertices) if np.abs(np.dot(v,pole_1)) < width]) print 'pole_2 equator contains:', len([i for i,v in enumerate(vertices) if np.abs(np.dot(v,pole_2)) < width]) print 'pole_3 equator contains:', len([i for i,v in enumerate(vertices) if np.abs(np.dot(v,pole_3)) < width]) ''' #print 'pole_1 equator contains:', len(meshes.equatorial_vertices(vertices,pole_1,width)) #print 'pole_2 equator contains:', len(meshes.equatorial_vertices(vertices,pole_2,width)) #print 'pole_3 equator contains:', len(meshes'equatorial_vertices(vertices,pole_3,width)) #print triple_odf_maxima(vertices,summary['0']['odf'],width) #print triple_odf_maxima(vertices,summary['10']['odf'],width) #print 
triple_odf_maxima(vertices,summary['44']['odf'],width) #print summary['0']['evals'] ''' pole=np.array([0,0,1]) from dipy.viz import fos r=fos.ren() fos.add(r,fos.point(pole,fos.green)) for i,ev in enumerate(vertices): if np.abs(np.dot(ev,pole)) 1- width] def patch_maximum(vertices, odf, pole, width): eqvert = patch_vertices(vertices, pole, width) ''' need to test for whether eqvert is empty or not ''' if len(eqvert) == 0: print 'empty cone around pole', pole, 'with width', width return Null, Null eqvals = [odf[i] for i in eqvert] eqargmax = np.argmax(eqvals) eqvertmax = eqvert[eqargmax] eqvalmax = eqvals[eqargmax] return eqvertmax, eqvalmax def triple_odf_maxima(vertices, odf, width): indmax1 = np.argmax([odf[i] for i,v in enumerate(vertices)]) odfmax1 = odf[indmax1] indmax2, odfmax2 = equatorial_maximum(vertices, odf, vertices[indmax1], width) cross12 = np.cross(vertices[indmax1],vertices[indmax2]) indmax3, odfmax3 = patch_maximum(vertices, odf, cross12, width) return [(indmax1, odfmax1),(indmax2, odfmax2),(indmax3, odfmax3)] def test_gqi_small(): #read bvals,gradients and data bvals=np.load(opj(os.path.dirname(__file__), \ 'data','small_64D.bvals.npy')) gradients=np.load(opj(os.path.dirname(__file__), \ 'data','small_64D.gradients.npy')) img =ni.load(os.path.join(os.path.dirname(__file__),\ 'data','small_64D.nii')) data=img.get_data() print(bvals.shape) print(gradients.shape) print(data.shape) t1=time.clock() gqs = gq.GeneralizedQSampling(data,bvals,gradients) t2=time.clock() print('GQS in %d' %(t2-t1)) eds=np.load(opj(os.path.dirname(__file__),\ '..','matrices',\ 'evenly_distributed_sphere_362.npz')) odf_vertices=eds['vertices'] odf_faces=eds['faces'] #Yeh et.al, IEEE TMI, 2010 #calculate the odf using GQI scaling=np.sqrt(bvals*0.01506) # 0.01506 = 6*D where D is the free #water diffusion coefficient #l_values sqrt(6 D tau) D free water #diffusion coefficiet and tau included in the b-value tmp=np.tile(scaling,(3,1)) b_vector=gradients.T*tmp Lambda = 1.2 # smoothing parameter - diffusion sampling length q2odf_params=np.sinc(np.dot(b_vector.T, odf_vertices.T) * Lambda/np.pi) #implements equation no. 9 from Yeh et.al. 
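# --------------------------------------------------------------------------
# Added note (not part of the original test): the loop that follows calls
# peak_finding(odf, odf_faces), which drops, for every triangle, the two
# vertices with the smallest ODF values so that only local maxima survive
# (the original additionally keeps only the first half of the vertex list to
# collapse antipodal duplicates).  A standalone numpy version of that rule on
# a toy octahedron with hand-picked values, not real data; the two surviving
# vertices are the antipodal pair along +x/-x, i.e. a single fibre direction:
import numpy as np

def peak_finding_sketch():
    # octahedron: vertices 0..5 = +x, -x, +y, -y, +z, -z
    faces = np.array([[0, 2, 4], [2, 1, 4], [1, 3, 4], [3, 0, 4],
                      [2, 0, 5], [1, 2, 5], [3, 1, 5], [0, 3, 5]])
    odf = np.array([1.0, 0.9, 0.3, 0.2, 0.5, 0.4])
    peak = odf.copy()
    for tri in faces:
        smallest_two = tri[np.argsort(odf[tri])[:2]]
        peak[smallest_two] = 0                      # cannot be local maxima
    inds = np.where(peak > 0)[0]
    order = np.argsort(peak[inds])[::-1]            # descending peak height
    return peak[inds][order], inds[order]           # ([1.0, 0.9], [0, 1])

# --------------------------------------------------------------------------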
S=data.copy() x,y,z,g=S.shape S=S.reshape(x*y*z,g) QA = np.zeros((x*y*z,5)) IN = np.zeros((x*y*z,5)) fwd = 0 #Calculate Quantitative Anisotropy and find the peaks and the indices #for every voxel for (i,s) in enumerate(S): odf = Q2odf(s,q2odf_params) peaks,inds=rp.peak_finding(odf,odf_faces) fwd=max(np.max(odf),fwd) peaks = peaks - np.min(odf) l=min(len(peaks),5) QA[i][:l] = peaks[:l] IN[i][:l] = inds[:l] QA/=fwd QA=QA.reshape(x,y,z,5) IN=IN.reshape(x,y,z,5) print('Old %d secs' %(time.clock() - t2)) assert_equal((gqs.QA-QA).max(),0.,'Frank QA different than dipy QA') assert_equal((gqs.QA.shape),QA.shape, 'Frank QA shape is different') assert_equal(len(tp.FACT_Delta(QA,IN,seeds_no=100).tracks),100, 'FACT_Delta is not generating the right number of ' 'tracks for this dataset') def Q2odf(s,q2odf_params): odf=np.dot(s,q2odf_params) return odf def peak_finding(odf,odf_faces): #proton density already include from the scaling b_table[0][0] and s[0] #find local maxima peak=odf.copy() # where the smallest odf values in the vertices of a face remove the # two smallest vertices for face in odf_faces: i, j, k = face check=np.array([odf[i],odf[j],odf[k]]) zeroing=check.argsort() peak[face[zeroing[0]]]=0 peak[face[zeroing[1]]]=0 #for later testing expecting peak.max 794595.94774980657 and #np.where(peak>0) (array([166, 347]),) #we just need the first half of peak peak=peak[0:len(peak)/2] #find local maxima and give fiber orientation (inds) and magnitute #peaks in a descending order inds=np.where(peak>0)[0] pinds=np.argsort(peak[inds]) peaks=peak[inds[pinds]][::-1] return peaks, inds[pinds][::-1] if __name__ == "__main__": #T=test_gqiodf() T=test_gqi_small() dipy-0.13.0/scratch/very_scratch/joint_hist.py000066400000000000000000000250751317371701200214220ustar00rootroot00000000000000#Calculate joint histogram and related metrics from math import sin,cos,pi import numpy as np from scipy.ndimage import affine_transform, geometric_transform from scipy.ndimage.interpolation import rotate,shift,zoom from scipy.optimize import fmin as fmin_simplex, fmin_powell, fmin_cg from scipy.optimize import leastsq from dipy.core import geometry as gm import pylab def affine_transform2d(I,M): ''' Inspired by the work of Alexis Roche and the independent work of D. Kroon Parameters ---------- I: array, shape(N,M), 2d image M: inverse transformation matrix 3x3, array, shape (3,3) mode: 0: linear interpolation and outside pixels set to nearest pixel Returns ------- Iout: array, shape(N,M), transformed image ''' #the transpose is for contiguous C arrays (default) #I=I.T #create all x,y indices xy=np.array([(i,j) for (i,j) in np.ndindex(I.shape)]) #image center is now our origin (0,0) mean=np.array(I.shape)/2. mean=mean.reshape(1,2) xyd=xy-mean #transformed coordinates lxy = mean.T + np.dot(M[:2,:2],xyd.T) + M[:2,2].reshape(2,1) lxy=lxy.T #neighborh pixels for linear interp bas0=np.floor(lxy) bas1=bas0+1 #linear interp. 
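# --------------------------------------------------------------------------
# Added note (not part of the original affine_transform2d): bas0 holds the
# floor (integer) neighbour of every warped coordinate and bas1 = bas0 + 1 the
# opposite corner; 'com', computed next, is the fractional part.  The four
# weights perc0..perc3 built from it are the standard bilinear interpolation
# weights (1-dx)(1-dy), (1-dx)dy, dx(1-dy) and dx*dy, which sum to 1 for every
# pixel.  A minimal standalone version of that step:
import numpy as np

def bilinear_weights_sketch(frac):
    # frac: (N, 2) fractional parts of the warped coordinates, values in [0, 1)
    dx, dy = frac[:, 0], frac[:, 1]
    return np.column_stack(((1 - dx) * (1 - dy), (1 - dx) * dy,
                            dx * (1 - dy), dx * dy))   # each row sums to 1

# np.allclose(bilinear_weights_sketch(np.random.rand(10, 2)).sum(axis=1), 1)
# --------------------------------------------------------------------------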
constants com=lxy-bas0 perc0=(1-com[:,0])*(1-com[:,1]) perc1=(1-com[:,0])*com[:,1] perc2=com[:,0]*(1-com[:,1]) perc3=com[:,0]*com[:,1] #create final image Iout=np.zeros(I.shape) #zeroing indices outside boundaries check_xbas0=np.where(np.bitwise_or(bas0[:,0]<0,bas0[:,0]>=I.shape[0])) check_ybas0=np.where(np.bitwise_or(bas0[:,1]<0,bas0[:,1]>=I.shape[1])) bas0[check_xbas0,0]=0 bas0[check_ybas0,1]=0 check_xbas1=np.where(np.bitwise_or(bas1[:,0]<0,bas1[:,0]>=I.shape[0])) check_ybas1=np.where(np.bitwise_or(bas1[:,1]<0,bas1[:,1]>=I.shape[1])) bas1[check_xbas1,0]=0 bas1[check_ybas1,1]=0 #hold shape Ish=I.shape[0] #ravel image Ione=I.ravel() #new intensities xyz0=Ione[(bas0[:,0]+bas0[:,1]*Ish).astype('int')] xyz1=Ione[(bas0[:,0]+bas1[:,1]*Ish).astype('int')] xyz2=Ione[(bas1[:,0]+bas0[:,1]*Ish).astype('int')] xyz3=Ione[(bas1[:,0]+bas1[:,1]*Ish).astype('int')] #kill mirroring #xyz0[np.bitwise_or(check_xbas0,check_ybas0)]=0 #xyz1[np.bitwise_or(check_xbas0,check_ybas1)]=0 #xyz2[np.bitwise_or(check_xbas1,check_ybas0)]=0 #xyz3[np.bitwise_or(check_xbas1,check_ybas1)]=0 #apply recalculated intensities Iout=xyz0*perc0+xyz1*perc1+xyz2*perc2+xyz3*perc3 return Iout.reshape(I.shape) def joint_histogram(A,B,binA,binB): ''' Calculate joint histogram and individual histograms for A and B ndarrays Parameters ---------- A, B: ndarrays binA, binB: 1d arrays with the bins Returns ------- JH: joint histogram HA: histogram for A HB: histogram for B Example ------- >>> A=np.array([[1,.5,.2,0,0],[.5,1,.5,0,0],[.2,.5,1,0,0],[0,0,0,0,0],[0,0,0,0,0]]) >>> B=np.array([[0,0,0,0,0],[0,1,.5,.2,0],[0,.5,1,.5,0],[0,.2,.5,1,0],[0,0,0,0,0]]) >>> bin_A=np.array([-np.Inf,.1,.35,.75,np.Inf]) >>> bin_B=np.array([-np.Inf,.1,.35,.75,np.Inf]) >>> JH,HA,HB=joint_histogram(A,B,bin_A,bin_B) ''' A=A.ravel() B=B.ravel() A2=A.copy() B2=B.copy() #assign bins for i in range(1,len(binA)): Ai=np.where(np.bitwise_and(A>binA[i-1],A<=binA[i])) A2[Ai]=i-1 for i in range(1,len(binB)): Bi=np.where(np.bitwise_and(B>binB[i-1],B<=binB[i])) B2[Bi]=i-1 JH=np.zeros((len(binA)-1,len(binB)-1)) #calculate joint histogram for i in range(len(A)): JH[A2[i],B2[i]]+=1 #calculate histogram for A HA=np.zeros(len(binA)-1) for i in range(len(A)): HA[A2[i]]+=1 #calculate histogram for B HB=np.zeros(len(binB)-1) for i in range(len(B)): HB[B2[i]]+=1 return JH,HA,HB def mutual_information(A,B,binA,binB): ''' Calculate mutual information for A and B ''' JH,HA,HB=joint_histogram(A,B,binA,binB) N=float(len(A.ravel())) MI=np.zeros(JH.shape) #print N for i in range(JH.shape[0]): for j in range(JH.shape[1]): Pij= JH[i,j]/N Pi = HA[i]/N Pj= HB[j]/N #print i,j, Pij, Pi, Pj, JH[i,j], HA[i], HB[j] MI[i,j]=Pij*np.log2(Pij/(Pi*Pj)) MI[np.isnan(MI)]=0 return MI.sum() def apply_mapping(A,T,order=0,map_type='affine2d'): ''' Apply mapping ''' if map_type=='affine2d': #create the different components #translation[2], scale[2], rotation[1], shear[2] if len(T)==7: tc1,tc2,sc1,sc2,rc,sch1,sch2=T if len(T)==5: tc1,tc2,sc1,sc2,rc=T sch1,sch2=(0,0) if len(T)==4: tc1,tc2,rc,sc=T sc1,sc2,sch1,sch2=(sc,sc,1,1) if len(T)==3: tc1,tc2,rc=T sc1,sc2,sch1,sch2=(1,1,0,0) #translation TC=np.matrix([[1,0,tc1], [0,1,tc2], [0,0, 1]]) #scaling SC=np.matrix([[sc1, 0, 0], [0, sc2, 0], [0, 0, 1]]) #rotation RC=np.matrix([[cos(rc), sin(rc), 0], [-sin(rc), cos(rc), 0], [0 , 0, 1]]) #shear SHC=np.matrix([[1, sch1,0], [sch2, 1,0], [0, 0,1]]) #apply #M=TC*SC*RC*SHC if len(T)==3: M=TC*RC if len(T)==4: M=TC*SC*RC if len(T)==5: M=TC*SC*RC if len(T)==7: M=TC*SC*RC*SHC M=np.array(M) AT=affine_transform2d(A,M) return AT def 
objective_mi(T,A,B,binA,binB,order=0,map_type='affine2d'): ''' Objective function for mutual information ''' AT=apply_mapping(A,T,order=0,map_type=map_type) #AT=np.round(AT) AT=AT.T NegMI= -mutual_information(AT,B,binA,binB) print '====',T,'====> - MI : ',NegMI #pylab.imshow(AT) #raw_input('Press Enter...') #pylab.imshow(np.hstack((A,B,AT))) #raw_input('Press Enter...') return NegMI def objective_sd(T,A,B,order=0,map_type='affine2d'): AT=apply_mapping(A,T,order=0,map_type=map_type) AT=AT.T if AT.sum()==0: SD=10**15 else: SD= np.sum((AT-B)**2)/np.prod(AT.shape) print '====',T,'====> SD : ',SD #pylab.imshow(np.hstack((A,B,AT))) #raw_input('Press Enter...') return SD def register(A,B,guess,metric='sd',binA=None,binB=None,xtol=0.1,ftol=0.01,order=0,map_type='affine2d'): ''' Register source A to target B using modified powell's method Powell's method tries to minimize the objective function ''' if metric=='mi': finalT=fmin_powell(objective_mi,x0=guess,args=(A,B,binA,binB,order,map_type),xtol=xtol,ftol=ftol) #finalT=leastsq(func=objective_mi,x0=np.array(guess),args=(A,B,binA,binB,order,map_type)) if metric=='sd': finalT=fmin_powell(objective_sd,x0=guess,args=(A,B,order,map_type),xtol=xtol,ftol=ftol) #finalT=leastsq(func=objective_sd,x0=np.array(guess),args=(A,B,order,map_type)) return finalT def evaluate(A,B,guess,metric='sd',binA=None,binB=None,xtol=0.1,ftol=0.01,order=0,map_type='affine2d'): #tc1,tc2,sc1,sc2,rc=T tc1=np.linspace(-50,50,20) tc2=np.linspace(-50,50,20) sc1=np.linspace(-1.2,1.2,10) sc2=np.linspace(-1.2,1.2,10) rc=np.linspace(0,np.pi,8) f_min=np.inf T_final=[] ''' for c1 in tc1: for c2 in tc2: for s1 in sc1: for s2 in sc2: for r in rc: T=[c1,c2,s1,s2,r] f=objective_sd(T,A,B,order=0,map_type='affine2d') if f0)[0]: del C[k[i]] return C def most(C): for c in C: pass # pf.most_similar_track_mam() T=pkl.load_pickle(fname) print 'Reducing the number of points...' T=[pf.approx_polygon_track(t) for t in T] print 'Reducing further to tracks with 3 pts...' T2=[tm.downsample(t,3) for t in T] print 'LARCH ...' print 'Splitting ...' t=time.clock() C=pf.larch_3split(T2,None,5.) print time.clock()-t, len(C) for c in C: print c, C[c]['rep3']/C[c]['N'] r=show_rep3(C) print 'Merging ...' t=time.clock() C=merge(C,5.) print time.clock()-t, len(C) for c in C: print c, C[c]['rep3']/C[c]['N'] show_rep3(C,r,fos.red) ''' #print 'Showing initial dataset.' r=fos.ren() #fos.add(r,fos.line(T,fos.white,opacity=1)) #fos.show(r) print 'Showing dataset after clustering.' #fos.clear(r) colors=np.zeros((len(T),3)) for c in C: color=np.random.rand(1,3) for i in C[c]['indices']: colors[i]=color fos.add(r,fos.line(T,colors,opacity=1)) fos.show(r) print 'Some statistics about the clusters' print 'Number of clusters',len(C.keys()) lens=[len(C[c]['indices']) for c in C] print 'max ',max(lens), 'min ',min(lens) print 'singletons ',lens.count(1) print 'doubletons ',lens.count(2) print 'tripletons ',lens.count(3) print 'Showing dataset after merging.' 
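# --------------------------------------------------------------------------
# Added note (not part of the original joint_hist.py): the register() driver
# above feeds Powell's method either a sum-of-squared-differences cost or the
# negative mutual information computed by joint_histogram() and
# mutual_information().  The sketch below computes the same MI quantity in a
# vectorised way with np.histogram2d on toy data (a fixed bin count instead of
# the explicit +/-Inf bin edges used above):
import numpy as np

def mutual_information_sketch(a, b, bins=8):
    jh, _, _ = np.histogram2d(np.ravel(a), np.ravel(b), bins=bins)
    pxy = jh / jh.sum()                              # joint probabilities
    px = pxy.sum(axis=1, keepdims=True)              # marginal of a
    py = pxy.sum(axis=0, keepdims=True)              # marginal of b
    nz = pxy > 0                                     # 0 * log 0 counts as 0
    return np.sum(pxy[nz] * np.log2(pxy[nz] / (px * py)[nz]))

# x = np.random.RandomState(0).rand(1000)
# mutual_information_sketch(x, x) is much larger than
# mutual_information_sketch(x, np.random.RandomState(1).rand(1000))
# --------------------------------------------------------------------------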
fos.clear(r) T=[t + np.array([120,0,0]) for t in T] colors=np.zeros((len(T),3)) for c in C2: color=np.random.rand(1,3) for i in C2[c]['indices']: colors[i]=color fos.add(r,fos.line(T,colors,opacity=1)) fos.show(r) print 'Some statistics about the clusters' print 'Number of clusters',len(C.keys()) lens=[len(C2[c]['indices']) for c in C] print 'max ',max(lens), 'min ',min(lens) print 'singletons ',lens.count(1) print 'doubletons ',lens.count(2) print 'tripletons ',lens.count(3) ''' dipy-0.13.0/scratch/very_scratch/profile_dti.py000066400000000000000000000013461317371701200215430ustar00rootroot00000000000000# Profiling by fitting an actual, rather sizeable data-set. import time import numpy as np import dipy.data as dpd import dipy.reconst.dti as dti reload(dti) img, gtab = dpd.read_stanford_hardi() t1 = time.time() dm_ols = dti.TensorModel(gtab, fit_method='OLS') fit_ols = dm_ols.fit(img.get_data()) t2 = time.time() print("Done with OLS. That took %s seconds to run"%(t2-t1)) dm_nlls = dti.TensorModel(gtab, fit_method='NLLS') fit_nlls = dm_nlls.fit(img.get_data()) t3 = time.time() print("Done with NLLS. That took %s seconds to run"%(t3-t2)) dm_restore = dti.TensorModel(gtab, fit_method='restore', sigma=10) fit_restore = dm_restore.fit(img.get_data()) t4 = time.time() print("Done with RESTORE. That took %s seconds to run"%(t4-t3)) dipy-0.13.0/scratch/very_scratch/registration_example.py000066400000000000000000000170571317371701200234760ustar00rootroot00000000000000import os import numpy as np import dipy as dp import nibabel as ni import resources import time from subprocess import Popen,PIPE #Registration options #similarity 'cc', 'cr', 'crl1', 'mi', je', 'ce', 'nmi', 'smi'. 'cr' similarity='cr' #interp 'pv', 'tri' interp = 'tri' #subsampling None or sequence (3,) subsampling=[1,1,1] #search 'affine', 'rigid', 'similarity' or ['rigid','affine'] search='affine' #optimizer 'simplex', 'powell', 'steepest', 'cg', 'bfgs' or #sequence of optimizers optimizer= 'powell' def eddy_current_correction(data,affine,target=None,target_affine=None): result=[] no_dirs=data.shape[-1] if target==None and target_affine==None: target=ni.Nifti1Image(data[:,:,:,0],affine) else: target=ni.Nifti1Image(target,target_affine) for i in range(1,no_dirs): source=ni.Nifti1Image(data[:,:,:,i],affine) T=dp.volume_register(source,target,similarity,\ interp,subsampling,search,optimizer) sourceT=dp.volume_transform(source, T.inv(), reference=target) print i, sourceT.get_data().shape, sourceT.affine.shape result.append(sourceT) result.insert(0,target) print 'no of images',len(result) return ni.concat_images(result) def register_source_2_target(source_data,source_affine,target_data,target_affine): #subsampling=target_data.shape[:3] target=ni.Nifti1Image(target_data,target_affine) source=ni.Nifti1Image(source_data,source_affine) T=dp.volume_register(source,target,similarity,\ interp,subsampling,search,optimizer) sourceT=dp.volume_transform(source, T.inv(), reference=target) return sourceT def save_volumes_as_mosaic(fname,volume_list): import Image vols=[] for vol in volume_list: vol=np.rollaxis(vol,2,1) sh=vol.shape arr=vol.reshape(sh[0],sh[1]*sh[2]) arr=np.interp(arr,[arr.min(),arr.max()],[0,255]) arr=arr.astype('ubyte') print 'arr.shape',arr.shape vols.append(arr) mosaic=np.concatenate(vols) Image.fromarray(mosaic).save(fname) def haircut_dwi_reference(nii,nii_hair): cmd='bet '+nii+' '+ nii_hair + ' -f .2 -g 0' print cmd p = Popen(cmd, shell=True,stdout=PIPE,stderr=PIPE) sto=p.stdout.readlines() ste=p.stderr.readlines() print sto 
print ste def register_FA_same_subj_diff_sessions(dname_grid,dname_shell): print('create temporary directory') tmp_dir='/tmp' print('load dicom data') data_gr,affine_gr,bvals_gr,gradients_gr=dp.load_dcm_dir(dname_grid) data_sh,affine_sh,bvals_sh,gradients_sh=dp.load_dcm_dir(dname_shell) print('save DWI reference as nifti') tmp_grid=os.path.join(tmp_dir,os.path.basename(dname_grid)+'_ref.nii') tmp_shell=os.path.join(tmp_dir,os.path.basename(dname_shell)+'_ref.nii') ni.save(ni.Nifti1Image(data_gr[...,0],affine_gr),tmp_grid) ni.save(ni.Nifti1Image(data_sh[...,0],affine_sh),tmp_shell) print('prepare filenames for haircut (bet)') tmp_grid_bet=os.path.join(os.path.dirname(tmp_grid),\ os.path.splitext(os.path.basename(dname_grid))[0]+\ '_ref_bet.nii.gz') tmp_shell_bet=os.path.join(os.path.dirname(tmp_shell),\ os.path.splitext(os.path.basename(dname_shell))[0]+\ '_ref_bet.nii.gz') print('bet is running') haircut_dwi_reference(tmp_grid,tmp_grid_bet) haircut_dwi_reference(tmp_shell,tmp_shell_bet) print('load nii.gz reference (s0) volumes') img_gr_bet=ni.load(tmp_grid_bet) img_sh_bet=ni.load(tmp_shell_bet) print('register the shell reference to the grid reference') source=img_sh_bet target=img_gr_bet T=dp.volume_register(source,target,similarity,\ interp,subsampling,search,optimizer) print('apply the inverse of the transformation matrix') sourceT=dp.volume_transform(source, T.inv(), reference=target) #ni.save(sourceT,'/tmp/result.nii.gz') print('calculate FA for grid and shell data') FA_grid=dp.Tensor( data_gr,bvals_gr,gradients_gr,thresh=50).FA FA_shell=dp.Tensor(data_sh,bvals_sh,gradients_sh,thresh=50).FA print('create an FA nibabel image for shell') FA_shell_img=ni.Nifti1Image(FA_shell,affine_sh) print('transform FA_shell') FA_shell_imgT=dp.volume_transform(FA_shell_img,T.inv(),reference=target) return ni.Nifti1Image(FA_grid,affine_gr),FA_shell_imgT def flirt(in_nii, ref_nii,out_nii,transf_mat): cmd='flirt -in ' + in_nii + ' -ref ' + ref_nii + ' -out ' \ + out_nii +' -dof 6 -omat ' + transf_mat print(cmd) pipe(cmd) def flirt_apply_transform(in_nii, target_nii, out_nii, transf_mat): cmd='flirt -in ' + in_nii + ' -ref ' + target_nii + ' -out ' \ + out_nii +' -init ' + transf_mat +' -applyxfm' print(cmd) pipe(cmd) def test_registration(): S012='/tmp/compare_12_with_32_Verio_directly/18620_0004.nii_S0.nii.gz' S032='/tmp/compare_12_with_32_Verio_directly/18620_0006.nii_S0.nii.gz' S012T='/tmp/compare_12_with_32_Verio_directly/S0_reg.nii.gz' MP='/tmp/compare_12_with_32_Verio_directly/MPRAGE.nii' D114=resources.get_paths('DTI STEAM 114 Trio')[2] data,affine,bvals,gradients=dp.load_dcm_dir(D114) D114i=ni.Nifti1Image(data[...,0],affine) D101=resources.get_paths('DSI STEAM 101 Trio')[2] data,affine,bvals,gradients=dp.load_dcm_dir(D101) D101i=ni.Nifti1Image(data[...,0],affine) ni.save(D101i,'/tmp/compare_12_with_32_Verio_directly/S0_101_reg.nii.gz') #source=ni.load(S012) source=D114i #target=D101i #target=ni.load(S032) target=ni.load(MP) target._data=np.squeeze(target._data) #target._affine= np.dot(np.diag([-1, -1, 1, 1]), target._affine) similarity='cr' interp = 'tri' subsampling=None search='affine' optimizer= 'powell' T=dp.volume_register(source,target,similarity,\ interp,subsampling,search,optimizer) print('Transformation matrix') print(T.inv()) sourceT=dp.volume_transform(source,T.inv(),reference=target,interp_order=0) sourceTd=sourceT.get_data() sourceTd[sourceTd<0]=0 sourceT._data=sourceTd ni.save(sourceT,S012T) sourced=source.get_data() targetd=target.get_data() sourceTd=sourceT.get_data() print 
'source info',sourced.min(), sourced.max() print 'target info',targetd.min(), targetd.max() print 'sourceT info',sourceTd.min(), sourceTd.max() #save_volumes_as_mosaic('/tmp/mosaic_S0_MP_cr_pv_powell.png',\ # [sourced,sourceTd,targetd]) # RAS to LPS np.dot(np.diag([-1, -1, 1, 1]), A) # LPS to RAS if __name__ == '__main__': ''' print('Goal is to compare FA of grid versus shell acquisitions using STEAM') print('find filenames for grid and shell data') dname_grid=resources.get_paths('DSI STEAM 101 Trio')[2] dname_shell=resources.get_paths('DTI STEAM 114 Trio')[2] #print('find filenames for T1') #fname_T1=resources.get_paths('MPRAGE nifti Trio')[2] FA_grid_img,FA_shell_imgT=register_FA_same_subj_diff_sessions(dname_grid,dname_shell) #FA_shell_data=FA_shell_imgT.get_data() #FA_shell_data[FA_shell_data<0]=0 print('tile volumes') save_volumes_as_mosaic('/tmp/mosaic_fa.png',\ [FA_grid_img.get_data(),FA_shell_imgT.get_data()]) ''' dipy-0.13.0/scratch/very_scratch/simulation_comparison_dsi_gqi.py000066400000000000000000000022471317371701200253610ustar00rootroot00000000000000import numpy as np import dipy as dp import dipy.io.pickles as pkl import scipy as sp fname='/home/ian/Data/SimData/results_SNR030_1fibre' #fname='/home/eg01/Data_Backup/Data/Marta/DSI/SimData/results_SNR030_isotropic' ''' file has one row for every voxel, every voxel is repeating 1000 times with the same noise level , then we have 100 different directions. 1000 * 100 is the number of all rows. ''' marta_table_fname='/home/ian/Data/SimData/Dir_and_bvals_DSI_marta.txt' sim_data=np.loadtxt(fname) #bvalsf='/home/eg01/Data_Backup/Data/Marta/DSI/SimData/bvals101D_float.txt' b_vals_dirs=np.loadtxt(marta_table_fname) bvals=b_vals_dirs[:,0]*1000 gradients=b_vals_dirs[:,1:] gq = dp.GeneralizedQSampling(sim_data,bvals,gradients) tn = dp.Tensor(sim_data,bvals,gradients) #''' gqfile = '/home/ian/Data/SimData/gq_SNR030_1fibre.pkl' pkl.save_pickle(gqfile,gq) tnfile = '/home/ian/Data/SimData/tn_SNR030_1fibre.pkl' pkl.save_pickle(tnfile,tn) ''' print tn.evals.shape print tn.evecs.shape evals=tn.evals[0] evecs=tn.evecs[0] print evecs.shape first_directions = tn.evecs[:,:,0] first1000 = first_directions[:1000,:] cross = np.dot(first1000.T,first1000) np.linalg.eig(cross) ''' dipy-0.13.0/scratch/very_scratch/simulation_comparisons.py000066400000000000000000000306431317371701200240460ustar00rootroot00000000000000import nibabel import os import numpy as np import dipy as dp #import dipy.core.generalized_q_sampling as dgqs#dipy. import dipy.reconst.gqi as dgqs import dipy.io.pickles as pkl import scipy as sp from matplotlib.mlab import find #import dipy.core.sphere_plots as splots import dipy.core.sphere_stats as sphats import dipy.core.geometry as geometry import get_vertices as gv #old SimData files ''' results_SNR030_1fibre results_SNR030_1fibre+iso results_SNR030_2fibres_15deg results_SNR030_2fibres_30deg results_SNR030_2fibres_60deg results_SNR030_2fibres_90deg results_SNR030_2fibres+iso_15deg results_SNR030_2fibres+iso_30deg results_SNR030_2fibres+iso_60deg results_SNR030_2fibres+iso_90deg results_SNR030_isotropic ''' #fname='/home/ian/Data/SimData/results_SNR030_1fibre' ''' file has one row for every voxel, every voxel is repeating 1000 times with the same noise level , then we have 100 different directions. 1000 * 100 is the number of all rows. The 100 conditions are given by 10 polar angles (in degrees) 0, 20, 40, 60, 80, 80, 60, 40, 20 and 0, and each of these with longitude angle 0, 40, 80, 120, 160, 200, 240, 280, 320, 360. 
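Equivalently, row r holds noisy repeat r % 1000 of direction condition
r // 1000 (condition, repeat = divmod(r, 1000)), which is why the per-voxel
maxima are reshaped to (100, 1000) in run_comparisons() below.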
''' #new complete SimVoxels files simdata = ['fibres_2_SNR_80_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_60_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_40_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_40_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_20_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_100_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_20_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_40_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_60_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_100_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_60_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_80_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_100_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_100_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_80_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_60_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_40_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_80_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_20_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_60_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_100_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_100_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_20_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_1_SNR_20_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_40_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_20_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_80_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_80_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_20_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_60_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_100_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_80_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_60_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_20_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_100_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_20_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_80_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_80_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_100_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_40_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_60_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_40_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_60_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_40_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_60_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_80_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_1_SNR_40_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_100_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_40_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_20_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00'] simdir = '/home/ian/Data/SimVoxels/' def gq_tn_calc_save(): for simfile in simdata: dataname = simfile print dataname sim_data=np.loadtxt(simdir+dataname) 
marta_table_fname='/home/ian/Data/SimData/Dir_and_bvals_DSI_marta.txt' b_vals_dirs=np.loadtxt(marta_table_fname) bvals=b_vals_dirs[:,0]*1000 gradients=b_vals_dirs[:,1:] gq = dp.GeneralizedQSampling(sim_data,bvals,gradients) gqfile = simdir+'gq/'+dataname+'.pkl' pkl.save_pickle(gqfile,gq) ''' gq.IN gq.__doc__ gq.glob_norm_param gq.QA gq.__init__ gq.odf gq.__class__ gq.__module__ gq.q2odf_params ''' tn = dp.Tensor(sim_data,bvals,gradients) tnfile = simdir+'tn/'+dataname+'.pkl' pkl.save_pickle(tnfile,tn) ''' tn.ADC tn.__init__ tn._getevals tn.B tn.__module__ tn._getevecs tn.D tn.__new__ tn._getndim tn.FA tn.__reduce__ tn._getshape tn.IN tn.__reduce_ex__ tn._setevals tn.MD tn.__repr__ tn._setevecs tn.__class__ tn.__setattr__ tn.adc tn.__delattr__ tn.__sizeof__ tn.evals tn.__dict__ tn.__str__ tn.evecs tn.__doc__ tn.__subclasshook__ tn.fa tn.__format__ tn.__weakref__ tn.md tn.__getattribute__ tn._evals tn.ndim tn.__getitem__ tn._evecs tn.shape tn.__hash__ tn._getD ''' ''' file has one row for every voxel, every voxel is repeating 1000 times with the same noise level , then we have 100 different directions. 100 * 1000 is the number of all rows. At the moment this module is hardwired to the use of the EDS362 spherical mesh. I am assumung (needs testing) that directions 181 to 361 are the antipodal partners of directions 0 to 180. So when counting the number of different vertices that occur as maximal directions we wll map the indices modulo 181. ''' def analyze_maxima(indices, max_dirs, subsets): '''This calculates the eigenstats for each of the replicated batches of the simulation data ''' results = [] for direction in subsets: batch = max_dirs[direction,:,:] index_variety = np.array([len(set(np.remainder(indices[direction,:],181)))]) #normed_centroid, polar_centroid, centre, b1 = sphats.eigenstats(batch) centre, b1 = sphats.eigenstats(batch) # make azimuth be in range (0,360) rather than (-180,180) centre[1] += 360*(centre[1] < 0) #results.append(np.concatenate((normed_centroid, polar_centroid, centre, b1, index_variety))) results.append(np.concatenate((centre, b1, index_variety))) return results #dt_first_directions = tn.evecs[:,:,0].reshape((100,1000,3)) # these are the principal directions for the full set of simulations #gq_tn_calc_save() eds=np.load(os.path.join(os.path.dirname(dp.__file__),'core','matrices','evenly_distributed_sphere_362.npz')) odf_vertices=eds['vertices'] def run_comparisons(sample_data=35): for simfile in [simdata[sample_data]]: dataname = simfile print dataname sim_data=np.loadtxt(simdir+dataname) # gqfile = simdir+'gq/'+dataname+'.pkl' # gq = pkl.load_pickle(gqfile) tnfile = simdir+'tn/'+dataname+'.pkl' tn = pkl.load_pickle(tnfile) dt_first_directions_in=odf_vertices[tn.IN] dt_indices = tn.IN.reshape((100,1000)) dt_results = analyze_maxima(dt_indices, dt_first_directions_in.reshape((100,1000,3)),range(10,91)) # gq_indices = np.array(gq.IN[:,0],dtype='int').reshape((100,1000)) # gq_first_directions_in=odf_vertices[np.array(gq.IN[:,0],dtype='int')] #print gq_first_directions_in.shape # gq_results = analyze_maxima(gq_indices, gq_first_directions_in.reshape((100,1000,3)),range(100)) #for gqi see example dicoms_2_tracks gq.IN[:,0] np.set_printoptions(precision=6, suppress=True, linewidth=200, threshold=5000) out = open('/home/ian/Data/SimVoxels/Out/'+'***_'+dataname,'w') # results = np.hstack((np.vstack(dt_results), np.vstack(gq_results))) results = np.vstack(dt_results) print >> out, results[:,:] out.close() #up = dt_batch[:,2]>= 0 #splots.plot_sphere(dt_batch[up], 'batch 
'+str(direction)) #splots.plot_lambert(dt_batch[up],'batch '+str(direction), centre) #spread = gq.q2odf_params e,v = np.linalg.eigh(np.dot(spread,spread.transpose())) effective_dimension = len(find(np.cumsum(e) > 0.05*np.sum(e))) #95% #rotated = np.dot(dt_batch,evecs) #rot_evals, rot_evecs = np.linalg.eig(np.dot(rotated.T,rotated)/rotated.shape[0]) #eval_order = np.argsort(rot_evals) #rotated = rotated[:,eval_order] #up = rotated[:,2]>= 0 #splot.plot_sphere(rotated[up],'first1000') #splot.plot_lambert(rotated[up],'batch '+str(direction)) def run_gq_sims(sample_data=[35]): for simfile in [simdata[sample] for sample in sample_data]: dataname = simfile print dataname sim_data=np.loadtxt(simdir+dataname) marta_table_fname='/home/ian/Data/SimData/Dir_and_bvals_DSI_marta.txt' b_vals_dirs=np.loadtxt(marta_table_fname) bvals=b_vals_dirs[:,0]*1000 gradients=b_vals_dirs[:,1:] for j in range(10): s = sim_data[10000+j,:] gqs = dp.GeneralizedQSampling(s.reshape((1,102)),bvals,gradients,Lambda=7) t0, t1, t2, npa = gqs.npa(s, width = 5) print t0, t1, t2, npa ''' for (i,o) in enumerate(gqs.odf(s)): print i,o for (i,o) in enumerate(gqs.odf_vertices): print i,o ''' #o = gqs.odf(s) #v = gqs.odf_vertices #pole = v[t0[0]] #eqv = dgqs.equatorial_zone_vertices(v, pole, 5) #print 'Number of equatorial vertices: ', len(eqv) #print np.max(o[eqv]),np.min(o[eqv]) #cos_e_pole = [np.dot(pole.T, v[i]) for i in eqv] #print np.min(cos1), np.max(cos1) #print 'equatorial max in equatorial vertices:', t1[0] in eqv #x = np.cross(v[t0[0]],v[t1[0]]) #x = x/np.sqrt(np.sum(x**2)) #print x #ptchv = dgqs.patch_vertices(v, x, 5) #print len(ptchv) #eqp = eqv[np.argmin([np.abs(np.dot(v[t1[0]].T,v[p])) for p in eqv])] #print (eqp, o[eqp]) #print t2[0] in ptchv, t2[0] in eqv #print np.dot(pole.T, v[t1[0]]), np.dot(pole.T, v[t2[0]]) #print ptchv[np.argmin([o[v] for v in ptchv])] #gq_indices = np.array(gq.IN[:,0],dtype='int').reshape((100,1000)) #gq_first_directions_in=odf_vertices[np.array(gq.IN[:,0],dtype='int')] #print gq_first_directions_in.shape #gq_results = analyze_maxima(gq_indices, gq_first_directions_in.reshape((100,1000,3)),range(100)) #for gqi see example dicoms_2_tracks gq.IN[:,0] #np.set_printoptions(precision=6, suppress=True, linewidth=200, threshold=5000) #out = open('/home/ian/Data/SimVoxels/Out/'+'+++_'+dataname,'w') #results = np.hstack((np.vstack(dt_results), np.vstack(gq_results))) #results = np.vstack(dt_results) #print >> out, results[:,:] #out.close() #run_comparisons() run_gq_sims() dipy-0.13.0/scratch/very_scratch/simulation_comparisons_modified.py000066400000000000000000000314751317371701200257120ustar00rootroot00000000000000import nibabel import os import numpy as np import dipy as dp import dipy.core.generalized_q_sampling as dgqs import dipy.io.pickles as pkl import scipy as sp from matplotlib.mlab import find import dipy.core.sphere_plots as splots import dipy.core.sphere_stats as sphats import dipy.core.geometry as geometry import get_vertices as gv #old SimData files ''' results_SNR030_1fibre results_SNR030_1fibre+iso results_SNR030_2fibres_15deg results_SNR030_2fibres_30deg results_SNR030_2fibres_60deg results_SNR030_2fibres_90deg results_SNR030_2fibres+iso_15deg results_SNR030_2fibres+iso_30deg results_SNR030_2fibres+iso_60deg results_SNR030_2fibres+iso_90deg results_SNR030_isotropic ''' #fname='/home/ian/Data/SimData/results_SNR030_1fibre' ''' file has one row for every voxel, every voxel is repeating 1000 times with the same noise level , then we have 100 different directions. 
1000 * 100 is the number of all rows. The 100 conditions are given by 10 polar angles (in degrees) 0, 20, 40, 60, 80, 80, 60, 40, 20 and 0, and each of these with longitude angle 0, 40, 80, 120, 160, 200, 240, 280, 320, 360. ''' #new complete SimVoxels files simdata = ['fibres_2_SNR_80_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_60_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_40_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_40_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_20_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_100_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_20_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_40_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_60_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_100_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_60_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_80_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_100_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_100_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_80_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_60_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_40_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_80_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_20_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_60_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_100_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_100_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_20_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_1_SNR_20_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_40_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_20_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_80_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_80_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_20_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_60_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_100_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_80_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_60_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_20_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_100_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_20_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_80_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_80_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_100_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_40_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_1_SNR_60_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_40_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_60_angle_30_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_40_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_60_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_80_angle_15_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_1_SNR_40_angle_00_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_100_angle_60_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00', 'fibres_2_SNR_40_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_1_diso_0.7', 'fibres_2_SNR_20_angle_90_l1_1.4_l2_0.35_l3_0.35_iso_0_diso_00'] 
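# --------------------------------------------------------------------------
# Added helper sketch (not in the original script): every entry of simdata
# encodes its simulation parameters directly in the file name as alternating
# key_value tokens, e.g. 'fibres_2_SNR_80_angle_90_l1_1.4_..._iso_0_diso_00'.
# A small parser for that naming scheme:

def parse_sim_name(name):
    tokens = name.split('_')
    return dict(zip(tokens[0::2], tokens[1::2]))

# parse_sim_name(simdata[0]) ->
# {'fibres': '2', 'SNR': '80', 'angle': '90', 'l1': '1.4',
#  'l2': '0.35', 'l3': '0.35', 'iso': '0', 'diso': '00'}
# --------------------------------------------------------------------------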
simdir = '/home/ian/Data/SimVoxels/' def gq_tn_calc_save(): for simfile in simdata: dataname = simfile print dataname sim_data=np.loadtxt(simdir+dataname) marta_table_fname='/home/ian/Data/SimData/Dir_and_bvals_DSI_marta.txt' b_vals_dirs=np.loadtxt(marta_table_fname) bvals=b_vals_dirs[:,0]*1000 gradients=b_vals_dirs[:,1:] gq = dp.GeneralizedQSampling(sim_data,bvals,gradients) gqfile = simdir+'gq/'+dataname+'.pkl' pkl.save_pickle(gqfile,gq) ''' gq.IN gq.__doc__ gq.glob_norm_param gq.QA gq.__init__ gq.odf gq.__class__ gq.__module__ gq.q2odf_params ''' tn = dp.Tensor(sim_data,bvals,gradients) tnfile = simdir+'tn/'+dataname+'.pkl' pkl.save_pickle(tnfile,tn) ''' tn.ADC tn.__init__ tn._getevals tn.B tn.__module__ tn._getevecs tn.D tn.__new__ tn._getndim tn.FA tn.__reduce__ tn._getshape tn.IN tn.__reduce_ex__ tn._setevals tn.MD tn.__repr__ tn._setevecs tn.__class__ tn.__setattr__ tn.adc tn.__delattr__ tn.__sizeof__ tn.evals tn.__dict__ tn.__str__ tn.evecs tn.__doc__ tn.__subclasshook__ tn.fa tn.__format__ tn.__weakref__ tn.md tn.__getattribute__ tn._evals tn.ndim tn.__getitem__ tn._evecs tn.shape tn.__hash__ tn._getD ''' ''' file has one row for every voxel, every voxel is repeating 1000 times with the same noise level , then we have 100 different directions. 100 * 1000 is the number of all rows. At the moment this module is hardwired to the use of the EDS362 spherical mesh. I am assumung (needs testing) that directions 181 to 361 are the antipodal partners of directions 0 to 180. So when counting the number of different vertices that occur as maximal directions we wll map the indices modulo 181. ''' def analyze_maxima(indices, max_dirs, subsets): '''This calculates the eigenstats for each of the replicated batches of the simulation data ''' results = [] for direction in subsets: batch = max_dirs[direction,:,:] index_variety = np.array([len(set(np.remainder(indices[direction,:],181)))]) #normed_centroid, polar_centroid, centre, b1 = sphats.eigenstats(batch) centre, b1 = sphats.eigenstats(batch) # make azimuth be in range (0,360) rather than (-180,180) centre[1] += 360*(centre[1] < 0) #results.append(np.concatenate((normed_centroid, polar_centroid, centre, b1, index_variety))) results.append(np.concatenate((centre, b1, index_variety))) return results #dt_first_directions = tn.evecs[:,:,0].reshape((100,1000,3)) # these are the principal directions for the full set of simulations #gq_tn_calc_save() eds=np.load(os.path.join(os.path.dirname(dp.__file__),'core','matrices','evenly_distributed_sphere_362.npz')) odf_vertices=eds['vertices'] def run_comparisons(sample_data=35): for simfile in [simdata[sample_data]]: dataname = simfile print dataname sim_data=np.loadtxt(simdir+dataname) gqfile = simdir+'gq/'+dataname+'.pkl' gq = pkl.load_pickle(gqfile) tnfile = simdir+'tn/'+dataname+'.pkl' tn = pkl.load_pickle(tnfile) dt_first_directions_in=odf_vertices[tn.IN] dt_indices = tn.IN.reshape((100,1000)) dt_results = analyze_maxima(dt_indices, dt_first_directions_in.reshape((100,1000,3)),range(10,90)) gq_indices = np.array(gq.IN[:,0],dtype='int').reshape((100,1000)) gq_first_directions_in=odf_vertices[np.array(gq.IN[:,0],dtype='int')] #print gq_first_directions_in.shape gq_results = analyze_maxima(gq_indices, gq_first_directions_in.reshape((100,1000,3)),range(10,90)) #for gqi see example dicoms_2_tracks gq.IN[:,0] np.set_printoptions(precision=3, suppress=True, linewidth=200, threshold=5000) out = open('/home/ian/Data/SimVoxels/Out/'+'***_'+dataname,'w') #print np.vstack(dt_results).shape, 
np.vstack(gq_results).shape results = np.hstack((np.vstack(dt_results), np.vstack(gq_results))) #print results.shape #results = np.vstack(dt_results) print >> out, results[:,:] out.close() #up = dt_batch[:,2]>= 0 #splots.plot_sphere(dt_batch[up], 'batch '+str(direction)) #splots.plot_lambert(dt_batch[up],'batch '+str(direction), centre) #spread = gq.q2odf_params e,v = np.linalg.eigh(np.dot(spread,spread.transpose())) effective_dimension = len(find(np.cumsum(e) > 0.05*np.sum(e))) #95% #rotated = np.dot(dt_batch,evecs) #rot_evals, rot_evecs = np.linalg.eig(np.dot(rotated.T,rotated)/rotated.shape[0]) #eval_order = np.argsort(rot_evals) #rotated = rotated[:,eval_order] #up = rotated[:,2]>= 0 #splot.plot_sphere(rotated[up],'first1000') #splot.plot_lambert(rotated[up],'batch '+str(direction)) def run_gq_sims(sample_data=[35,23,46,39,40,10,37,27,21,20]): results = [] out = open('/home/ian/Data/SimVoxels/Out/'+'npa+fa','w') for j in range(len(sample_data)): sample = sample_data[j] simfile = simdata[sample] dataname = simfile print dataname sim_data=np.loadtxt(simdir+dataname) marta_table_fname='/home/ian/Data/SimData/Dir_and_bvals_DSI_marta.txt' b_vals_dirs=np.loadtxt(marta_table_fname) bvals=b_vals_dirs[:,0]*1000 gradients=b_vals_dirs[:,1:] for j in np.vstack((np.arange(100)*1000,np.arange(100)*1000+1)).T.ravel(): # 0,1,1000,1001,2000,2001,... s = sim_data[j,:] gqs = dp.GeneralizedQSampling(s.reshape((1,102)),bvals,gradients,Lambda=3.5) tn = dp.Tensor(s.reshape((1,102)),bvals,gradients,fit_method='LS') t0, t1, t2, npa = gqs.npa(s, width = 5) print >> out, dataname, j, npa, tn.fa()[0] ''' for (i,o) in enumerate(gqs.odf(s)): print i,o for (i,o) in enumerate(gqs.odf_vertices): print i,o ''' #o = gqs.odf(s) #v = gqs.odf_vertices #pole = v[t0[0]] #eqv = dgqs.equatorial_zone_vertices(v, pole, 5) #print 'Number of equatorial vertices: ', len(eqv) #print np.max(o[eqv]),np.min(o[eqv]) #cos_e_pole = [np.dot(pole.T, v[i]) for i in eqv] #print np.min(cos1), np.max(cos1) #print 'equatorial max in equatorial vertices:', t1[0] in eqv #x = np.cross(v[t0[0]],v[t1[0]]) #x = x/np.sqrt(np.sum(x**2)) #print x #ptchv = dgqs.patch_vertices(v, x, 5) #print len(ptchv) #eqp = eqv[np.argmin([np.abs(np.dot(v[t1[0]].T,v[p])) for p in eqv])] #print (eqp, o[eqp]) #print t2[0] in ptchv, t2[0] in eqv #print np.dot(pole.T, v[t1[0]]), np.dot(pole.T, v[t2[0]]) #print ptchv[np.argmin([o[v] for v in ptchv])] #gq_indices = np.array(gq.IN[:,0],dtype='int').reshape((100,1000)) #gq_first_directions_in=odf_vertices[np.array(gq.IN[:,0],dtype='int')] #print gq_first_directions_in.shape #gq_results = analyze_maxima(gq_indices, gq_first_directions_in.reshape((100,1000,3)),range(100)) #for gqi see example dicoms_2_tracks gq.IN[:,0] #np.set_printoptions(precision=6, suppress=True, linewidth=200, threshold=5000) #out = open('/home/ian/Data/SimVoxels/Out/'+'+++_'+dataname,'w') #results = np.hstack((np.vstack(dt_results), np.vstack(gq_results))) #results = np.vstack(dt_results) #print >> out, results[:,:] out.close() run_comparisons() #run_gq_sims() dipy-0.13.0/scratch/very_scratch/simulation_dsi.py000066400000000000000000000130221317371701200222600ustar00rootroot00000000000000import numpy as np import dipy as dp import pyglet from pyglet.gl import * #from delaunay.core import Triangulation #http://flub.stuffwillmade.org/delny/ try: # Try and create a window with multisampling (antialiasing) config = Config(sample_buffers=1, samples=4, depth_size=24, double_buffer=True,vsync=False) window = pyglet.window.Window(resizable=True, config=config) 
except pyglet.window.NoSuchConfigException: # Fall back to no multisampling for old hardware window = pyglet.window.Window(resizable=True) #fps_display = pyglet.clock.ClockDisplay() @window.event def on_resize(width, height): # Override the default on_resize handler to create a 3D projection print('%d width, %d height' % (width,height)) glViewport(0, 0, width, height) glMatrixMode(GL_PROJECTION) glLoadIdentity() gluPerspective(60., width / float(height), .1, 1000.) glMatrixMode(GL_MODELVIEW) #window.flip() return pyglet.event.EVENT_HANDLED def update(dt): global rx, ry, rz #rx += dt * 5 #ry += dt * 80 #rz += dt * 30 #rx %= 360 #ry %= 360 #rz %= 360 pass pyglet.clock.schedule(update) #pyglet.clock.schedule_interval(update,1/100.) @window.event def on_draw(): global surf for i in range(0,900,3): if np.random.rand()>0.5: surf.vertex_list.vertices[i]+=0.001*np.random.rand() surf.vertex_list.vertices[i+1]+=0.001*np.random.rand() surf.vertex_list.vertices[i+2]+=0.001*np.random.rand() else: surf.vertex_list.vertices[i]-=0.001*np.random.rand() surf.vertex_list.vertices[i+1]-=0.001*np.random.rand() surf.vertex_list.vertices[i+2]-=0.001*np.random.rand() glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT) glLoadIdentity() #fps_display.draw() #glScalef(3,1,1) glTranslatef(0, 0, -4) glRotatef(rx, 0, 0, 1) glRotatef(ry, 0, 1, 0) glRotatef(rx, 1, 0, 0) batch.draw() #pyglet.image.get_buffer_manager().get_color_buffer().save('/tmp/test.png') print pyglet.clock.get_fps() #window.clear() #fps_display.draw() def setup(): # One-time GL setup glClearColor(1, 1, 1, 1) #glClearColor(0,0,0,0) glColor3f(1, 0, 0) glEnable(GL_DEPTH_TEST) #glEnable(GL_CULL_FACE) # Uncomment this line for a wireframe view glPolygonMode(GL_FRONT_AND_BACK, GL_LINE) glLineWidth(3.) # Simple light setup. On Windows GL_LIGHT0 is enabled by default, # but this is not the case on Linux or Mac, so remember to always # include it. 
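# --------------------------------------------------------------------------
# Added note (not part of the original pyglet demo): the Surface class defined
# further down turns the triangle mesh into smooth per-vertex normals for the
# GL lighting set up here, by summing each triangle's cross-product normal
# onto its three vertices and then normalising.  A standalone numpy version of
# that computation (toy tetrahedron, hypothetical data):
import numpy as np

def vertex_normals_sketch(vertices, faces):
    p, f = np.asarray(vertices, float), np.asarray(faces)
    tri_normals = np.cross(p[f[:, 0]] - p[f[:, 1]], p[f[:, 1]] - p[f[:, 2]])
    normals = np.zeros_like(p)
    for face, n in zip(f, tri_normals):
        normals[face] += n                      # accumulate onto each corner
    lengths = np.sqrt((normals ** 2).sum(axis=1))
    return normals / lengths[:, None]           # unit per-vertex normals

# verts = np.array([[0., 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1]])
# faces = np.array([[0, 1, 2], [0, 1, 3], [0, 2, 3], [1, 2, 3]])
# vertex_normals_sketch(verts, faces).shape == (4, 3)
# --------------------------------------------------------------------------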
glEnable(GL_LIGHTING) glEnable(GL_LIGHT0) glEnable(GL_LIGHT1) # Define a simple function to create ctypes arrays of floats: def vec(*args): return (GLfloat * len(args))(*args) glLightfv(GL_LIGHT0, GL_POSITION, vec(.5, .5, 1, 0)) glLightfv(GL_LIGHT0, GL_SPECULAR, vec(.5, .5, 1, 1)) glLightfv(GL_LIGHT0, GL_DIFFUSE, vec(1, 1, 1, 1)) glLightfv(GL_LIGHT1, GL_POSITION, vec(1, 0, .5, 0)) glLightfv(GL_LIGHT1, GL_DIFFUSE, vec(.5, .0, 0, 1)) glLightfv(GL_LIGHT1, GL_SPECULAR, vec(1, 0, 0, 1)) glMaterialfv(GL_FRONT_AND_BACK, GL_AMBIENT_AND_DIFFUSE, vec(0.5, 0, 0.3, 0.5)) glMaterialfv(GL_FRONT_AND_BACK, GL_SPECULAR, vec(1, 1, 1, 0.5)) glMaterialf(GL_FRONT_AND_BACK, GL_SHININESS, 50) class Surface(object): def __init__(self, vertices,faces,batch,group=None): inds=faces.ravel().tolist() verx=vertices.ravel().tolist() normals=np.zeros((len(vertices),3)) p=vertices l=faces trinormals=np.cross(p[l[:,0]]-p[l[:,1]],p[l[:,1]]-p[l[:,2]],axisa=1,axisb=1) for (i,lp) in enumerate(faces): normals[lp]+=trinormals[i] div=np.sqrt(np.sum(normals**2,axis=1)) div=div.reshape(len(div),1) normals=(normals/div) #normals=vertices/np.linalg.norm(vertices) norms=np.array(normals).ravel().tolist() self.vertex_list = batch.add_indexed(len(vertices),\ GL_TRIANGLES,\ group,\ inds,\ ('v3d/static',verx),\ ('n3d/static',norms)) def delete(self): self.vertex_list.delete() fname='/home/eg01/Data_Backup/Data/Marta/DSI/SimData/results_SNR030_1fibre' #fname='/home/eg01/Data_Backup/Data/Marta/DSI/SimData/results_SNR030_isotropic' marta_table_fname='/home/eg01/Data_Backup/Data/Marta/DSI/SimData/Dir_and_bvals_DSI_marta.txt' sim_data=np.loadtxt(fname) #bvalsf='/home/eg01/Data_Backup/Data/Marta/DSI/SimData/bvals101D_float.txt' dname = '/home/eg01/Data_Backup/Data/Frank_Eleftherios/frank/20100511_m030y_cbu100624/08_ep2d_advdiff_101dir_DSI' #real_data,affine,bvals,gradients=dp.load_dcm_dir(dname) b_vals_dirs=np.loadtxt(marta_table_fname) bvals=b_vals_dirs[:,0]*1000 gradients=b_vals_dirs[:,1:] sim_data=sim_data gq = dp.GeneralizedQSampling(sim_data,bvals,gradients) tn = dp.Tensor(sim_data,bvals,gradients) evals=tn.evals[0] evecs=tn.evecs[0] setup() batch = pyglet.graphics.Batch() eds=np.load('/home/eg01/Devel/dipy/dipy/core/matrices/evenly_distributed_sphere_362.npz') vertices=eds['vertices'] faces=eds['faces'] surf = Surface(vertices,faces, batch=batch) rx = ry = rz = 0 print('Application Starting Now...') pyglet.app.run() dipy-0.13.0/scratch/very_scratch/spherical_statistics.py000066400000000000000000000130771317371701200234730ustar00rootroot00000000000000import numpy as np import dipy.core.meshes as meshes import get_vertices as gv from dipy.core.triangle_subdivide import create_unit_sphere #from dipy.viz import fos #from dipy.io import dicomreaders as dcm #import dipy.core.geometry as geometry #import matplotlib.pyplot as mplp import dipy.core.sphere_plots as splot # set up a dictionary of sphere points that are in use EITHER as a set # directions for diffusion weighted acquisitions OR as a set of # evaluation points for an ODF (orientation distribution function. 
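# Each entry below is keyed by a short name. File-backed sets record the file
# path, the container type ('npz', 'npy' or 'dicom'), the array field that
# holds the vertices (npz only), how many leading rows to drop ('omit'), and
# whether the points cover only a hemisphere and need mirroring ('hemi').
# The 'createN' keys carry no data; get_vertex_set below builds those sets on
# the fly with create_unit_sphere(N).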
sphere_dic = {'fy362': {'filepath' : '/home/ian/Devel/dipy/dipy/core/data/evenly_distributed_sphere_362.npz', 'object': 'npz', 'vertices': 'vertices', 'omit': 0, 'hemi': False}, 'fy642': {'filepath' : '/home/ian/Devel/dipy/dipy/core/data/evenly_distributed_sphere_642.npz', 'object': 'npz', 'vertices': 'odf_vertices', 'omit': 0, 'hemi': False}, 'siem64': {'filepath':'/home/ian/Devel/dipy/dipy/core/tests/data/small_64D.gradients.npy', 'object': 'npy', 'omit': 1, 'hemi': True}, 'create2': {}, 'create3': {}, 'create4': {}, 'create5': {}, 'create6': {}, 'create7': {}, 'create8': {}, 'create9': {}, 'marta200': {'filepath': '/home/ian/Data/Spheres/200.npy', 'object': 'npy', 'omit': 0, 'hemi': True}, 'dsi101': {'filepath': '/home/ian/Data/Frank_Eleftherios/frank/20100511_m030y_cbu100624/08_ep2d_advdiff_101dir_DSI', 'object': 'dicom', 'omit': 0, 'hemi': True}} def plot_sphere(v,key): r = fos.ren() fos.add(r,fos.point(v,fos.green, point_radius= 0.01)) fos.show(r, title=key, size=(1000,1000)) def plot_lambert(v,key): lamb = geometry.lambert_equal_area_projection_cart(*v.T).T (y1,y2) = lamb radius = np.sum(lamb**2,axis=0) < 1 #print inner #print y1[inner] #print y1[-inner] figure = mplp.figure(facecolor='w') current = figure.add_subplot(111) current.patch.set_color('k') current.plot(y1[radius],y2[radius],'.g') current.plot(y1[-radius],y2[-radius],'.r') current.axes.set_aspect(aspect = 'equal', adjustable = 'box') figure.show() figure.waitforbuttonpress() mplp.close() def get_vertex_set(key): if key[:6] == 'create': number = eval(key[6:]) vertices, edges, faces = create_unit_sphere(number) omit = 0 else: entry = sphere_dic[key] #print entry if entry.has_key('omit'): omit = entry['omit'] else: omit = 0 filepath = entry['filepath'] if entry['object'] == 'npz': filearray = np.load(filepath) vertices = filearray[entry['vertices']] elif sphere_dic[key]['object'] == 'npy': vertices = np.load(filepath) elif entry['object'] == 'dicom': data,affine,bvals,gradients=dcm.read_mosaic_dir(filepath) #print (bvals.shape, gradients.shape) grad3 = np.vstack((bvals,bvals,bvals)).transpose() #print grad3.shape #vertices = grad3*gradients vertices = gradients if omit > 0: vertices = vertices[omit:,:] if entry['hemi']: vertices = np.vstack([vertices, -vertices]) print key, ': number of vertices = ', vertices.shape[0], '(drop ',omit,')' return vertices[omit:,:] xup=np.array([ 1,0,0]) xdn=np.array([-1,0,0]) yup=np.array([0, 1,0]) ydn=np.array([0,-1,0]) zup=np.array([0,0, 1]) zdn=np.array([0,0,-1]) #for key in sphere_dic: #for key in ['siem64']: for key in ['fy642']: v = gv.get_vertex_set(key) splot.plot_sphere(v,key) splot.plot_lambert(v,key,centre=np.array([0.,0.])) equat, polar = meshes.spherical_statistics(v,north=xup,width=0.2) l = 2.*len(v) equat = equat/l polar = polar/l print '%6.3f %6.3f %6.3f %6.3f' % (equat.min(), equat.mean(), equat.max(), np.sqrt(equat.var())) print '%6.3f %6.3f %6.3f %6.3f' % (polar.min(), polar.mean(), polar.max(), np.sqrt(polar.var())) def spherical_statistics(vertices, north=np.array([0,0,1]), width=0.02): ''' function to evaluate a spherical triangulation by looking at the variability of numbers of vertices in 'vertices' in equatorial bands of width 'width' orthogonal to each point in 'vertices' ''' equatorial_counts = np.array([len(equatorial_zone_vertices(vertices, pole, width=width)) for pole in vertices if np.dot(pole,north) >= 0]) #equatorial_counts = np.bincount(equatorial_counts) #args = np.where(equatorial_counts>0) #print zip(list(args[0]), equatorial_counts[args]) polar_counts = 
np.array([len(polar_zone_vertices(vertices, pole, width=width)) for pole in vertices if np.dot(pole,north) >= 0]) #unique_counts = np.sort(np.array(list(set(equatorial_counts)))) #polar_counts = np.bincount(polar_counts) #counts_tokens = [(uc, bin_counts[uc]) for uc in bin_counts if ] #args = np.where(polar_counts>0) #print '(number, frequency):', zip(unique_counts,tokens) #print '(number, frequency):', counts_tokens #print zip(args, bin_counts[args]) #print zip(list(args[0]), polar_counts[args]) return equatorial_counts, polar_counts def spherical_proportion(zone_width): # assuming radius is 1: (2*np.pi*zone_width)/(4*np.pi) # 0 <= zone_width <= 2 return zone_width/2. def angle_for_zone(zone_width): return np.arcsin(zone_width/2.) def coarseness(faces): faces = np.asarray(faces) coarseness = 0.0 for face in faces: a, b, c = face coarse = np.max(coarse, geom.circumradius(a,b,c)) return coarse dipy-0.13.0/scratch/very_scratch/tractography_clustering_new_fos.py000066400000000000000000000053411317371701200257300ustar00rootroot00000000000000import time import numpy as np from nibabel import trackvis as tv from dipy.core import track_metrics as tm from dipy.core import track_performance as pf from fos.core.scene import Scene from fos.core.actors import Actor from fos.core.plots import Plot from fos.core.tracks import Tracks #fname='/home/eg01/Data_Backup/Data/PBC/pbc2009icdm/brain1/brain1_scan1_fiber_track_mni.trk' fname='/home/eg01/Data_Backup/Data/PBC/pbc2009icdm/brain2/brain2_scan1_fiber_track_mni.trk' #fname='/home/eg309/Data/PBC/pbc2009icdm/brain1/brain1_scan1_fiber_track_mni.trk' opacity=0.5 print 'Loading file...' streams,hdr=tv.read(fname) print 'Copying tracks...' T=[i[0] for i in streams] T=T[:len(T)/5] #T=T[:1000] print 'Representing tracks using only 3 pts...' tracks=[tm.downsample(t,3) for t in T] print 'Deleting unnecessary data...' del streams,hdr print 'Local Skeleton Clustering...' now=time.clock() C=pf.local_skeleton_clustering(tracks,d_thr=20) print 'Done in', time.clock()-now,'s.' print 'Reducing the number of points...' T=[pf.approx_polygon_track(t) for t in T] print 'Showing initial dataset.' #r=fos.ren() #fos.add(r,fos.line(T,fos.white,opacity=0.1)) #fos.show(r) data=T colors =[np.tile(np.array([1,1,1,opacity],'f'),(len(t),1)) for t in T] t=Tracks(data,colors,line_width=1.) t.position=(-100,0,0) print 'Showing dataset after clustering.' print 'Calculating skeletal track for every bundle.' skeletals=[] colors2 = len(data)*[None] colors_sk = []#len(C.keys())*[None] for c in C: color=np.random.rand(3) r,g,b = color bundle=[] for i in C[c]['indices']: colors2[i]=np.tile(np.array([r,g,b,opacity],'f'),(len(data[i]),1)) bundle.append(data[i]) bi=pf.most_similar_track_mam(bundle)[0] C[c]['skeletal']=bundle[bi] if len(C[c]['indices'])>100 and tm.length(bundle[bi])>30.: colors_sk.append( np.tile(np.array([r,g,b,opacity],'f'),(len(bundle[bi]),1)) ) skeletals.append(bundle[bi]) print 'len_data', len(data) print 'len_skeletals', len(skeletals) print 'len_colors2', len(colors2) print 'len_colors_sk', len(colors_sk) t2=Tracks(data,colors2,line_width=1.) t2.position=(100,0,0) sk=Tracks(skeletals,colors_sk,line_width=3.) 
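# A tiny sketch of how the cluster dictionary built above can be queried;
# it relies only on the 'indices' field already used in the colouring loop:
biggest = max(C, key=lambda c: len(C[c]['indices']))
print 'largest cluster:', biggest, 'with', len(C[biggest]['indices']), 'tracks'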
sk.position=(0,0,0) slot={0:{'actor':t,'slot':(0, 800000)}, 1:{'actor':t2,'slot':(0, 800000)}, 2:{'actor':sk,'slot':(0, 800000)}} Scene(Plot(slot)).run() print 'Some statistics about the clusters' lens=[len(C[c]['indices']) for c in C] print 'max ',max(lens), 'min ',min(lens) print 'singletons ',lens.count(1) print 'doubletons ',lens.count(2) print 'tripletons ',lens.count(3) ''' Next Level 12: cluster0=[T[t] for t in C[0]['indices']] 13: pf.most_similar_track_mam(cluster0) ''' dipy-0.13.0/scratch/very_scratch/tractography_clustering_using_larch.py000066400000000000000000000026241317371701200265670ustar00rootroot00000000000000import time import os import numpy as np from nibabel import trackvis as tv from dipy.viz import fos from dipy.io import pickles as pkl from dipy.core import track_learning as tl from dipy.core import track_performance as pf from dipy.core import track_metrics as tm fname='/home/eg01/Data/PBC/pbc2009icdm/brain1/brain1_scan1_fiber_track_mni.trk' C_fname='/tmp/larch_tree.pkl' appr_fname='/tmp/larch_tracks.trk' print 'Loading trackvis file...' streams,hdr=tv.read(fname) print 'Copying tracks...' tracks=[i[0] for i in streams] #tracks=tracks[:1000] #print 'Deleting unnecessary data...' del streams#,hdr if not os.path.isfile(C_fname): print 'Starting LARCH ...' tim=time.clock() C,atracks=tl.larch(tracks,[50.**2,20.**2,5.**2],True,True) #tracks=[tm.downsample(t,3) for t in tracks] #C=pf.local_skeleton_clustering(tracks,20.) print 'Done in total of ',time.clock()-tim,'seconds.' print 'Saving result...' pkl.save_pickle(C_fname,C) streams=[(i,None,None)for i in atracks] tv.write(appr_fname,streams,hdr) else: print 'Loading result...' C=pkl.load_pickle(C_fname) skel=[] for c in C: skel.append(C[c]['repz']) print 'Showing dataset after clustering...' 
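# A small sanity check on the representative tracks gathered above; it uses
# only skel and the track_metrics module imported as tm, and assumes nothing
# about the layout of C beyond the 'repz' field read in the loop above.
if len(skel) > 0:
    print 'number of representative tracks:', len(skel)
    print 'longest representative:', max([tm.length(s) for s in skel])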
r=fos.ren() fos.clear(r) colors=np.zeros((len(skel),3)) for (i,s) in enumerate(skel): color=np.random.rand(1,3) colors[i]=color fos.add(r,fos.line(skel,colors,opacity=1)) fos.show(r) dipy-0.13.0/scratch/very_scratch/warptalk.py000066400000000000000000000272531317371701200210750ustar00rootroot00000000000000import numpy as np import nibabel as nib import numpy.linalg as npl from dipy.io.dpy import Dpy def flirt2aff(mat, in_img, ref_img): """ Transform from `in_img` voxels to `ref_img` voxels given `matfile` Parameters ---------- matfile : (4,4) array contents (as array) of output ``-omat`` transformation file from flirt in_img : img image passed (as filename) to flirt as ``-in`` image ref_img : img image passed (as filename) to flirt as ``-ref`` image Returns ------- aff : (4,4) array Transform from voxel coordinates in ``in_img`` to voxel coordinates in ``ref_img`` """ in_hdr = in_img.header ref_hdr = ref_img.header # get_zooms gets the positive voxel sizes as returned in the header in_zoomer = np.diag(in_hdr.get_zooms() + (1,)) ref_zoomer = np.diag(ref_hdr.get_zooms() + (1,)) # The in_img voxels to ref_img voxels as recorded in the current affines current_in2ref = np.dot(ref_img.affine, in_img.affine) if npl.det(current_in2ref) < 0: raise ValueError('Negative determinant to current affine mapping - bailing out') return np.dot(npl.inv(ref_zoomer), np.dot(mat, in_zoomer)) def flirt2aff_files(matfile, in_fname, ref_fname): """ Map from `in_fname` image voxels to `ref_fname` voxels given `matfile` Parameters ---------- matfile : str filename of output ``-omat`` transformation file from flirt in_fname : str filename for image passed to flirt as ``-in`` image ref_fname : str filename for image passed to flirt as ``-ref`` image Returns ------- aff : (4,4) array Transform from voxel coordinates in image for ``in_fname`` to voxel coordinates in image for ``ref_fname`` """ mat = np.loadtxt(matfile) in_img = nib.load(in_fname) ref_img = nib.load(ref_fname) return flirt2aff(mat, in_img, ref_img) #d101='/home/eg309/Data/TEST_MR10032/subj_10/101/' d101='/home/eg309/Data/PROC_MR10032/subj_10/101/' ffa=d101+'1312211075232351192010092912092080924175865ep2dadvdiffDSI10125x25x25STs005a001_bet_FA.nii.gz' fdis=d101+'1312211075232351192010092912092080924175865ep2dadvdiffDSI10125x25x25STs005a001_nonlin_displacements.nii.gz' ffareg=d101+'1312211075232351192010092912092080924175865ep2dadvdiffDSI10125x25x25STs005a001_bet_FA_reg.nii.gz' flirtaff=d101+'1312211075232351192010092912092080924175865ep2dadvdiffDSI10125x25x25STs005a001_affine_transf.mat' ftrack=d101+'1312211075232351192010092912092080924175865ep2dadvdiffDSI10125x25x25STs005a001_QA_native.dpy' froi='/home/eg309/Data/PROC_MR10032/NIFTI_ROIs/AnatomicalROIs/ROI01_GCC.nii' froi2='/home/eg309/Data/PROC_MR10032/NIFTI_ROIs/AnatomicalROIs/ROI02_BCC.nii' #froi3='/home/eg309/Data/PROC_MR10032/NIFTI_ROIs/AnatomicalROIs/ROI03_SCC.nii' froi3='/home/eg309/Downloads/SCC_analyze.nii' ref_fname = '/usr/share/fsl/data/standard/FMRIB58_FA_1mm.nii.gz' dpr=Dpy(ftrack,'r') print dpr.track_no T=dpr.read_indexed([0,1,2,3,2000,1000000]) for t in T: print t.shape dpr.close() track=T[4] im2im = flirt2aff_files(flirtaff, ffa, ref_fname) #ref_name to be replaced by ffareg print im2im from dipy.core.track_metrics import length print len(track) print length(track) #ntrack=np.dot(im2im[:3,:3],track.T)+im2im[:3,[3]] ntrack=np.dot(track,im2im[:3,:3].T)+im2im[:3,3] print length(ntrack) #print length(ntrack.T) print length(ntrack)/length(track) #print npl.det(im2im)**(1/3.) 
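# The "apply a 4x4 affine to an (N, 3) array of points" pattern above is
# repeated throughout this script (with im2im, daff, roiaff and friends).
# A minimal helper sketch equivalent to those inline expressions; the name is
# arbitrary and the code below keeps using the explicit form:
def apply_affine_to_points(aff, points):
    # aff    : (4, 4) homogeneous affine
    # points : (N, 3) array of coordinates, one point per row
    return np.dot(points, aff[:3, :3].T) + aff[:3, 3]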
disimg=nib.load(fdis) ddata=disimg.get_data() daff=disimg.affine from scipy.ndimage.interpolation import map_coordinates as mc di=ddata[:,:,:,0] dj=ddata[:,:,:,1] dk=ddata[:,:,:,2] mci=mc(di,ntrack.T) mcj=mc(dj,ntrack.T) mck=mc(dk,ntrack.T) wtrack=ntrack+np.vstack((mci,mcj,mck)).T np.set_printoptions(2) print np.hstack((wtrack,ntrack)) print length(wtrack),length(ntrack),length(track) imgroi=nib.load(froi) roidata=imgroi.get_data() roiaff=imgroi.affine roiaff=daff I=np.array(np.where(roidata>0)).T wI=np.dot(roiaff[:3,:3],I.T).T+roiaff[:3,3] print wI.shape wI=wI.astype('f4') imgroi2=nib.load(froi2) roidata2=imgroi2.get_data() roiaff2=imgroi2.affine roiaff2=daff I2=np.array(np.where(roidata2>0)).T wI2=np.dot(roiaff2[:3,:3],I2.T).T+roiaff2[:3,3] print wI2.shape wI2=wI2.astype('f4') imgroi3=nib.load(froi3) roidata3=imgroi3.get_data() roiaff3=imgroi3.affine roiaff3=daff I3=np.array(np.where(roidata3>0)).T wI3=np.dot(roiaff3[:3,:3],I3.T).T+roiaff3[:3,3] print wI3.shape wI3=wI3.astype('f4') dpr=Dpy(ftrack,'r') print dpr.track_no from time import time t1=time() iT=np.random.randint(0,dpr.track_no,10*10**2) T=dpr.read_indexed(iT) dpr.close() t2=time() print t2-t1,len(T) Tfinal=[] ''' for (i,track) in enumerate(T): print i ntrack=np.dot(track,im2im[:3,:3].T)+im2im[:3,3] mci=mc(di,ntrack.T) mcj=mc(dj,ntrack.T) mck=mc(dk,ntrack.T) wtrack=ntrack+np.vstack((mci,mcj,mck)).T Tfinal.append(np.dot(wtrack,daff[:3,:3].T)+daff[:3,3]) ''' lengths=[len(t) for t in T] lengths.insert(0,0) offsets=np.cumsum(lengths) caboodle=np.concatenate(T,axis=0) ntrack=np.dot(caboodle,im2im[:3,:3].T)+im2im[:3,3] mci=mc(di,ntrack.T,order=1) mcj=mc(dj,ntrack.T,order=1) mck=mc(dk,ntrack.T,order=1) wtrack=ntrack+np.vstack((mci,mcj,mck)).T caboodlew=np.dot(wtrack,daff[:3,:3].T)+daff[:3,3] #caboodlew=np.dot(wtrack,roiaff[:3,:3].T)+roiaff[:3,3] Tfinal=[] for i in range(len(offsets)-1): s=offsets[i] e=offsets[i+1] Tfinal.append(caboodlew[s:e]) #ref_fname = '/usr/share/fsl/data/standard/FMRIB58_FA_1mm.nii.gz' ref_fname = '/usr/share/fsl/data/standard/FMRIB58_FA-skeleton_1mm.nii.gz' imgref=nib.load(ref_fname) refdata=imgref.get_data() refaff=imgref.affine ''' refI=np.array(np.where(refdata>5000)).T wrefI=np.dot(refaff[:3,:3],refI.T).T+refaff[:3,3] print wrefI.shape wrefI=wrefI.astype('f4') ''' from dipy.viz import fos froi='/home/eg309/Data/ICBM_Wmpm/ICBM_WMPM.nii' def get_roi(froi,no): imgroi=nib.load(froi) roidata=imgroi.get_data() roiaff=imgroi.affine I=np.array(np.where(roidata==no)).T wI=np.dot(roiaff[:3,:3],I.T).T+roiaff[:3,3] wI=wI.astype('f4') return wI from dipy.viz import fos r=fos.ren() #fos.add(r,fos.point(wI,fos.blue)) #fos.add(r,fos.point(wI2,fos.yellow)) #fos.add(r,fos.point(wI3,fos.green)) #fos.add(r,fos.point(wrefI,fos.cyan)) #fos.add(r,fos.point(wrefI,fos.yellow)) fos.add(r,fos.point(get_roi(froi,3),fos.blue)) fos.add(r,fos.point(get_roi(froi,4),fos.yellow)) fos.add(r,fos.point(get_roi(froi,5),fos.green)) fos.add(r,fos.line(Tfinal,fos.red)) fos.show(r) print roiaff print roiaff2 print roiaff3 print daff ##load roi image #roiimg=ni.load(froi) #roidata=roiimg.get_data() #roiaff=roiimg.affine #print 'roiaff',roiaff,roidata.shape # ##load FA image #faimg=ni.load(ffa) #data=faimg.get_data() #aff=faimg.affine ##aff[0,:]=-aff[0,:] ##aff[0,0]=-aff[0,0] ##aff=np.array([[2.5,0,0,-2.5*48],[0,2.5,0,-2.5*39],[0,0,2.5,-2.5*23],[0,0,0,1]]) # #print 'aff',aff, data.shape # ##cube = np.array([v for v in np.ndindex(5,5,5)]).T + np.array([[47,47,27]]).T #cube = np.array([v for v in np.ndindex(data.shape[0],data.shape[1],data.shape[2])]).T 
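## The commented-out block below chains three mappings: image voxel grid to
## scanner space with aff, scanner space to MNI space with the flirt matrix
## laff, and MNI space back onto the MNI voxel grid with the inverse of
## mniaff; the rounding and clipping that follow turn the result into
## nearest-voxel indices used to fill the cube_mni_data mask.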
# ##from image space(image coordinates) to native space (world coordinates) #cube_native = np.dot(aff[:3,:3],cube)+aff[:3,[3]] ##print cube_native.T # ##load flirt affine #laff=np.loadtxt(flirtaff) ##laff[0,:]=-laff[0,:] ##laff=np.linalg.inv(laff) ##laff[:3,3]=0 #print 'laff',laff ##print 'inverting laff' # # ##from native space(world coordinates) to mni space(world coordinates) #cube_mni = np.dot(laff[:3,:3],cube_native)+laff[:3,[3]] ##print cube_mni.T # #dis=ni.load(fdis) #disdata=dis.get_data() #mniaff=dis.affine #print 'mniaff',mniaff # ##invert disaff #mniaffinv= np.linalg.inv(mniaff) ##from mni space(world coordinates) to image mni space (image coordinates) #cube_mni_grid = np.dot(mniaffinv[:3,:3],cube_mni)+mniaffinv[:3,[3]] #print cube_mni_grid.shape # #cube_mni_grid_nearest=np.round(cube_mni_grid).astype(np.int) # #print np.max(cube_mni_grid[0,:]) #print np.max(cube_mni_grid[1,:]) #print np.max(cube_mni_grid[2,:]) # #print np.max(cube_mni_grid_nearest[0,:]) #print np.max(cube_mni_grid_nearest[1,:]) #print np.max(cube_mni_grid_nearest[2,:]) # #d0,d1,d2,junk = disdata.shape # #cube_mni_grid_nearest[np.where(cube_mni_grid_nearest<0)]=0 #cube_mni_grid_nearest[np.where(cube_mni_grid_nearest>181)]=0 # #n0=cube_mni_grid_nearest[0,:] #n1=cube_mni_grid_nearest[1,:] #n2=cube_mni_grid_nearest[2,:] ''' n0 = np.min(np.max(cube_mni_grid_nearest[0,:],0),d0) n1 = np.min(np.max(cube_mni_grid_nearest[1,:],0),d1) n2 = np.min(np.max(cube_mni_grid_nearest[2,:],0),d2) ''' #cube_mni_data=np.zeros(disdata.shape[:-1],dtype=np.float32) #cube_mni_data[n0,n1,n2]=1 ''' D=disdata[n0,n1,n2] ''' #from dipy.viz import fos #r=fos.ren() ##fos.add(r,fos.point(cube.T,fos.red)) ##fos.add(r,fos.point(cube_native.T,fos.yellow)) #fos.add(r,fos.point(cube_mni.T,fos.green)) #fos.add(r,fos.sphere(np.array([0,0,0]),10)) # ##fos.add(r,fos.point(cube_mni_grid_nearest.T,fos.red)) ###fos.add(r,fos.point(cube.T,fos.green)) ###fos.add(r,fos.point(cube_mni_grid.T,fos.red)) ###fos.add(r,fos.point(cube.T,fos.yellow)) #fos.show(r) # #def map_to_index(grid,shape): # x=grid[0,:] # y=grid[1,:] # z=grid[2,:] # xmin=x.min() # ymin=y.min() # zmin=z.min() # xmax=x.max() # ymax=y.max() # zmax=z.max() # i=(x-xmin)/(xmax-xmin)*shape[0] # j=(y-ymin)/(ymax-ymin)*shape[1] # k=(z-zmin)/(zmax-zmin)*shape[2] # return i,j,k # #i,j,k=map_to_index(cube_mni_grid,(182,218,182)) # #from scipy.ndimage import map_coordinates #FA_MNI_IMG = map_coordinates(data,np.c_[i, j, k].T) #from dipy.viz import fos #r=fos.ren() #fos.add(r,fos.point(cube_mni.T,fos.blue)) #fos.add(r,fos.point(cube_native.T,fos.green)) #fos.add(r,fos.point(cube_mni_grid.T,fos.red)) #fos.add(r,fos.point(cube.T,fos.yellow)) #fos.show(r) ###corner = cube[:,:].astype(np.int).T #print corner ###print data[corner[:,0:27],corner[:,0:27],corner[:,0:27]] #def func(x,y): # return (x+y)*np.exp(-5.*(x**2+y**2)) # #def map_to_index(x,y,bounds,N,M): # xmin,xmax,ymin,ymax=bounds # i1=(x-xmin)/(xmax-xmin)*N # i2=(y-ymin)/(ymax-ymin)*M # return i1,i2 # #x,y=np.mgrid[-1:1:10j,-1:1:10j] #fvals=func(x,y) # #xn,yn=np.mgrid[-1:1:100j,-1:1:100j] #i1,i2 = map_to_index(xn,yn,[-1,1,-1,1],*x.shape) # #from scipy.ndimage import map_coordinates # #fn = map_coordinates(fvals,[i1,i2]) #true = func(xn,yn) def test_flirt2aff(): from os.path import join as pjoin from nose.tools import assert_true import scipy.ndimage as ndi import nibabel as nib ''' matfile = pjoin('fa_data', '1312211075232351192010092912092080924175865ep2dadvdiffDSI10125x25x25STs005a001_affine_transf.mat') in_fname = pjoin('fa_data', 
'1312211075232351192010092912092080924175865ep2dadvdiffDSI10125x25x25STs005a001_bet_FA.nii.gz') ''' matfile=flirtaff in_fname = ffa ref_fname = '/usr/share/fsl/data/standard/FMRIB58_FA_1mm.nii.gz' res = flirt2aff_files(matfile, in_fname, ref_fname) mat = np.loadtxt(matfile) in_img = nib.load(in_fname) ref_img = nib.load(ref_fname) assert_true(np.all(res == flirt2aff(mat, in_img, ref_img))) # mm to mm transform mm_in2mm_ref = np.dot(ref_img.affine, np.dot(res, npl.inv(in_img.affine))) # make new in image thus transformed in_data = in_img.get_data() ires = npl.inv(res) in_data[np.isnan(in_data)] = 0 resliced_data = ndi.affine_transform(in_data, ires[:3,:3], ires[:3,3], ref_img.shape) resliced_img = nib.Nifti1Image(resliced_data, ref_img.affine) nib.save(resliced_img, 'test.nii') dipy-0.13.0/setup.py000077500000000000000000000232121317371701200142570ustar00rootroot00000000000000#!/usr/bin/env python """ Installation script for dipy package """ import os import sys import platform from copy import deepcopy from os.path import join as pjoin, dirname, exists from glob import glob # BEFORE importing distutils, remove MANIFEST. distutils doesn't properly # update it when the contents of directories change. if exists('MANIFEST'): os.remove('MANIFEST') # force_setuptools can be set from the setup_egg.py script if 'force_setuptools' not in globals(): # For some commands, always use setuptools if len(set(('develop', 'bdist_egg', 'bdist_rpm', 'bdist', 'bdist_dumb', 'bdist_mpkg', 'bdist_wheel', 'install_egg_info', 'egg_info', 'easy_install')).intersection(sys.argv)) > 0: force_setuptools = True else: force_setuptools = False if force_setuptools: import setuptools # Import distutils _after_ potential setuptools import above, and after removing # MANIFEST from distutils.core import setup from distutils.extension import Extension from cythexts import cyproc_exts, get_pyx_sdist from setup_helpers import (install_scripts_bat, add_flag_checking, SetupDependency, read_vars_from, make_np_ext_builder) from version_helpers import get_comrec_build # Get version and release info, which is all stored in dipy/info.py info = read_vars_from(pjoin('dipy', 'info.py')) # We may just have imported setuptools, or we may have been exec'd from a # setuptools environment like pip using_setuptools = 'setuptools' in sys.modules extra_setuptools_args = {} if using_setuptools: # Try to preempt setuptools monkeypatching of Extension handling when Pyrex # is missing. Otherwise the monkeypatched Extension will change .pyx # filenames to .c filenames, and we probably don't have the .c files. 
sys.path.insert(0, pjoin(dirname(__file__), 'fake_pyrex')) # Set setuptools extra arguments extra_setuptools_args = dict( tests_require=['nose'], test_suite='nose.collector', zip_safe=False, extras_require=dict( doc=['Sphinx>=1.0'], test=['nose>=0.10.1'])) # Define extensions EXTS = [] # We use some defs from npymath, but we don't want to link against npymath lib ext_kwargs = {'include_dirs': ['src']} # We add np.get_include() later for modulename, other_sources, language in ( ('dipy.reconst.peak_direction_getter', [], 'c'), ('dipy.reconst.recspeed', [], 'c'), ('dipy.reconst.vec_val_sum', [], 'c'), ('dipy.reconst.quick_squash', [], 'c'), ('dipy.tracking.distances', [], 'c'), ('dipy.tracking.streamlinespeed', [], 'c'), ('dipy.tracking.local.localtrack', [], 'c'), ('dipy.tracking.local.direction_getter', [], 'c'), ('dipy.tracking.local.tissue_classifier', [], 'c'), ('dipy.tracking.local.interpolation', [], 'c'), ('dipy.tracking.vox2track', [], 'c'), ('dipy.tracking.propspeed', [], 'c'), ('dipy.tracking.fbcmeasures', [], 'c'), ('dipy.segment.cythonutils', [], 'c'), ('dipy.segment.featurespeed', [], 'c'), ('dipy.segment.metricspeed', [], 'c'), ('dipy.segment.clusteringspeed', [], 'c'), ('dipy.segment.clustering_algorithms', [], 'c'), ('dipy.segment.mrf', [], 'c'), ('dipy.denoise.denspeed', [], 'c'), ('dipy.denoise.pca_noise_estimate', [], 'c'), ('dipy.denoise.nlmeans_block', [], 'c'), ('dipy.denoise.enhancement_kernel', [], 'c'), ('dipy.denoise.shift_twist_convolution', [], 'c'), ('dipy.align.vector_fields', [], 'c'), ('dipy.align.sumsqdiff', [], 'c'), ('dipy.align.expectmax', [], 'c'), ('dipy.align.crosscorr', [], 'c'), ('dipy.align.bundlemin', [], 'c'), ('dipy.align.transforms', [], 'c'), ('dipy.align.parzenhist', [], 'c'), ('dipy.utils.omp', [], 'c')): pyx_src = pjoin(*modulename.split('.')) + '.pyx' EXTS.append(Extension(modulename, [pyx_src] + other_sources, language=language, **deepcopy(ext_kwargs))) # deepcopy lists # Do our own build and install time dependency checking. setup.py gets called in # many different ways, and may be called just to collect information (egg_info). # We need to set up tripwires to raise errors when actually doing things, like # building, rather than unconditionally in the setup.py import or exec We may # make tripwire versions of build_ext, build_py, install need_cython = True pybuilder = get_comrec_build('dipy') # Cython is a dependency for building extensions, iff we don't have stamped # up pyx and c files. build_ext, need_cython = cyproc_exts(EXTS, info.CYTHON_MIN_VERSION, 'pyx-stamps') # Add openmp flags if they work simple_test_c = """int main(int argc, char** argv) { return(0); }""" omp_test_c = """#include int main(int argc, char** argv) { return(0); }""" msc_flag_defines = [[['/openmp'], [], omp_test_c, 'HAVE_VC_OPENMP'], ] gcc_flag_defines = [[['-msse2', '-mfpmath=sse'], [], simple_test_c, 'USING_GCC_SSE2'], [['-fopenmp'], ['-fopenmp'], omp_test_c, 'HAVE_OPENMP'], ] # Test if it is a 32 bits version if not sys.maxsize > 2 ** 32: # This flag is needed only on 32 bits msc_flag_defines += [[['/arch:SSE2'], [], simple_test_c, 'USING_VC_SSE2'], ] flag_defines = msc_flag_defines if 'msc' in platform.python_compiler().lower() else gcc_flag_defines extbuilder = add_flag_checking(build_ext, flag_defines, 'dipy') # Use ext builder to add np.get_include() at build time, not during setup.py # execution. 
extbuilder = make_np_ext_builder(extbuilder) if need_cython: SetupDependency('Cython', info.CYTHON_MIN_VERSION, req_type='install_requires', heavy=True).check_fill(extra_setuptools_args) SetupDependency('numpy', info.NUMPY_MIN_VERSION, req_type='install_requires', heavy=True).check_fill(extra_setuptools_args) SetupDependency('scipy', info.SCIPY_MIN_VERSION, req_type='install_requires', heavy=True).check_fill(extra_setuptools_args) SetupDependency('nibabel', info.NIBABEL_MIN_VERSION, req_type='install_requires', heavy=False).check_fill(extra_setuptools_args) SetupDependency('h5py', info.H5PY_MIN_VERSION, req_type='install_requires', heavy=False).check_fill(extra_setuptools_args) cmdclass = dict( build_py=pybuilder, build_ext=extbuilder, install_scripts=install_scripts_bat, sdist=get_pyx_sdist(include_dirs=['src'])) def main(**extra_args): setup(name=info.NAME, maintainer=info.MAINTAINER, maintainer_email=info.MAINTAINER_EMAIL, description=info.DESCRIPTION, long_description=info.LONG_DESCRIPTION, url=info.URL, download_url=info.DOWNLOAD_URL, license=info.LICENSE, classifiers=info.CLASSIFIERS, author=info.AUTHOR, author_email=info.AUTHOR_EMAIL, platforms=info.PLATFORMS, version=info.VERSION, requires=info.REQUIRES, provides=info.PROVIDES, packages=['dipy', 'dipy.tests', 'dipy.align', 'dipy.align.tests', 'dipy.core', 'dipy.core.tests', 'dipy.direction', 'dipy.direction.tests', 'dipy.tracking', 'dipy.tracking.local', 'dipy.tracking.local.tests', 'dipy.tracking.tests', 'dipy.tracking.benchmarks', 'dipy.reconst', 'dipy.reconst.benchmarks', 'dipy.reconst.tests', 'dipy.io', 'dipy.io.tests', 'dipy.viz', 'dipy.viz.tests', 'dipy.testing', 'dipy.testing.tests', 'dipy.boots', 'dipy.data', 'dipy.utils', 'dipy.data.tests', 'dipy.utils.tests', 'dipy.fixes', 'dipy.external', 'dipy.external.tests', 'dipy.segment', 'dipy.segment.benchmarks', 'dipy.segment.tests', 'dipy.sims', 'dipy.sims.tests', 'dipy.denoise', 'dipy.denoise.tests', 'dipy.workflows', 'dipy.workflows.tests'], ext_modules=EXTS, # The package_data spec has no effect for me (on python 2.6) -- even # changing to data_files doesn't get this stuff included in the source # distribution -- not sure if it has something to do with the magic # above, but distutils is surely the worst piece of code in all of # python -- duplicating things into MANIFEST.in but this is admittedly # only a workaround to get things started -- not a solution package_data={'dipy': [pjoin('data', 'files', '*') ]}, data_files=[('share/doc/dipy/examples', glob(pjoin('doc', 'examples', '*.py')))], scripts=glob(pjoin('bin', 'dipy_*')), cmdclass=cmdclass, **extra_args ) # simple way to test what setup will do # python setup.py install --prefix=/tmp if __name__ == "__main__": main(**extra_setuptools_args) dipy-0.13.0/setup_egg.py000066400000000000000000000006051317371701200150770ustar00rootroot00000000000000#!/usr/bin/env python # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Wrapper to run setup.py using setuptools.""" if __name__ == '__main__': execfile('setup.py', dict(__name__='__main__', __file__='setup.py', # needed in setup.py force_setuptools=True)) dipy-0.13.0/setup_helpers.py000066400000000000000000000333031317371701200160000ustar00rootroot00000000000000""" Distutils / setuptools helpers """ import os import sys from os.path import join as pjoin, split as psplit, splitext, dirname, exists import tempfile import shutil from distutils.version import LooseVersion from distutils.command.install_scripts import 
install_scripts from distutils.errors import CompileError, LinkError from distutils import log BAT_TEMPLATE = \ r"""@echo off REM wrapper to use shebang first line of {FNAME} set mypath=%~dp0 set pyscript="%mypath%{FNAME}" set /p line1=<%pyscript% if "%line1:~0,2%" == "#!" (goto :goodstart) echo First line of %pyscript% does not start with "#!" exit /b 1 :goodstart set py_exe=%line1:~2% REM quote exe in case of spaces in path name set py_exe="%py_exe%" call %py_exe% %pyscript% %* """ # Path of file to which to write C conditional vars from build-time checks CONFIG_H = pjoin('build', 'config.h') # File name (no directory) to which to write Python vars from build-time checks CONFIG_PY = '__config__.py' # Directory to which to write libraries for building LIB_DIR_TMP = pjoin('build', 'extra_libs') class install_scripts_bat(install_scripts): """ Make scripts executable on Windows Scripts are bare file names without extension on Unix, fitting (for example) Debian rules. They identify as python scripts with the usual ``#!`` first line. Unix recognizes and uses this first "shebang" line, but Windows does not. So, on Windows only we add a ``.bat`` wrapper of name ``bare_script_name.bat`` to call ``bare_script_name`` using the python interpreter from the #! first line of the script. Notes ----- See discussion at http://matthew-brett.github.com/pydagogue/installing_scripts.html and example at git://github.com/matthew-brett/myscripter.git for more background. """ def run(self): install_scripts.run(self) if not os.name == "nt": return for filepath in self.get_outputs(): # If we can find an executable name in the #! top line of the script # file, make .bat wrapper for script. with open(filepath, 'rt') as fobj: first_line = fobj.readline() if not (first_line.startswith('#!') and 'python' in first_line.lower()): log.info("No #!python executable found, skipping .bat wrapper") continue pth, fname = psplit(filepath) froot, ext = splitext(fname) bat_file = pjoin(pth, froot + '.bat') bat_contents = BAT_TEMPLATE.replace('{FNAME}', fname) log.info("Making %s wrapper for %s" % (bat_file, filepath)) if self.dry_run: continue with open(bat_file, 'wt') as fobj: fobj.write(bat_contents) def add_flag_checking(build_ext_class, flag_defines, top_package_dir=''): """ Override input `build_ext_class` to check compiler `flag_defines` Parameters ---------- build_ext_class : class Class implementing ``distutils.command.build_ext.build_ext`` interface, with a ``build_extensions`` method. flag_defines : sequence A sequence of elements, where the elements are sequences of length 4 consisting of (``compile_flags``, ``link_flags``, ``code``, ``defvar``). ``compile_flags`` is a sequence of compiler flags; ``link_flags`` is a sequence of linker flags. We check ``compile_flags`` to see whether a C source string ``code`` will compile, and ``link_flags`` to see whether the resulting object file will link. If both compile and link works, we add ``compile_flags`` to ``extra_compile_args`` and ``link_flags`` to ``extra_link_args`` of each extension when we build the extensions. If ``defvar`` is not None, it is the name of C variable to be defined in ``build/config.h`` with 1 if the combination of (``compile_flags``, ``link_flags``, ``code``) will compile and link, 0 otherwise. If None, do not write variable. top_package_dir : str String giving name of top-level package, for writing Python file containing configuration variables. If empty, do not write this file. 
Variables written are the same as the Cython variables generated via the `flag_defines` setting. Returns ------- checker_class : class A class with similar interface to ``distutils.command.build_ext.build_ext``, that adds all working ``compile_flags`` values to the ``extra_compile_args`` and working ``link_flags`` to ``extra_link_args`` attributes of extensions, before compiling. """ class Checker(build_ext_class): flag_defs = tuple(flag_defines) def can_compile_link(self, compile_flags, link_flags, code): cc = self.compiler fname = 'test.c' cwd = os.getcwd() tmpdir = tempfile.mkdtemp() try: os.chdir(tmpdir) with open(fname, 'wt') as fobj: fobj.write(code) try: objects = cc.compile([fname], extra_postargs=compile_flags) except CompileError: return False try: # Link shared lib rather then executable to avoid # http://bugs.python.org/issue4431 with MSVC 10+ cc.link_shared_lib(objects, "testlib", extra_postargs=link_flags) except (LinkError, TypeError): return False finally: os.chdir(cwd) shutil.rmtree(tmpdir) return True def build_extensions(self): """ Hook into extension building to check compiler flags """ def_vars = [] good_compile_flags = [] good_link_flags = [] config_dir = dirname(CONFIG_H) for compile_flags, link_flags, code, def_var in self.flag_defs: compile_flags = list(compile_flags) link_flags = list(link_flags) flags_good = self.can_compile_link(compile_flags, link_flags, code) if def_var: def_vars.append((def_var, flags_good)) if flags_good: good_compile_flags += compile_flags good_link_flags += link_flags else: log.warn("Flags {0} omitted because of compile or link " "error".format(compile_flags + link_flags)) if def_vars: # write config.h file if not exists(config_dir): self.mkpath(config_dir) with open(CONFIG_H, 'wt') as fobj: fobj.write('/* Automatically generated; do not edit\n') fobj.write(' C defines from build-time checks */\n') for v_name, v_value in def_vars: fobj.write('int {0} = {1};\n'.format( v_name, 1 if v_value else 0)) if def_vars and top_package_dir: # write __config__.py file config_py_dir = (top_package_dir if self.inplace else pjoin(self.build_lib, top_package_dir)) if not exists(config_py_dir): self.mkpath(config_py_dir) config_py = pjoin(config_py_dir, CONFIG_PY) with open(config_py, 'wt') as fobj: fobj.write('# Automatically generated; do not edit\n') fobj.write('# Variables from compile checks\n') for v_name, v_value in def_vars: fobj.write('{0} = {1}\n'.format(v_name, v_value)) if def_vars or good_compile_flags or good_link_flags: for ext in self.extensions: ext.extra_compile_args += good_compile_flags ext.extra_link_args += good_link_flags if def_vars: ext.include_dirs.append(config_dir) build_ext_class.build_extensions(self) return Checker def get_pkg_version(pkg_name): """ Return package version for `pkg_name` if installed Returns ------- pkg_version : str or None Return None if package not importable. Return 'unknown' if standard ``__version__`` string not present. Otherwise return version string. 
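    For example, ``get_pkg_version('numpy')`` gives the installed
    ``numpy.__version__`` string, ``None`` when numpy cannot be imported and
    ``'unknown'`` for a package that lacks a ``__version__`` attribute.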
""" try: pkg = __import__(pkg_name) except ImportError: return None try: return pkg.__version__ except AttributeError: return 'unknown' def version_error_msg(pkg_name, found_ver, min_ver): """ Return informative error message for version or None """ if found_ver is None: return 'We need package {0}, but not importable'.format(pkg_name) if found_ver == 'unknown': return 'We need {0} version {1}, but cannot get version'.format( pkg_name, min_ver) if LooseVersion(found_ver) >= LooseVersion(min_ver): return None return 'We need {0} version {1}, but found version {2}'.format(pkg_name, min_ver, found_ver) class SetupDependency(object): """ SetupDependency class Parameters ---------- import_name : str Name with which required package should be ``import``ed. min_ver : str Distutils version string giving minimum version for package. req_type : {'install_requires', 'setup_requires'}, optional Setuptools dependency type. heavy : {False, True}, optional If True, and package is already installed (importable), then do not add to the setuptools dependency lists. This prevents setuptools reinstalling big packages when the package was installed without using setuptools, or this is an upgrade, and we want to avoid the pip default behavior of upgrading all dependencies. install_name : str, optional Name identifying package to install from pypi etc, if different from `import_name`. """ def __init__(self, import_name, min_ver, req_type='install_requires', heavy=False, install_name=None): self.import_name = import_name self.min_ver = min_ver self.req_type = req_type self.heavy = heavy self.install_name = (import_name if install_name is None else install_name) def check_fill(self, setuptools_kwargs): """ Process this dependency, maybe filling `setuptools_kwargs` Run checks on this dependency. If not using setuptools, then raise error for unmet dependencies. If using setuptools, add missing or not-heavy dependencies to `setuptools_kwargs`. A heavy dependency is one that is inconvenient to install automatically, such as numpy or (particularly) scipy, matplotlib. Parameters ---------- setuptools_kwargs : dict Dictionary of setuptools keyword arguments that may be modified in-place while checking dependencies. """ found_ver = get_pkg_version(self.import_name) ver_err_msg = version_error_msg(self.import_name, found_ver, self.min_ver) if 'setuptools' not in sys.modules: # Not using setuptools; raise error for any unmet dependencies if ver_err_msg is not None: raise RuntimeError(ver_err_msg) return # Using setuptools; add packages to given section of # setup/install_requires, unless it's a heavy dependency for which we # already have an acceptable importable version. 
if self.heavy and ver_err_msg is None: return new_req = '{0}>={1}'.format(self.import_name, self.min_ver) old_reqs = setuptools_kwargs.get(self.req_type, []) setuptools_kwargs[self.req_type] = old_reqs + [new_req] class Bunch(object): def __init__(self, vars): for key, name in vars.items(): if key.startswith('__'): continue self.__dict__[key] = name def read_vars_from(ver_file): """ Read variables from Python text file Parameters ---------- ver_file : str Filename of file to read Returns ------- info_vars : Bunch instance Bunch object where variables read from `ver_file` appear as attributes """ # Use exec for compabibility with Python 3 ns = {} with open(ver_file, 'rt') as fobj: exec(fobj.read(), ns) return Bunch(ns) def make_np_ext_builder(build_ext_class): """ Override input `build_ext_class` to add numpy includes to extension This is useful to delay call of ``np.get_include`` until the extension is being built. Parameters ---------- build_ext_class : class Class implementing ``distutils.command.build_ext.build_ext`` interface, with a ``build_extensions`` method. Returns ------- np_build_ext_class : class A class with similar interface to ``distutils.command.build_ext.build_ext``, that adds libraries in ``np.get_include()`` to include directories of extension. """ class NpExtBuilder(build_ext_class): def build_extensions(self): """ Hook into extension building to add np include dirs """ # Delay numpy import until last moment import numpy as np for ext in self.extensions: ext.include_dirs.append(np.get_include()) build_ext_class.build_extensions(self) return NpExtBuilder dipy-0.13.0/src/000077500000000000000000000000001317371701200133315ustar00rootroot00000000000000dipy-0.13.0/src/conditional_omp.h000066400000000000000000000015301317371701200166570ustar00rootroot00000000000000/* Header file to conditionally wrap omp.h defines * * _OPENMP should be defined if omp.h is safe to include */ #if defined(_OPENMP) #include #define have_openmp 1 #else /* These are fake defines to make these symbols valid in the c / pyx file * * All uses of these symbols should to be prefaced with ``if have_openmp``, as * in: * * cdef omp_lock_t lock * if have_openmp: * openmp.omp_init_lock(&lock) * * */ typedef int omp_lock_t; void omp_init_lock(omp_lock_t *lock) {}; void omp_destroy_lock(omp_lock_t *lock) {}; void omp_set_lock(omp_lock_t *lock) {}; void omp_unset_lock(omp_lock_t *lock) {}; int omp_test_lock(omp_lock_t *lock) {}; void omp_set_dynamic(int dynamic_threads) {}; void omp_set_num_threads(int num_threads) {}; int omp_get_num_procs() {}; int omp_get_max_threads() {}; #define have_openmp 0 #endif dipy-0.13.0/src/cythonutils.h000066400000000000000000000001231317371701200160630ustar00rootroot00000000000000// Maximum number of dimension supported by Cython's memoryview #define MAX_NDIM 7 dipy-0.13.0/src/dpy_math.h000066400000000000000000000037111317371701200153110ustar00rootroot00000000000000/* dipy math functions * * To give some platform independence for simple math functions */ #include #include "numpy/npy_math.h" #define DPY_PI NPY_PI /* From numpy npy_math.c.src commit b2f6792d284b0e9383093c30d51ec3a82e8312fd*/ double dpy_log2(double x) { #ifdef HAVE_LOG2 return log2(x); #else return NPY_LOG2E*log(x); #endif } #if (defined(_WIN32) || defined(_WIN64)) && !defined(__GNUC__) #define fmin min #endif #define dpy_floor(x) floor((double)(x)) double dpy_rint(double x) { #ifdef HAVE_RINT return rint(x); #else double y, r; y = dpy_floor(x); r = x - y; if (r > 0.5) { y += 1.0; } /* Round to nearest even */ if (r 
== 0.5) { r = y - 2.0*dpy_floor(0.5*y); if (r == 1.0) { y += 1.0; } } return y; #endif } int dpy_signbit(double x) { #ifdef signbit return signbit(x); #else union { double d; short s[4]; int i[2]; } u; u.d = x; #if NPY_SIZEOF_INT == 4 #ifdef WORDS_BIGENDIAN /* defined in pyconfig.h */ return u.i[0] < 0; #else return u.i[1] < 0; #endif #else /* NPY_SIZEOF_INT != 4 */ #ifdef WORDS_BIGENDIAN return u.s[0] < 0; #else return u.s[3] < 0; #endif #endif /* NPY_SIZEOF_INT */ #endif /*NPY_HAVE_DECL_SIGNBIT*/ } #ifndef NPY_HAVE_DECL_ISNAN #define dpy_isnan(x) ((x) != (x)) #else #ifdef _MSC_VER #define dpy_isnan(x) _isnan((x)) #else #define dpy_isnan(x) isnan(x) #endif #endif #ifndef NPY_HAVE_DECL_ISFINITE #ifdef _MSC_VER #define dpy_isfinite(x) _finite((x)) #else #define dpy_isfinite(x) !npy_isnan((x) + (-x)) #endif #else #define dpy_isfinite(x) isfinite((x)) #endif #ifndef NPY_HAVE_DECL_ISINF #define dpy_isinf(x) (!dpy_isfinite(x) && !dpy_isnan(x)) #else #ifdef _MSC_VER #define dpy_isinf(x) (!_finite((x)) && !_isnan((x))) #else #define dpy_isinf(x) isinf((x)) #endif #endif dipy-0.13.0/src/safe_openmp.pxd000066400000000000000000000010631317371701200163420ustar00rootroot00000000000000cdef extern from "conditional_omp.h": ctypedef struct omp_lock_t: pass extern void omp_init_lock(omp_lock_t *) nogil extern void omp_destroy_lock(omp_lock_t *) nogil extern void omp_set_lock(omp_lock_t *) nogil extern void omp_unset_lock(omp_lock_t *) nogil extern int omp_test_lock(omp_lock_t *) nogil extern void omp_set_dynamic(int dynamic_threads) nogil extern void omp_set_num_threads(int num_threads) nogil extern int omp_get_num_procs() nogil extern int omp_get_max_threads() nogil cdef int have_openmp dipy-0.13.0/tools/000077500000000000000000000000001317371701200137025ustar00rootroot00000000000000dipy-0.13.0/tools/build_dmgs.py000077500000000000000000000045241317371701200163750ustar00rootroot00000000000000#!/usr/bin/env python """Script to build dmgs for buildbot builds Example ------- %(prog)s "dipy-dist/dipy*-0.6.0-py*mpkg" Note quotes around the globber first argument to protect it from shell globbing. 
""" import os from os.path import join as pjoin, isfile, isdir import sys import shutil from glob import glob from functools import partial from subprocess import check_call import warnings from argparse import ArgumentParser, RawDescriptionHelpFormatter my_call = partial(check_call, shell=True) BUILDBOT_LOGIN = "buildbot@nipy.bic.berkeley.edu" BUILDBOT_HTML = "nibotmi/public_html/" def main(): parser = ArgumentParser(description=__doc__, formatter_class=RawDescriptionHelpFormatter) parser.add_argument('globber', type=str, help='glob to serch for build mpkgs') parser.add_argument('--out-path', type=str, default='mpkg-dist', help='path for output files (default="mpkg-dist")', metavar='OUTPATH') parser.add_argument('--clobber', action='store_true', help='Delete OUTPATH if exists') args = parser.parse_args() globber = args.globber out_path = args.out_path address = "{0}:{1}{2}".format(BUILDBOT_LOGIN, BUILDBOT_HTML, globber) if isdir(out_path): if not args.clobber: raise RuntimeError('Path {0} exists and "clobber" not set'.format( out_path)) shutil.rmtree(out_path) os.mkdir(out_path) cwd = os.path.abspath(os.getcwd()) os.chdir(out_path) try: my_call('scp -r {0} .'.format(address)) found_mpkgs = sorted(glob('*.mpkg')) for mpkg in found_mpkgs: pkg_name, ext = os.path.splitext(mpkg) assert ext == '.mpkg' my_call('sudo reown_mpkg {0} root admin'.format(mpkg)) os.mkdir(pkg_name) pkg_moved = pjoin(pkg_name, mpkg) os.rename(mpkg, pkg_moved) readme = pjoin(pkg_moved, 'Contents', 'Resources', 'ReadMe.txt') if isfile(readme): shutil.copy(readme, pkg_name) else: warnings.warn("Could not find readme with " + readme) my_call('sudo hdiutil create {0}.dmg -srcfolder ./{0}/ -ov'.format(pkg_name)) finally: os.chdir(cwd) if __name__ == '__main__': main() dipy-0.13.0/tools/build_release000077500000000000000000000012561317371701200164330ustar00rootroot00000000000000#!/usr/bin/env python """dipy release build script. """ import os from toollib import (c, get_dipydir, compile_tree, cd, pjoin, remove_tree) # Get main dipy dir, this will raise if it doesn't pass some checks dipydir = get_dipydir() cd(dipydir) # Load release info execfile(pjoin('dipy','info.py')) # Check that everything compiles compile_tree() # Cleanup for d in ['build','dist',pjoin('doc','_build'),pjoin('doc','dist')]: if os.path.isdir(d): remove_tree(d) # Build source and binary distros c('./setup.py sdist --formats=gztar,zip') # Build eggs for version in ['2.5', '2.6', '2.7']: cmd='python'+version+' ./setup_egg.py bdist_egg' stat = os.system(cmd) dipy-0.13.0/tools/dipnost000077500000000000000000000023411317371701200153100ustar00rootroot00000000000000#!/usr/bin/env python # vim: ft=python """ Run nosetests for dipy while patching nose Use as ``nosetests`` except we always run the doctests, and we patch the doctest plugin to deal with a bug in nose at least <= 1.2.1 To reproduce a standard test run:: dipnost /path/to/dipy/dipy """ import sys import nose from nose.plugins import doctests # We were getting errors for the extension modules. See: # https://github.com/nose-devs/nose/pull/661 # and # https://github.com/nose-devs/nose/issues/447 def id(self): name = self._dt_test.name filename = self._dt_test.filename if filename is not None: pk = doctests.getpackage(filename) if pk is None: return name if not name.startswith(pk): name = "%s.%s" % (pk, name) return name def prepare_imports(): # Set matplotlib backend as 'agg' try: import matplotlib as mpl except ImportError: pass else: mpl.use('agg') if __name__ == '__main__': # Monkeypatch. 
Yes, it's nasty doctests.DocTestCase.id = id # Set mpl backend prepare_imports() # Enable doctests argv = sys.argv + ['--with-doctest'] nose.core.TestProgram(argv=argv, addplugins=[doctests.Doctest()]) dipy-0.13.0/tools/doc_mod.py000077500000000000000000000013051317371701200156620ustar00rootroot00000000000000#!/usr/bin/env python """ Make documentation for module Depends on some guessed filepaths Filepaths guessed by importing """ import sys from os.path import join as pjoin, dirname, abspath ROOT_DIR = abspath(pjoin(dirname(__file__), '..')) DOC_SDIR = pjoin(ROOT_DIR, 'doc', 'reference') TEMPLATE = \ """:mod:`%s` ========================= .. automodule:: %s :members: """ def main(): try: mod_name = sys.argv[1] except IndexError: raise OSError('Need module import as input') out_fname = pjoin(DOC_SDIR, mod_name + '.rst') open(out_fname, 'wt').write(TEMPLATE % (mod_name, mod_name)) if __name__ == '__main__': main() dipy-0.13.0/tools/doctest_extmods.py000077500000000000000000000031771317371701200174770ustar00rootroot00000000000000#!/usr/bin/env python """Run doctests in extension modules of Collect extension modules in Run doctests in each extension module Example: %prog dipy """ import sys import os from os.path import dirname, relpath, sep, join as pjoin, splitext, abspath from distutils.sysconfig import get_config_vars import doctest from optparse import OptionParser EXT_EXT = get_config_vars('SO')[0] def get_ext_modules(pkg_name): pkg = __import__(pkg_name, fromlist=['']) pkg_dir = abspath(dirname(pkg.__file__)) # pkg_root = __import__(pkg_name) ext_modules = [] for dirpath, dirnames, filenames in os.walk(pkg_dir): reldir = relpath(dirpath, pkg_dir) if reldir == '.': reldir = '' for filename in filenames: froot, ext = splitext(filename) if ext == EXT_EXT: mod_path = pjoin(reldir, froot) mod_uri = pkg_name + '.' + mod_path.replace(sep, '.') # fromlist=[''] results in submodule being returned, rather than the # top level module. See help(__import__) mod = __import__(mod_uri, fromlist=['']) ext_modules.append(mod) return ext_modules def main(): usage = "usage: %prog [options] \n\n" + __doc__ parser = OptionParser(usage=usage) opts, args = parser.parse_args() if len(args) == 0: parser.print_help() sys.exit(1) mod_name = args[0] mods = get_ext_modules(mod_name) for mod in mods: print("Testing module: " + mod.__name__) doctest.testmod(mod) if __name__ == '__main__': main() dipy-0.13.0/tools/ex2rst000077500000000000000000000222371317371701200150650ustar00rootroot00000000000000#!/usr/bin/env python # # Note before note: dipy copied this file from nitime who ... # Note: this file is copied (possibly with minor modifications) from the # sources of the PyMVPA project - http://pymvpa.org. It remains licensed as # the rest of PyMVPA (MIT license as of October 2010). # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## # # See COPYING file distributed along with the PyMVPA package for the # copyright and license terms. # ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Helper to automagically generate ReST versions of examples""" __docformat__ = 'restructuredtext' import os import sys import re import glob from optparse import OptionParser def auto_image(line): """Automatically replace generic image markers with ones that have full size (width/height) info, plus a :target: link to the original png, to be used in the html docs. """ img_re = re.compile(r'(\s*)\.\. 
image::\s*(.*)$') m = img_re.match(line) if m is None: # Not an image declaration, leave the line alone and return unmodified return line # Match means it's an image spec, we rewrite it with extra tags ini_space = m.group(1) lines = [line, ini_space + ' :width: 500\n', #ini_space + ' :height: 350\n' ] fspec = m.group(2) if fspec.endswith('.*'): fspec = fspec.replace('.*', '.png') fspec = fspec.replace('fig/', '../_images/') lines.append(ini_space + (' :target: %s\n' % fspec) ) lines.append('\n') return ''.join(lines) def exfile2rst(filename): """Open a Python script and convert it into an ReST string. """ # output string s = '' # open source file xfile = open(filename) # parser status vars inheader = True indocs = False doc2code = False code2doc = False # an empty line found in the example enables the check for a potentially # indented docstring starting on the next line (as an attempt to exclude # function or class docstrings) last_line_empty = False # indentation of indented docstring, which is removed from the RsT output # since we typically do not want an indentation there. indent_level = 0 for line in xfile: # skip header if inheader and \ not (line.startswith('"""') or line.startswith("'''")): continue # determine end of header if inheader and (line.startswith('"""') or line.startswith("'''")): inheader = False # strip comments and remove trailing whitespace if not indocs and last_line_empty: # first remove leading whitespace and store indent level cleanline = line[:line.find('#')].lstrip() indent_level = len(line) - len(cleanline) - 1 cleanline = cleanline.rstrip() else: cleanline = line[:line.find('#')].rstrip() if not indocs and line == '\n': last_line_empty = True else: last_line_empty = False # if we have something that should go into the text if indocs \ or (cleanline.startswith('"""') or cleanline.startswith("'''")): proc_line = None # handle doc start if not indocs: # guarenteed to start with """ if len(cleanline) > 3 \ and (cleanline.endswith('"""') \ or cleanline.endswith("'''")): # single line doc code2doc = True doc2code = True proc_line = cleanline[3:-3] else: # must be start of multiline block indocs = True code2doc = True # rescue what is left on the line proc_line = cleanline[3:] # strip """ else: # we are already in the docs # handle doc end if cleanline.endswith('"""') or cleanline.endswith("'''"): indocs = False doc2code = True # rescue what is left on the line proc_line = cleanline[:-3] # reset the indentation indent_level = 0 else: # has to be documentation # if the indentation is whitespace remove it, other wise # keep it (accounts for some variation in docstring # styles real_indent = \ indent_level - len(line[:indent_level].lstrip()) proc_line = line[real_indent:] if code2doc: code2doc = False s += '\n' proc_line = auto_image(proc_line) if proc_line: s += proc_line.rstrip() + '\n' else: if doc2code: doc2code = False s += '\n::\n' # has to be code s += ' %s' % line xfile.close() return s def exfile2rstfile(filename, opts): """ """ # doc filename dfilename = os.path.basename(filename[:-3]) + '.rst' # open dest file dfile = open(os.path.join(opts.outdir, os.path.basename(dfilename)), 'w') # place header dfile.write('.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n') # place cross-ref target dfile.write('.. _example_' + dfilename[:-4] + ':\n\n') # write converted ReST dfile.write(exfile2rst(filename)) if opts.sourceref: # write post example see also box msg = """ .. admonition:: Example source code You can download :download:`the full source code of this example <%s>`. 
This same script is also included in the %s source distribution under the :file:`doc/examples/` directory. """ % (filename, opts.project) dfile.write(msg) dfile.close() def main(): parser = OptionParser( \ usage="%prog [options] [...]", \ version="%prog 0.1", description="""\ %prog converts Python scripts into restructered text (ReST) format suitable for integration into the Sphinx documentation framework. Its key feature is that it extracts stand-alone (unassigned) single, or multiline triple-quote docstrings and moves them out of the code listing so that they are rendered as regular ReST, while at the same time maintaining their position relative to the listing. The detection of such docstrings is exclusively done by parsing the raw code so it is never actually imported into a running Python session. Docstrings have to be written using triple quotes (both forms " and ' are possible). It is recommend that such docstrings are preceded and followed by an empty line. Intended docstring can make use of the full linewidth from the second docstring line on. If the indentation of multiline docstring is maintained for all lines, the respective indentation is removed in the ReST output. The parser algorithm automatically excludes file headers and starts with the first (module-level) docstring instead. """ ) #' # define options parser.add_option('--verbose', action='store_true', dest='verbose', default=False, help='print status messages') parser.add_option('-x', '--exclude', action='append', dest='excluded', help="""\ Use this option to exclude single files from the to be parsed files. This is especially useful to exclude files when parsing complete directories. This option can be specified multiple times. """) parser.add_option('-o', '--outdir', action='store', dest='outdir', type='string', default=None, help="""\ Target directory to write the ReST output to. This is a required option. """) parser.add_option('--no-sourceref', action='store_false', default=True, dest='sourceref', help="""\ If specified, the source reference section will be suppressed. """) parser.add_option('--project', type='string', action='store', default='', dest='project', help="""\ Name of the project that contains the examples. This name is used in the 'seealso' source references. Default: '' """) # parse options (opts, args) = parser.parse_args() # read sys.argv[1:] by default # check for required options if opts.outdir is None: print('Required option -o, --outdir not specified.') sys.exit(1) # build up list of things to parse toparse = [] for t in args: # expand dirs if os.path.isdir(t): # add all python files in that dir toparse += glob.glob(os.path.join(t, '*.py')) else: toparse.append(t) # filter parse list if not opts.excluded is None: toparse = [t for t in toparse if not t in opts.excluded] toparse_list = toparse toparse = set(toparse) if len(toparse) != len(toparse_list): print('Ignoring duplicate parse targets.') if not os.path.exists(opts.outdir): os.mkdir(opts.outdir) # finally process all examples for t in toparse: exfile2rstfile(t, opts) if __name__ == '__main__': main() dipy-0.13.0/tools/github_stats.py000077500000000000000000000152351317371701200167650ustar00rootroot00000000000000#!/usr/bin/env python """Simple tools to query github.com and gather stats about issues. 
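
A short usage sketch (the argument values below are illustrative only): an
integer argument is read as a number of days to look back, any other argument
is treated as a git tag, and with no argument the most recent tag is used::

    python tools/github_stats.py           # closed since the most recent tag
    python tools/github_stats.py 90        # closed in the last 90 days
    python tools/github_stats.py 0.12.0    # closed since tag 0.12.0
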
Taken from ipython """ #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- from __future__ import print_function import json import re import sys from datetime import datetime, timedelta from subprocess import check_output from urllib import urlopen #----------------------------------------------------------------------------- # Globals #----------------------------------------------------------------------------- ISO8601 = "%Y-%m-%dT%H:%M:%SZ" PER_PAGE = 100 element_pat = re.compile(r'<(.+?)>') rel_pat = re.compile(r'rel=[\'"](\w+)[\'"]') LAST_RELEASE = datetime(2015, 3, 18) #----------------------------------------------------------------------------- # Functions #----------------------------------------------------------------------------- def parse_link_header(headers): link_s = headers.get('link', '') urls = element_pat.findall(link_s) rels = rel_pat.findall(link_s) d = {} for rel,url in zip(rels, urls): d[rel] = url return d def get_paged_request(url): """get a full list, handling APIv3's paging""" results = [] while url: print("fetching %s" % url, file=sys.stderr) f = urlopen(url) results.extend(json.load(f)) links = parse_link_header(f.headers) url = links.get('next') return results def get_issues(project="nipy/dipy", state="closed", pulls=False): """Get a list of the issues from the Github API.""" which = 'pulls' if pulls else 'issues' url = "https://api.github.com/repos/%s/%s?state=%s&per_page=%i" % (project, which, state, PER_PAGE) return get_paged_request(url) def _parse_datetime(s): """Parse dates in the format returned by the Github API.""" if s: return datetime.strptime(s, ISO8601) else: return datetime.fromtimestamp(0) def issues2dict(issues): """Convert a list of issues to a dict, keyed by issue number.""" idict = {} for i in issues: idict[i['number']] = i return idict def is_pull_request(issue): """Return True if the given issue is a pull request.""" return 'pull_request_url' in issue def issues_closed_since(period=LAST_RELEASE, project="nipy/dipy", pulls=False): """Get all issues closed since a particular point in time. period can either be a datetime object, or a timedelta object. In the latter case, it is used as a time before the present.""" which = 'pulls' if pulls else 'issues' if isinstance(period, timedelta): period = datetime.now() - period url = "https://api.github.com/repos/%s/%s?state=closed&sort=updated&since=%s&per_page=%i" % (project, which, period.strftime(ISO8601), PER_PAGE) allclosed = get_paged_request(url) # allclosed = get_issues(project=project, state='closed', pulls=pulls, since=period) filtered = [i for i in allclosed if _parse_datetime(i['closed_at']) > period] # exclude rejected PRs if pulls: filtered = [ pr for pr in filtered if pr['merged_at'] ] return filtered def sorted_by_field(issues, field='closed_at', reverse=False): """Return a list of issues sorted by closing date date.""" return sorted(issues, key = lambda i:i[field], reverse=reverse) def report(issues, show_urls=False): """Summary report about a list of issues, printing number and title. 
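
    With ``show_urls=True`` each line is written as a reST cross-reference
    role, for example (the issue number and title here are hypothetical)::

        * :ghpull:`42`: some merged pull request

    otherwise the plain form ``* 42: some merged pull request`` is printed.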
""" # titles may have unicode in them, so we must encode everything below if show_urls: for i in issues: role = 'ghpull' if 'merged_at' in i else 'ghissue' print('* :%s:`%d`: %s' % (role, i['number'], i['title'].encode('utf-8'))) else: for i in issues: print('* %d: %s' % (i['number'], i['title'].encode('utf-8'))) #----------------------------------------------------------------------------- # Main script #----------------------------------------------------------------------------- if __name__ == "__main__": # Whether to add reST urls for all issues in printout. show_urls = True # By default, search one month back tag = None if len(sys.argv) > 1: try: days = int(sys.argv[1]) except: tag = sys.argv[1] else: tag = check_output(['git', 'describe', '--abbrev=0']).strip() if tag: cmd = ['git', 'log', '-1', '--format=%ai', tag] tagday, tz = check_output(cmd).strip().rsplit(' ', 1) since = datetime.strptime(tagday, "%Y-%m-%d %H:%M:%S") else: since = datetime.now() - timedelta(days=days) print("fetching GitHub stats since %s (tag: %s)" % (since, tag), file=sys.stderr) # turn off to play interactively without redownloading, use %run -i if 1: issues = issues_closed_since(since, pulls=False) pulls = issues_closed_since(since, pulls=True) # For regular reports, it's nice to show them in reverse chronological order issues = sorted_by_field(issues, reverse=True) pulls = sorted_by_field(pulls, reverse=True) n_issues, n_pulls = map(len, (issues, pulls)) n_total = n_issues + n_pulls # Print summary report we can directly include into release notes. print() since_day = since.strftime("%Y/%m/%d") today = datetime.today().strftime("%Y/%m/%d") print("GitHub stats for %s - %s (tag: %s)" % (since_day, today, tag)) print() print("These lists are automatically generated, and may be incomplete or contain duplicates.") print() if tag: # print git info, in addition to GitHub info: since_tag = tag+'..' cmd = ['git', 'log', '--oneline', since_tag] ncommits = len(check_output(cmd).splitlines()) author_cmd = ['git', 'log', '--format=* %aN', since_tag] all_authors = check_output(author_cmd).splitlines() unique_authors = sorted(set(all_authors)) if len(unique_authors) == 0: print("No commits during this period.") else: print("The following %i authors contributed %i commits." 
% (len(unique_authors), ncommits)) print() print('\n'.join(unique_authors)) print() print() print("We closed a total of %d issues, %d pull requests and %d regular issues;\n" "this is the full list (generated with the script \n" ":file:`tools/github_stats.py`):" % (n_total, n_pulls, n_issues)) print() print('Pull Requests (%d):\n' % n_pulls) report(pulls, show_urls) print() print('Issues (%d):\n' % n_issues) report(issues, show_urls) dipy-0.13.0/tools/gitwash_dumper.py000077500000000000000000000172551317371701200173130ustar00rootroot00000000000000#!/usr/bin/env python ''' Checkout gitwash repo into directory and do search replace on name ''' import os from os.path import join as pjoin import shutil import sys import re import glob import fnmatch import tempfile from subprocess import call from optparse import OptionParser verbose = False def clone_repo(url, branch): cwd = os.getcwd() tmpdir = tempfile.mkdtemp() try: cmd = 'git clone %s %s' % (url, tmpdir) call(cmd, shell=True) os.chdir(tmpdir) cmd = 'git checkout %s' % branch call(cmd, shell=True) except: shutil.rmtree(tmpdir) raise finally: os.chdir(cwd) return tmpdir def cp_files(in_path, globs, out_path): try: os.makedirs(out_path) except OSError: pass out_fnames = [] for in_glob in globs: in_glob_path = pjoin(in_path, in_glob) for in_fname in glob.glob(in_glob_path): out_fname = in_fname.replace(in_path, out_path) pth, _ = os.path.split(out_fname) if not os.path.isdir(pth): os.makedirs(pth) shutil.copyfile(in_fname, out_fname) out_fnames.append(out_fname) return out_fnames def filename_search_replace(sr_pairs, filename, backup=False): ''' Search and replace for expressions in files ''' in_txt = open(filename, 'rt').read(-1) out_txt = in_txt[:] for in_exp, out_exp in sr_pairs: in_exp = re.compile(in_exp) out_txt = in_exp.sub(out_exp, out_txt) if in_txt == out_txt: return False open(filename, 'wt').write(out_txt) if backup: open(filename + '.bak', 'wt').write(in_txt) return True def copy_replace(replace_pairs, repo_path, out_path, cp_globs=('*',), rep_globs=('*',), renames = ()): out_fnames = cp_files(repo_path, cp_globs, out_path) renames = [(re.compile(in_exp), out_exp) for in_exp, out_exp in renames] fnames = [] for rep_glob in rep_globs: fnames += fnmatch.filter(out_fnames, rep_glob) if verbose: print '\n'.join(fnames) for fname in fnames: filename_search_replace(replace_pairs, fname, False) for in_exp, out_exp in renames: new_fname, n = in_exp.subn(out_exp, fname) if n: os.rename(fname, new_fname) break def make_link_targets(proj_name, user_name, repo_name, known_link_fname, out_link_fname, url=None, ml_url=None): """ Check and make link targets If url is None or ml_url is None, check if there are links present for these in `known_link_fname`. If not, raise error. The check is: Look for a target `proj_name`. Look for a target `proj_name` + ' mailing list' Also, look for a target `proj_name` + 'github'. If this exists, don't write this target into the new file below. If we are writing any of the url, ml_url, or github address, then write new file with these links, of form: .. _`proj_name` .. _`proj_name`: url .. 
_`proj_name` mailing list: url """ link_contents = open(known_link_fname, 'rt').readlines() have_url = not url is None have_ml_url = not ml_url is None have_gh_url = None for line in link_contents: if not have_url: match = re.match(r'..\s+_`%s`:\s+' % proj_name, line) if match: have_url = True if not have_ml_url: match = re.match(r'..\s+_`%s mailing list`:\s+' % proj_name, line) if match: have_ml_url = True if not have_gh_url: match = re.match(r'..\s+_`%s github`:\s+' % proj_name, line) if match: have_gh_url = True if not have_url or not have_ml_url: raise RuntimeError('Need command line or known project ' 'and / or mailing list URLs') lines = [] if not url is None: lines.append('.. _`%s`: %s\n' % (proj_name, url)) if not have_gh_url: gh_url = 'http://github.com/%s/%s\n' % (user_name, repo_name) lines.append('.. _`%s github`: %s\n' % (proj_name, gh_url)) if not ml_url is None: lines.append('.. _`%s mailing list`: %s\n' % (proj_name, ml_url)) if len(lines) == 0: # Nothing to do return # A neat little header line lines = ['.. %s\n' % proj_name] + lines out_links = open(out_link_fname, 'wt') out_links.writelines(lines) out_links.close() USAGE = ''' If not set with options, the repository name is the same as the If not set with options, the main github user is the same as the repository name.''' GITWASH_CENTRAL = 'git://github.com/matthew-brett/gitwash.git' GITWASH_BRANCH = 'master' def main(): parser = OptionParser() parser.set_usage(parser.get_usage().strip() + USAGE) parser.add_option("--repo-name", dest="repo_name", help="repository name - e.g. nitime", metavar="REPO_NAME") parser.add_option("--github-user", dest="main_gh_user", help="github username for main repo - e.g fperez", metavar="MAIN_GH_USER") parser.add_option("--gitwash-url", dest="gitwash_url", help="URL to gitwash repository - default %s" % GITWASH_CENTRAL, default=GITWASH_CENTRAL, metavar="GITWASH_URL") parser.add_option("--gitwash-branch", dest="gitwash_branch", help="branch in gitwash repository - default %s" % GITWASH_BRANCH, default=GITWASH_BRANCH, metavar="GITWASH_BRANCH") parser.add_option("--source-suffix", dest="source_suffix", help="suffix of ReST source files - default '.rst'", default='.rst', metavar="SOURCE_SUFFIX") parser.add_option("--project-url", dest="project_url", help="URL for project web pages", default=None, metavar="PROJECT_URL") parser.add_option("--project-ml-url", dest="project_ml_url", help="URL for project mailing list", default=None, metavar="PROJECT_ML_URL") (options, args) = parser.parse_args() if len(args) < 2: parser.print_help() sys.exit() out_path, project_name = args if options.repo_name is None: options.repo_name = project_name if options.main_gh_user is None: options.main_gh_user = options.repo_name repo_path = clone_repo(options.gitwash_url, options.gitwash_branch) try: copy_replace((('PROJECTNAME', project_name), ('REPONAME', options.repo_name), ('MAIN_GH_USER', options.main_gh_user)), repo_path, out_path, cp_globs=(pjoin('gitwash', '*'),), rep_globs=('*.rst',), renames=(('\.rst$', options.source_suffix),)) make_link_targets(project_name, options.main_gh_user, options.repo_name, pjoin(out_path, 'gitwash', 'known_projects.inc'), pjoin(out_path, 'gitwash', 'this_project.inc'), options.project_url, options.project_ml_url) finally: shutil.rmtree(repo_path) if __name__ == '__main__': main() dipy-0.13.0/tools/make_examples.py000077500000000000000000000125301317371701200170730ustar00rootroot00000000000000#!/usr/bin/env python """Run the py->rst conversion and run all examples. 
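
The script expects to be run from inside the ``doc/examples_built`` directory
of a dipy source tree, roughly as follows (a sketch inferred from the
working-directory check further down, not a documented command line)::

    cd doc/examples_built
    python ../../tools/make_examples.py
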
Steps are: analyze example index file for example py filenames check for any filenames in example directory not included do py to rst conversion, writing into build directory run """ # ----------------------------------------------------------------------------- # Library imports # ----------------------------------------------------------------------------- # Stdlib imports import os import os.path as op import sys import shutil from subprocess import check_call from glob import glob from time import time # Third-party imports # We must configure the mpl backend before making any further mpl imports import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt from matplotlib._pylab_helpers import Gcf import dipy # ----------------------------------------------------------------------------- # Function defintions # ----------------------------------------------------------------------------- # These global variables let show() be called by the scripts in the usual # manner, but when generating examples, we override it to write the figures to # files with a known name (derived from the script name) plus a counter figure_basename = None # We must change the show command to save instead def show(): allfm = Gcf.get_all_fig_managers() for fcount, fm in enumerate(allfm): fm.canvas.figure.savefig('%s_%02i.png' % (figure_basename, fcount + 1)) _mpl_show = plt.show plt.show = show # ----------------------------------------------------------------------------- # Main script # ----------------------------------------------------------------------------- # Where things are DOC_PATH = op.abspath('..') EG_INDEX_FNAME = op.join(DOC_PATH, 'examples_index.rst') EG_SRC_DIR = op.join(DOC_PATH, 'examples') # Work in examples directory #os.chdir(op.join(DOC_PATH, 'examples_built')) if not os.getcwd().endswith(op.join('doc', 'examples_built')): raise OSError('This must be run from the doc directory') # Copy the py files; check they are in the examples list and warn if not eg_index_contents = open(EG_INDEX_FNAME, 'rt').read() # Here I am adding an extra step. The list of examples to be executed need # also to be added in the following file (valid_examples.txt). This helps # with debugging the examples and the documentation only a few examples at # the time. flist_name = op.join(op.dirname(os.getcwd()), 'examples', 'valid_examples.txt') flist = open(flist_name, "r") validated_examples = flist.readlines() flist.close() # Parse "#" in lines validated_examples = [line.split("#", 1)[0] for line in validated_examples] # Remove leading and trailing white space from example names validated_examples = [line.strip() for line in validated_examples] # Remove blank lines validated_examples = list(filter(None, validated_examples)) for example in validated_examples: fullpath = op.join(EG_SRC_DIR, example) if not example.endswith(".py"): print("%s not a python file, skipping." % example) continue elif not op.isfile(fullpath): print("Cannot find file, %s, skipping." % example) continue shutil.copyfile(fullpath, example) # Check that example file is included in the docs file_root = example[:-3] if file_root not in eg_index_contents: msg = "Example, %s, not in index file %s." msg = msg % (example, EG_INDEX_FNAME) print(msg) # Run the conversion from .py to rst file check_call('python ../../tools/ex2rst --project dipy --outdir . 
.', shell=True) # added the path so that scripts can import other scripts on the same directory sys.path.insert(0, os.getcwd()) if not op.isdir('fig'): os.mkdir('fig') use_xvfb = os.environ.get('TEST_WITH_XVFB', False) use_memprof = os.environ.get('TEST_WITH_MEMPROF', False) if use_xvfb: try: from xvfbwrapper import Xvfb except ImportError: raise RuntimeError("You are trying to run a documentation build", "with 'TEST_WITH_XVFB' set to True, but ", "xvfbwrapper is not available. Please install", "xvfbwrapper and try again") display = Xvfb(width=1920, height=1080) display.start() if use_memprof: try: import memory_profiler except ImportError: raise RuntimeError("You are trying to run a documentation build", "with 'TEST_WITH_MEMPROF' set to True, but ", "memory_profiler is not available. Please install", "memory_profiler and try again") name = '' def run_script(): namespace = {} t1 = time() exec(open(script).read(), namespace) t2 = time() print("That took %.2f seconds to run" % (t2 - t1)) plt.close('all') del namespace # Execute each python script in the directory: for script in validated_examples: figure_basename = op.join('fig', op.splitext(script)[0]) if use_memprof: print("memory profiling ", script) memory_profiler.profile(run_script)() else: print(script) run_script() if use_xvfb: display.stop() # clean up stray images, pickles, npy files, etc for globber in ('*.nii.gz', '*.dpy', '*.npy', '*.pkl', '*.mat', '*.img', '*.hdr'): for fname in glob(globber): os.unlink(fname) dipy-0.13.0/tools/osxbuild.py000066400000000000000000000103621317371701200161070ustar00rootroot00000000000000"""Python script to build the OSX universal binaries. Stolen with thankfulness from the numpy distribution This is a simple script, most of the heavy lifting is done in bdist_mpkg. To run this script: 'python build.py' Installer is built using sudo so file permissions are correct when installed on user system. Script will prompt for sudo pwd. """ import os import sys import shutil import subprocess from optparse import OptionParser from getpass import getuser #USER_README = 'docs/README.rst' #DEV_README = SRC_DIR + 'README.rst' BUILD_DIR = 'build' DIST_DIR = 'dist' DIST_DMG_DIR = 'dist-dmg' def remove_dirs(sudo): print 'Removing old build and distribution directories...' print """The distribution is built as root, so the files have the correct permissions when installed by the user. Chown them to user for removal.""" if os.path.exists(BUILD_DIR): cmd = 'chown -R %s %s' % (getuser(), BUILD_DIR) if sudo: cmd = 'sudo ' + cmd shellcmd(cmd) shutil.rmtree(BUILD_DIR) if os.path.exists(DIST_DIR): cmd = 'sudo chown -R %s %s' % (getuser(), DIST_DIR) if sudo: cmd = 'sudo ' + cmd shellcmd(cmd) shutil.rmtree(DIST_DIR) def build_dist(readme, python_exe, sudo): print 'Building distribution... (using sudo)' cmd = '%s setup_egg.py bdist_mpkg --readme=%s' % ( python_exe, readme) if sudo: cmd = 'sudo ' + cmd shellcmd(cmd) def build_dmg(sudo): print 'Building disk image...' # Since we removed the dist directory at the start of the script, # our pkg should be the only file there. 
pkg = os.listdir(DIST_DIR)[0] fn, ext = os.path.splitext(pkg) dmg = fn + '.dmg' srcfolder = os.path.join(DIST_DIR, pkg) dstfolder = os.path.join(DIST_DMG_DIR, dmg) # build disk image try: os.mkdir(DIST_DMG_DIR) except OSError: pass try: os.unlink(dstfolder) except OSError: pass cmd = 'hdiutil create -srcfolder %s %s' % (srcfolder, dstfolder) if sudo: cmd = 'sudo ' + cmd shellcmd(cmd) def copy_readme(): """Copy a user README with info regarding the website, instead of the developer README which tells one how to build the source. """ print 'Copy user README.rst for installer.' shutil.copy(USER_README, DEV_README) def revert_readme(): """Revert the developer README.""" print 'Reverting README.rst...' cmd = 'svn revert %s' % DEV_README shellcmd(cmd) def shellcmd(cmd, verbose=True): """Call a shell command.""" if verbose: print cmd try: subprocess.check_call(cmd, shell=True) except subprocess.CalledProcessError, err: msg = """ Error while executing a shell command. %s """ % str(err) raise Exception(msg) def build(): parser = OptionParser() parser.add_option("-p", "--python", dest="python", default=sys.executable, help="python interpreter executable", metavar="PYTHON_EXE") parser.add_option("-r", "--readme", dest="readme", default='README.rst', help="README file", metavar="README") parser.add_option("-s", "--sudo", dest="sudo", default=False, help="Run as sudo or no", metavar="SUDO") (options, args) = parser.parse_args() try: src_dir = args[0] except IndexError: src_dir = '.' # Check source directory if not os.path.isfile(os.path.join(src_dir, 'setup.py')): raise RuntimeError('Run this script from directory ' 'with setup.py, or pass in this ' 'directory on command line') # update end-user documentation #copy_readme() #shellcmd("svn stat %s"%DEV_README) # change to source directory cwd = os.getcwd() os.chdir(src_dir) # build distribution remove_dirs(options.sudo) build_dist(options.readme, options.python, options.sudo) build_dmg(options.sudo) # change back to original directory os.chdir(cwd) # restore developer documentation #revert_readme() if __name__ == '__main__': build() dipy-0.13.0/tools/pack_examples.py000077500000000000000000000021741317371701200170770ustar00rootroot00000000000000#!/usr/bin/env python """ Script to pack built examples into suitably named archive Usage %s output_dir [doc_dir] """ import os from os.path import join as pjoin import sys import shutil import tarfile import dipy __doc__ = __doc__ % sys.argv[0] EG_BUILT_SDIR = 'examples_built' dpv = 'dipy-' + dipy.__version__ archive_name = dpv + '-doc-examples.tar.gz' try: out_root = sys.argv[1] except IndexError: print __doc__ sys.exit(1) try: os.mkdir(out_root) except OSError: pass try: doc_dir = sys.argv[2] except IndexError: doc_dir = os.getcwd() archive_fname = os.path.join(out_root, archive_name) eg_built_dir = pjoin(doc_dir, EG_BUILT_SDIR) eg_out_base = pjoin(out_root, dpv, 'doc') eg_out_dir = pjoin(eg_out_base, EG_BUILT_SDIR) if os.path.isdir(eg_out_dir): shutil.rmtree(eg_out_dir) def ignorandi(src, names): return [name for name in names if name == 'README' or name == '.gitignore'] shutil.copytree(eg_built_dir, eg_out_dir, ignore=ignorandi) os.chdir(out_root) tar = tarfile.open(archive_fname, 'w|gz') tar.add(dpv) tar.close() shutil.rmtree(pjoin(out_root, dpv)) print("Written " + archive_fname) dipy-0.13.0/tools/pythonsudo000077500000000000000000000010551317371701200160450ustar00rootroot00000000000000#!/bin/bash if [ $# -ne 1 ] then echo "Usage: `basename $0` {python-minor-version}" exit 1 fi PYV=$1 
WORKON_HOME=/Users/mb312/.virtualenvs PYPATH=/Library/Frameworks/Python.framework/Versions/2.$PYV/bin VENV=$WORKON_HOME/python2$PYV VENV_LIB=$VENV/lib/python2.$PYV/site-packages export PATH=$PYPATH:$PATH:$VENV/bin export PYTHONPATH=$VENV_LIB:$VENV_LIB/Cython-0.14.1-py2.$PYV-macosx-10.3-i386.egg:/Users/mb312/.virtualenvs/python2$PYV/lib/python2.$PYV/site-packages/nibabel-1.0.0-py2.$PYV.egg export CC=/usr/bin/gcc-4.0 export CXX=/usr/bin/g++-4.0 bash dipy-0.13.0/tools/release000077500000000000000000000023711317371701200152530ustar00rootroot00000000000000#!/usr/bin/env python """dipy release script. This should only be run at real release time. """ from os.path import join as pjoin from toollib import get_dipydir, cd, c # Get main dipy dir, this will raise if it doesn't pass some checks dipydir = get_dipydir() tooldir = pjoin(dipydir,'tools') distdir = pjoin(dipydir,'dist') #### Where I keep static backups of each release ###nibbackupdir = os.path.expanduser('~/dipy/backup') # Start in main dipy dir cd(dipydir) # Load release info execfile(pjoin('dipy','info.py')) print print "Releasing dipy" print "=================" print print 'Source dipy directory:', dipydir print # Perform local backup, go to tools dir to run it. cd(tooldir) # c('./make_tarball.py') # c('mv dipy-*.tgz %s' % nibbackupdir) # Build release files c('./build_release %s' % dipydir) # Register with the Python Package Index (PyPI) print "Registering with PyPI..." cd(dipydir) c('./setup.py register') # Upload all files c('./setup.py sdist --formats=gztar,zip upload') c('./setup.py bdist_egg upload') cd(distdir) #print "Uploading distribution files..." #c('scp * dipy@dipy.scipy.org:www/dist/') # print "Uploading backup files..." # cd(nibbackupdir) # c('scp `ls -1tr *tgz | tail -1` dipy@dipy.scipy.org:www/backup/') print "Done!" dipy-0.13.0/tools/toollib.py000066400000000000000000000024661317371701200157300ustar00rootroot00000000000000"""Various utilities common to nibabel release and maintenance tools. """ # Library imports import os import sys from distutils.dir_util import remove_tree # Useful shorthands pjoin = os.path.join cd = os.chdir # Utility functions def c(cmd): """Run system command, raise SystemExit if it returns an error.""" print ("$",cmd) stat = os.system(cmd) #stat = 0 # Uncomment this and comment previous to run in debug mode if stat: raise SystemExit("Command %s failed with code: %s" % (cmd, stat)) def get_dipydir(): """Get dipy directory from command line, or assume it's the one above.""" # Initialize arguments and check location try: dipydir = sys.argv[1] except IndexError: dipydir = '..' dipydir = os.path.abspath(dipydir) cd(dipydir) if not os.path.isdir('dipy') and os.path.isfile('setup.py'): raise SystemExit('Invalid dipy directory: %s' % dipydir) return dipydir # import compileall and then get dir os.path.split def compile_tree(): """Compile all Python files below current directory.""" stat = os.system('python -m compileall .') if stat: msg = '*** ERROR: Some Python files in tree do NOT compile! ***\n' msg += 'See messages above for the actual file that produced it.\n' raise SystemExit(msg) dipy-0.13.0/version_helpers.py000066400000000000000000000057351317371701200163350ustar00rootroot00000000000000''' Distutils / setuptools helpers for versioning This code started life in the nibabel package as nisexts/sexts.py Code transferred by Matthew Brett, who holds copyright. This version under the standard dipy BSD license. 
'''
from os.path import join as pjoin

try:
    from ConfigParser import ConfigParser
except ImportError:
    from configparser import ConfigParser

from distutils.command.build_py import build_py


def get_comrec_build(pkg_dir, build_cmd=build_py):
    """ Return extended build command class for recording commit

    The extended command tries to run git to find the current commit, getting
    the empty string if it fails.  It then writes the commit hash into a file
    in the `pkg_dir` path, named ``COMMIT_INFO.txt``.

    In due course this information can be used by the package after it is
    installed, to tell you what commit it was installed from if known.

    To make use of this system, you need a package with a COMMIT_INFO.txt file
    - e.g. ``myproject/COMMIT_INFO.txt`` - that might well look like this::

        # This is an ini file that may contain information about the code state
        [commit hash]
        # The line below may contain a valid hash if it has been substituted during 'git archive'
        archive_subst_hash=$Format:%h$
        # This line may be modified by the install process
        install_hash=

    The COMMIT_INFO file above is also designed to be used with git
    substitution - so you probably also want a ``.gitattributes`` file in the
    root directory of your working tree that contains something like this::

        myproject/COMMIT_INFO.txt export-subst

    That will cause the ``COMMIT_INFO.txt`` file to get filled in by ``git
    archive`` - useful in case someone makes such an archive - for example via
    the github 'download source' button.

    Although all the above will work as is, you might consider having
    something like a ``get_info()`` function in your package to display the
    commit information at the terminal.  See the ``pkg_info.py`` module in the
    nipy package for an example.
    """
    class MyBuildPy(build_cmd):
        ''' Subclass to write commit data into installation tree '''
        def run(self):
            build_cmd.run(self)
            import subprocess
            proc = subprocess.Popen('git rev-parse --short HEAD',
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE,
                                    shell=True)
            repo_commit, _ = proc.communicate()
            # Fix for python 3
            repo_commit = str(repo_commit)
            # We write the installation commit even if it's empty
            cfg_parser = ConfigParser()
            cfg_parser.read(pjoin(pkg_dir, 'COMMIT_INFO.txt'))
            cfg_parser.set('commit hash', 'install_hash', repo_commit)
            out_pth = pjoin(self.build_lib, pkg_dir, 'COMMIT_INFO.txt')
            cfg_parser.write(open(out_pth, 'wt'))
    return MyBuildPy
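

# A minimal usage sketch, not taken from dipy's own setup.py: it assumes a
# hypothetical package named 'myproject' that ships a
# myproject/COMMIT_INFO.txt file as described in the docstring above.  The
# returned build_py subclass is wired into setup() through ``cmdclass`` so
# that the current git commit hash is recorded at build time.

def _example_setup():  # illustrative only, never called by this module
    from distutils.core import setup
    setup(name='myproject',
          version='0.1',
          packages=['myproject'],
          # record the commit hash into myproject/COMMIT_INFO.txt in the
          # build tree whenever build_py runs
          cmdclass={'build_py': get_comrec_build('myproject')})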